duron 0.2.1 → 0.3.0-beta.0
- package/dist/action-job.d.ts +2 -0
- package/dist/action-job.d.ts.map +1 -1
- package/dist/action-job.js +20 -1
- package/dist/action-manager.d.ts +2 -0
- package/dist/action-manager.d.ts.map +1 -1
- package/dist/action-manager.js +3 -0
- package/dist/action.d.ts +7 -0
- package/dist/action.d.ts.map +1 -1
- package/dist/action.js +1 -0
- package/dist/adapters/adapter.d.ts +10 -2
- package/dist/adapters/adapter.d.ts.map +1 -1
- package/dist/adapters/adapter.js +59 -1
- package/dist/adapters/postgres/base.d.ts +9 -4
- package/dist/adapters/postgres/base.d.ts.map +1 -1
- package/dist/adapters/postgres/base.js +269 -19
- package/dist/adapters/postgres/schema.d.ts +249 -105
- package/dist/adapters/postgres/schema.d.ts.map +1 -1
- package/dist/adapters/postgres/schema.default.d.ts +249 -106
- package/dist/adapters/postgres/schema.default.d.ts.map +1 -1
- package/dist/adapters/postgres/schema.default.js +2 -2
- package/dist/adapters/postgres/schema.js +29 -1
- package/dist/adapters/schemas.d.ts +140 -7
- package/dist/adapters/schemas.d.ts.map +1 -1
- package/dist/adapters/schemas.js +52 -4
- package/dist/client.d.ts +8 -1
- package/dist/client.d.ts.map +1 -1
- package/dist/client.js +29 -1
- package/dist/errors.d.ts +6 -0
- package/dist/errors.d.ts.map +1 -1
- package/dist/errors.js +16 -1
- package/dist/index.d.ts +3 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +3 -1
- package/dist/server.d.ts +220 -16
- package/dist/server.d.ts.map +1 -1
- package/dist/server.js +123 -8
- package/dist/step-manager.d.ts +8 -2
- package/dist/step-manager.d.ts.map +1 -1
- package/dist/step-manager.js +138 -15
- package/dist/telemetry/adapter.d.ts +85 -0
- package/dist/telemetry/adapter.d.ts.map +1 -0
- package/dist/telemetry/adapter.js +128 -0
- package/dist/telemetry/index.d.ts +5 -0
- package/dist/telemetry/index.d.ts.map +1 -0
- package/dist/telemetry/index.js +4 -0
- package/dist/telemetry/local.d.ts +21 -0
- package/dist/telemetry/local.d.ts.map +1 -0
- package/dist/telemetry/local.js +180 -0
- package/dist/telemetry/noop.d.ts +16 -0
- package/dist/telemetry/noop.d.ts.map +1 -0
- package/dist/telemetry/noop.js +39 -0
- package/dist/telemetry/opentelemetry.d.ts +24 -0
- package/dist/telemetry/opentelemetry.d.ts.map +1 -0
- package/dist/telemetry/opentelemetry.js +202 -0
- package/migrations/postgres/20260117231749_clumsy_penance/migration.sql +3 -0
- package/migrations/postgres/20260117231749_clumsy_penance/snapshot.json +988 -0
- package/migrations/postgres/20260118202533_wealthy_mysterio/migration.sql +24 -0
- package/migrations/postgres/20260118202533_wealthy_mysterio/snapshot.json +1362 -0
- package/package.json +6 -4
- package/src/action-job.ts +35 -0
- package/src/action-manager.ts +5 -0
- package/src/action.ts +56 -0
- package/src/adapters/adapter.ts +151 -0
- package/src/adapters/postgres/base.ts +342 -23
- package/src/adapters/postgres/schema.default.ts +2 -2
- package/src/adapters/postgres/schema.ts +49 -1
- package/src/adapters/schemas.ts +81 -5
- package/src/client.ts +80 -2
- package/src/errors.ts +45 -1
- package/src/index.ts +3 -1
- package/src/server.ts +163 -8
- package/src/step-manager.ts +232 -13
- package/src/telemetry/adapter.ts +468 -0
- package/src/telemetry/index.ts +17 -0
- package/src/telemetry/local.ts +336 -0
- package/src/telemetry/noop.ts +95 -0
- package/src/telemetry/opentelemetry.ts +310 -0
package/src/adapters/postgres/base.ts

@@ -1,5 +1,5 @@
 import { and, asc, between, desc, eq, gt, gte, ilike, inArray, isNull, ne, or, sql } from 'drizzle-orm'
-import type {
+import type { PgAsyncDatabase, PgColumn } from 'drizzle-orm/pg-core'
 
 import {
   JOB_STATUS_ACTIVE,
@@ -24,6 +24,7 @@ import {
   type DelayJobStepOptions,
   type DeleteJobOptions,
   type DeleteJobsOptions,
+  type DeleteMetricsOptions,
   type FailJobOptions,
   type FailJobStepOptions,
   type FetchOptions,
@@ -32,13 +33,18 @@ import {
   type GetJobStepsResult,
   type GetJobsOptions,
   type GetJobsResult,
+  type GetMetricsOptions,
+  type GetMetricsResult,
+  type InsertMetricOptions,
   type Job,
   type JobSort,
   type JobStatusResult,
   type JobStep,
   type JobStepStatusResult,
+  type MetricSort,
   type RecoverJobsOptions,
   type RetryJobOptions,
+  type TimeTravelJobOptions,
 } from '../adapter.js'
 import createSchema from './schema.js'
 
@@ -47,7 +53,7 @@ type Schema = ReturnType<typeof createSchema>
 // Re-export types for backward compatibility
 export type { Job, JobStep } from '../adapter.js'
 
-type DrizzleDatabase =
+type DrizzleDatabase = PgAsyncDatabase<any, Schema>
 
 export interface AdapterOptions<Connection> {
   connection: Connection
@@ -168,6 +174,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         status: JOB_STATUS_COMPLETED,
         output,
         finished_at: sql`now()`,
+        updated_at: sql`now()`,
       })
       .where(
         and(
@@ -194,6 +201,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         status: JOB_STATUS_FAILED,
         error,
         finished_at: sql`now()`,
+        updated_at: sql`now()`,
       })
       .where(
         and(
@@ -218,6 +226,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
       .set({
         status: JOB_STATUS_CANCELLED,
         finished_at: sql`now()`,
+        updated_at: sql`now()`,
       })
       .where(
         and(
@@ -316,6 +325,196 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
     return result[0]!.id
   }
 
+  /**
+   * Internal method to time travel a job to restart from a specific step.
+   * The job must be in completed, failed, or cancelled status.
+   * Resets the job and ancestor steps to active status, deletes subsequent steps,
+   * and preserves completed parallel siblings.
+   *
+   * Algorithm:
+   * 1. Validate job is in terminal state (completed/failed/cancelled)
+   * 2. Find the target step and all its ancestors (using parent_step_id)
+   * 3. Determine which steps to keep:
+   *    - Steps completed BEFORE the target step (by created_at)
+   *    - Branch siblings that are completed (independent)
+   * 4. Delete steps that should not be kept
+   * 5. Reset ancestor steps to active status (they need to re-run)
+   * 6. Reset the target step to active status
+   * 7. Reset job to created status
+   *
+   * @returns Promise resolving to `true` if time travel succeeded, `false` otherwise
+   */
+  protected async _timeTravelJob({ jobId, stepId }: TimeTravelJobOptions): Promise<boolean> {
+    const result = this._map(
+      await this.db.execute<{ success: boolean }>(sql`
+        WITH RECURSIVE
+        -- Lock and validate the job
+        locked_job AS (
+          SELECT j.id
+          FROM ${this.tables.jobsTable} j
+          WHERE j.id = ${jobId}
+            AND j.status IN (${JOB_STATUS_COMPLETED}, ${JOB_STATUS_FAILED}, ${JOB_STATUS_CANCELLED})
+          FOR UPDATE OF j
+        ),
+        -- Validate target step exists and belongs to job
+        target_step AS (
+          SELECT s.id, s.parent_step_id, s.created_at
+          FROM ${this.tables.jobStepsTable} s
+          WHERE s.id = ${stepId}
+            AND s.job_id = ${jobId}
+            AND EXISTS (SELECT 1 FROM locked_job)
+        ),
+        -- Find all ancestor steps recursively (from target up to root)
+        ancestors AS (
+          SELECT s.id, s.parent_step_id, 0 AS depth
+          FROM ${this.tables.jobStepsTable} s
+          WHERE s.id = (SELECT parent_step_id FROM target_step)
+            AND EXISTS (SELECT 1 FROM target_step)
+          UNION ALL
+          SELECT s.id, s.parent_step_id, a.depth + 1
+          FROM ${this.tables.jobStepsTable} s
+          INNER JOIN ancestors a ON s.id = a.parent_step_id
+        ),
+        -- Steps to keep: completed steps created before target + completed parallel siblings of target and ancestors + their descendants
+        parallel_siblings AS (
+          -- Completed parallel siblings of target step
+          SELECT s.id
+          FROM ${this.tables.jobStepsTable} s
+          CROSS JOIN target_step ts
+          WHERE s.job_id = ${jobId}
+            AND s.id != ts.id
+            AND s.branch = true
+            AND s.status = ${STEP_STATUS_COMPLETED}
+            AND (
+              (s.parent_step_id IS NULL AND ts.parent_step_id IS NULL)
+              OR s.parent_step_id = ts.parent_step_id
+            )
+          UNION
+          -- Completed parallel siblings of each ancestor
+          SELECT s.id
+          FROM ${this.tables.jobStepsTable} s
+          INNER JOIN ancestors a ON (
+            (s.parent_step_id IS NULL AND a.parent_step_id IS NULL)
+            OR s.parent_step_id = a.parent_step_id
+          )
+          WHERE s.job_id = ${jobId}
+            AND s.id NOT IN (SELECT id FROM ancestors)
+            AND s.branch = true
+            AND s.status = ${STEP_STATUS_COMPLETED}
+        ),
+        -- Find all descendants of parallel siblings (to keep their children too)
+        parallel_descendants AS (
+          SELECT s.id
+          FROM ${this.tables.jobStepsTable} s
+          WHERE s.id IN (SELECT id FROM parallel_siblings)
+          UNION ALL
+          SELECT s.id
+          FROM ${this.tables.jobStepsTable} s
+          INNER JOIN parallel_descendants pd ON s.parent_step_id = pd.id
+          WHERE s.job_id = ${jobId}
+        ),
+        steps_to_keep AS (
+          -- Steps created before target that are completed (non-ancestor, non-target)
+          SELECT s.id
+          FROM ${this.tables.jobStepsTable} s
+          CROSS JOIN target_step ts
+          WHERE s.job_id = ${jobId}
+            AND s.created_at < ts.created_at
+            AND s.status = ${STEP_STATUS_COMPLETED}
+            AND s.id NOT IN (SELECT id FROM ancestors)
+            AND s.id != ts.id
+          UNION
+          -- All parallel siblings and their descendants
+          SELECT id FROM parallel_descendants
+        ),
+        -- Calculate time offset: shift preserved steps to start from "now"
+        time_offset AS (
+          SELECT
+            now() - MIN(s.started_at) AS offset_interval
+          FROM ${this.tables.jobStepsTable} s
+          WHERE s.id IN (SELECT id FROM steps_to_keep)
+        ),
+        -- Shift times of preserved steps to align with current time (only started_at/finished_at, NOT created_at to preserve ordering)
+        shift_preserved_times AS (
+          UPDATE ${this.tables.jobStepsTable}
+          SET
+            started_at = started_at + (SELECT offset_interval FROM time_offset),
+            finished_at = CASE
+              WHEN finished_at IS NOT NULL
+              THEN finished_at + (SELECT offset_interval FROM time_offset)
+              ELSE NULL
+            END,
+            updated_at = now()
+          WHERE id IN (SELECT id FROM steps_to_keep)
+            AND (SELECT offset_interval FROM time_offset) IS NOT NULL
+          RETURNING id
+        ),
+        -- Delete steps that are not in the keep list and are not ancestors/target
+        deleted_steps AS (
+          DELETE FROM ${this.tables.jobStepsTable}
+          WHERE job_id = ${jobId}
+            AND id NOT IN (SELECT id FROM steps_to_keep)
+            AND id NOT IN (SELECT id FROM ancestors)
+            AND id != (SELECT id FROM target_step)
+          RETURNING id
+        ),
+        -- Reset ancestor steps to active
+        reset_ancestors AS (
+          UPDATE ${this.tables.jobStepsTable}
+          SET
+            status = ${STEP_STATUS_ACTIVE},
+            output = NULL,
+            error = NULL,
+            finished_at = NULL,
+            started_at = now(),
+            expires_at = now() + (timeout_ms || ' milliseconds')::interval,
+            retries_count = 0,
+            delayed_ms = NULL,
+            history_failed_attempts = '{}'::jsonb,
+            updated_at = now()
+          WHERE id IN (SELECT id FROM ancestors)
+          RETURNING id
+        ),
+        -- Reset target step to active
+        reset_target AS (
+          UPDATE ${this.tables.jobStepsTable}
+          SET
+            status = ${STEP_STATUS_ACTIVE},
+            output = NULL,
+            error = NULL,
+            finished_at = NULL,
+            started_at = now(),
+            expires_at = now() + (timeout_ms || ' milliseconds')::interval,
+            retries_count = 0,
+            delayed_ms = NULL,
+            history_failed_attempts = '{}'::jsonb,
+            updated_at = now()
+          WHERE id = (SELECT id FROM target_step)
+          RETURNING id
+        ),
+        -- Reset job to created status
+        reset_job AS (
+          UPDATE ${this.tables.jobsTable}
+          SET
+            status = ${JOB_STATUS_CREATED},
+            output = NULL,
+            error = NULL,
+            started_at = NULL,
+            finished_at = NULL,
+            client_id = NULL,
+            expires_at = NULL,
+            updated_at = now()
+          WHERE id = ${jobId}
+            AND EXISTS (SELECT 1 FROM target_step)
+          RETURNING id
+        )
+        SELECT EXISTS(SELECT 1 FROM reset_job) AS success
+      `),
+    )
+
+    return result.length > 0 && result[0]!.success === true
+  }
+
   /**
    * Internal method to delete a job by its ID.
    * Active jobs cannot be deleted.
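For orientation, a minimal sketch of how a concrete adapter built on PostgresBaseAdapter might expose the new time-travel primitive. Only _timeTravelJob and the { jobId, stepId } shape of TimeTravelJobOptions come from this diff; the public timeTravelJob wrapper, the import path, and the generic arguments are assumptions for illustration.

// Hypothetical subclass; not part of the package.
import { PostgresBaseAdapter } from 'duron/adapters/postgres/base' // assumed import path

class ExampleAdapter extends PostgresBaseAdapter<any, unknown> {
  /** Sketch of a public wrapper: restart a finished job from a specific step. */
  async timeTravelJob(jobId: string, stepId: string): Promise<boolean> {
    // Runs the recursive CTE above: validates the job is in a terminal state,
    // prunes steps created after the target, keeps completed parallel branches,
    // resets the target and its ancestors to active, and returns the job to created.
    return this._timeTravelJob({ jobId, stepId })
  }
}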
@@ -449,7 +648,8 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         SET status = ${JOB_STATUS_ACTIVE},
             started_at = now(),
             expires_at = now() + (timeout_ms || ' milliseconds')::interval,
-            client_id = ${this.id}
+            client_id = ${this.id},
+            updated_at = now()
         FROM verify_concurrency vc
         WHERE j.id = vc.id
           AND vc.current_active < vc.concurrency_limit -- Final concurrency check using job's concurrency limit
@@ -534,7 +734,8 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
             expires_at = NULL,
             finished_at = NULL,
             output = NULL,
-            error = NULL
+            error = NULL,
+            updated_at = now()
         WHERE EXISTS (SELECT 1 FROM locked_jobs lj WHERE lj.id = j.id)
         RETURNING id, checksum
       ),
@@ -570,6 +771,8 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
     name,
     timeoutMs,
     retriesLimit,
+    parentStepId,
+    parallel = false,
   }: CreateOrRecoverJobStepOptions): Promise<CreateOrRecoverJobStepResult | null> {
     type StepResult = CreateOrRecoverJobStepResult
 
@@ -591,6 +794,8 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
       upserted_step AS (
         INSERT INTO ${this.tables.jobStepsTable} (
           job_id,
+          parent_step_id,
+          branch,
           name,
           timeout_ms,
           retries_limit,
@@ -602,6 +807,8 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         )
         SELECT
           ${jobId},
+          ${parentStepId},
+          ${parallel},
           ${name},
           ${timeoutMs},
           ${retriesLimit},
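The two hunks above thread the new parentStepId and parallel options through step creation (persisted in the branch column). A hedged sketch of a call site inside a subclass method follows; the method name _createOrRecoverJobStep is inferred from the CreateOrRecoverJobStepOptions/Result types in this diff, and the literal values are illustrative.

// Inside a hypothetical PostgresBaseAdapter subclass method (sketch only):
const step = await this._createOrRecoverJobStep({
  jobId,                       // job the step belongs to (from surrounding code)
  name: 'resize-image',        // illustrative step name
  timeoutMs: 30_000,
  retriesLimit: 3,
  parentStepId: parentStep.id, // new: nests the step under an existing step
  parallel: true,              // new: stored in the 'branch' column
})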
@@ -679,6 +886,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         status: STEP_STATUS_COMPLETED,
         output,
         finished_at: sql`now()`,
+        updated_at: sql`now()`,
       })
       .from(this.tables.jobsTable)
       .where(
@@ -707,6 +915,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         status: STEP_STATUS_FAILED,
         error,
         finished_at: sql`now()`,
+        updated_at: sql`now()`,
       })
       .from(this.tables.jobsTable)
       .where(
@@ -745,6 +954,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
             'delayedMs', ${delayMs}::integer
           )
         )`,
+        updated_at: sql`now()`,
       })
       .from(jobsTable)
       .where(
@@ -771,6 +981,7 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
       .set({
         status: STEP_STATUS_CANCELLED,
         finished_at: sql`now()`,
+        updated_at: sql`now()`,
       })
       .from(this.tables.jobsTable)
       .where(
@@ -823,12 +1034,12 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
   }
 
   /**
-   * Internal method to get steps for a job with
+   * Internal method to get all steps for a job with optional fuzzy search.
    * Steps are always ordered by created_at ASC.
    * Steps do not include output data.
    */
   protected async _getJobSteps(options: GetJobStepsOptions): Promise<GetJobStepsResult> {
-    const { jobId,
+    const { jobId, search } = options
 
     const jobStepsTable = this.tables.jobStepsTable
 
@@ -847,22 +1058,12 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
         : undefined,
     )
 
-    // Get total count
-    const total = await this.db.$count(jobStepsTable, where)
-
-    if (!total) {
-      return {
-        steps: [],
-        total: 0,
-        page,
-        pageSize,
-      }
-    }
-
     const steps = await this.db
       .select({
         id: jobStepsTable.id,
         jobId: jobStepsTable.job_id,
+        parentStepId: jobStepsTable.parent_step_id,
+        parallel: jobStepsTable.parallel,
         name: jobStepsTable.name,
         status: jobStepsTable.status,
         error: jobStepsTable.error,
@@ -880,14 +1081,10 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
       .from(jobStepsTable)
       .where(where)
       .orderBy(asc(jobStepsTable.created_at))
-      .limit(pageSize)
-      .offset((page - 1) * pageSize)
 
     return {
       steps,
-      total,
-      page,
-      pageSize,
+      total: steps.length,
     }
   }
 
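With pagination removed, _getJobSteps now returns every step for the job. A hedged sketch of the new call shape (option and result field names come from this diff; the jobId variable and call site are illustrative):

// Sketch: fetch all steps for a job, optionally fuzzy-matching on name.
const { steps, total } = await this._getJobSteps({ jobId, search: 'upload' })
// total === steps.length; each step now also carries parentStepId and parallel.
for (const step of steps) {
  console.log(step.name, step.status, step.parentStepId, step.parallel)
}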
@@ -1054,6 +1251,8 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
       .select({
         id: this.tables.jobStepsTable.id,
         jobId: this.tables.jobStepsTable.job_id,
+        parentStepId: this.tables.jobStepsTable.parent_step_id,
+        parallel: this.tables.jobStepsTable.parallel,
         name: this.tables.jobStepsTable.name,
         output: this.tables.jobStepsTable.output,
         status: this.tables.jobStepsTable.status,
@@ -1155,6 +1354,126 @@ export class PostgresBaseAdapter<Database extends DrizzleDatabase, Connection> e
     }
   }
 
+  // ============================================================================
+  // Metrics Methods
+  // ============================================================================
+
+  /**
+   * Internal method to insert multiple metric records in a single batch.
+   */
+  protected async _insertMetrics(metrics: InsertMetricOptions[]): Promise<number> {
+    if (metrics.length === 0) {
+      return 0
+    }
+
+    const values = metrics.map((m) => ({
+      job_id: m.jobId,
+      step_id: m.stepId ?? null,
+      name: m.name,
+      value: m.value,
+      attributes: m.attributes ?? {},
+      type: m.type,
+    }))
+
+    const result = await this.db
+      .insert(this.tables.metricsTable)
+      .values(values)
+      .returning({ id: this.tables.metricsTable.id })
+
+    return result.length
+  }
+
+  /**
+   * Internal method to get metrics for a job or step.
+   */
+  protected async _getMetrics(options: GetMetricsOptions): Promise<GetMetricsResult> {
+    const metricsTable = this.tables.metricsTable
+    const filters = options.filters ?? {}
+
+    // Build WHERE clause
+    const where = this._buildMetricsWhereClause(options.jobId, options.stepId, filters)
+
+    // Build sort
+    const sortInput = options.sort ?? { field: 'timestamp', order: 'desc' }
+    const sortFieldMap: Record<MetricSort['field'], any> = {
+      name: metricsTable.name,
+      value: metricsTable.value,
+      timestamp: metricsTable.timestamp,
+      createdAt: metricsTable.created_at,
+    }
+
+    // Get total count
+    const total = await this.db.$count(metricsTable, where)
+    if (!total) {
+      return {
+        metrics: [],
+        total: 0,
+      }
+    }
+
+    const sortField = sortFieldMap[sortInput.field]
+    const orderByClause = sortInput.order === 'asc' ? asc(sortField) : desc(sortField)
+
+    const metrics = await this.db
+      .select({
+        id: metricsTable.id,
+        jobId: metricsTable.job_id,
+        stepId: metricsTable.step_id,
+        name: metricsTable.name,
+        value: metricsTable.value,
+        attributes: metricsTable.attributes,
+        type: metricsTable.type,
+        timestamp: metricsTable.timestamp,
+        createdAt: metricsTable.created_at,
+      })
+      .from(metricsTable)
+      .where(where)
+      .orderBy(orderByClause)
+
+    return {
+      metrics,
+      total,
+    }
+  }
+
+  /**
+   * Internal method to delete all metrics for a job.
+   */
+  protected async _deleteMetrics(options: DeleteMetricsOptions): Promise<number> {
+    const result = await this.db
+      .delete(this.tables.metricsTable)
+      .where(eq(this.tables.metricsTable.job_id, options.jobId))
+      .returning({ id: this.tables.metricsTable.id })
+
+    return result.length
+  }
+
+  /**
+   * Build WHERE clause for metrics queries.
+   */
+  protected _buildMetricsWhereClause(jobId?: string, stepId?: string, filters?: GetMetricsOptions['filters']) {
+    const metricsTable = this.tables.metricsTable
+
+    return and(
+      jobId ? eq(metricsTable.job_id, jobId) : undefined,
+      stepId ? eq(metricsTable.step_id, stepId) : undefined,
+      filters?.name
+        ? Array.isArray(filters.name)
+          ? or(...filters.name.map((n) => ilike(metricsTable.name, `%${n}%`)))
+          : ilike(metricsTable.name, `%${filters.name}%`)
+        : undefined,
+      filters?.type
+        ? inArray(metricsTable.type, Array.isArray(filters.type) ? filters.type : [filters.type])
+        : undefined,
+      filters?.timestampRange && filters.timestampRange.length === 2
+        ? between(metricsTable.timestamp, filters.timestampRange[0]!, filters.timestampRange[1]!)
+        : undefined,
+      ...(filters?.attributesFilter && Object.keys(filters.attributesFilter).length > 0
+        ? this.#buildJsonbWhereConditions(filters.attributesFilter, metricsTable.attributes)
+        : []),
+    )
+  }
+
   // ============================================================================
   // Private Methods
   // ============================================================================
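To make the new metrics surface concrete, a hedged sketch of batch-inserting and reading back metrics from inside a subclass. Field and option names come from the hunk above; the jobId/stepId identifiers and literal values are illustrative.

// Sketch only; jobId and stepId are assumed to exist in the surrounding code.
await this._insertMetrics([
  { jobId, stepId, name: 'step.duration_ms', value: 1234, type: 'metric', attributes: { attempt: 1 } },
  { jobId, name: 'job.retried', value: 1, type: 'span_event' },
])

const { metrics, total } = await this._getMetrics({
  jobId,
  filters: { type: 'metric', name: 'duration' },  // name is matched with ILIKE '%duration%'
  sort: { field: 'timestamp', order: 'desc' },
})

// Per-job cleanup (rows also cascade when the job is deleted):
await this._deleteMetrics({ jobId })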
package/src/adapters/postgres/schema.default.ts

@@ -1,5 +1,5 @@
 import createSchema from './schema.js'
 
-const { schema, jobsTable, jobStepsTable } = createSchema('duron')
+const { schema, jobsTable, jobStepsTable, metricsTable } = createSchema('duron')
 
-export { schema, jobsTable, jobStepsTable }
+export { schema, jobsTable, jobStepsTable, metricsTable }
package/src/adapters/postgres/schema.ts

@@ -1,5 +1,17 @@
 import { sql } from 'drizzle-orm'
-import {
+import {
+  boolean,
+  check,
+  doublePrecision,
+  index,
+  integer,
+  jsonb,
+  pgSchema,
+  text,
+  timestamp,
+  unique,
+  uuid,
+} from 'drizzle-orm/pg-core'
 
 import { JOB_STATUSES, type JobStatus, STEP_STATUS_ACTIVE, STEP_STATUSES, type StepStatus } from '../../constants.js'
 import type { SerializableError } from '../../errors.js'
@@ -66,6 +78,8 @@ export default function createSchema(schemaName: string) {
       job_id: uuid('job_id')
         .notNull()
         .references(() => jobsTable.id, { onDelete: 'cascade' }),
+      parent_step_id: uuid('parent_step_id'),
+      parallel: boolean('branch').notNull().default(false), // DB column is 'branch', TypeScript uses 'parallel'
       name: text('name').notNull(),
       status: text('status').$type<StepStatus>().notNull().default(STEP_STATUS_ACTIVE),
       output: jsonb('output'),
@@ -98,6 +112,7 @@ export default function createSchema(schemaName: string) {
       index('idx_job_steps_status').on(table.status),
       index('idx_job_steps_name').on(table.name),
       index('idx_job_steps_expires_at').on(table.expires_at),
+      index('idx_job_steps_parent_step_id').on(table.parent_step_id),
       // Composite indexes
       index('idx_job_steps_job_status').on(table.job_id, table.status),
       index('idx_job_steps_job_name').on(table.job_id, table.name),
@@ -111,9 +126,42 @@ export default function createSchema(schemaName: string) {
     ],
   )
 
+  const metricsTable = schema.table(
+    'metrics',
+    {
+      id: uuid('id').primaryKey().defaultRandom(),
+      job_id: uuid('job_id')
+        .notNull()
+        .references(() => jobsTable.id, { onDelete: 'cascade' }),
+      step_id: uuid('step_id').references(() => jobStepsTable.id, { onDelete: 'cascade' }),
+      name: text('name').notNull(),
+      value: doublePrecision('value').notNull(),
+      attributes: jsonb('attributes').$type<Record<string, any>>().notNull().default({}),
+      type: text('type').$type<'metric' | 'span_event' | 'span_attribute'>().notNull(),
+      timestamp: timestamp('timestamp', { withTimezone: true }).notNull().defaultNow(),
+      created_at: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
+    },
+    (table) => [
+      // Single column indexes
+      index('idx_metrics_job_id').on(table.job_id),
+      index('idx_metrics_step_id').on(table.step_id),
+      index('idx_metrics_name').on(table.name),
+      index('idx_metrics_type').on(table.type),
+      index('idx_metrics_timestamp').on(table.timestamp),
+      // Composite indexes
+      index('idx_metrics_job_step').on(table.job_id, table.step_id),
+      index('idx_metrics_job_name').on(table.job_id, table.name),
+      index('idx_metrics_job_type').on(table.job_id, table.type),
+      // GIN index for JSONB attributes filtering
+      index('idx_metrics_attributes').using('gin', table.attributes),
+      check('metrics_type_check', sql`${table.type} IN ('metric', 'span_event', 'span_attribute')`),
+    ],
+  )
+
   return {
     schema,
     jobsTable,
     jobStepsTable,
+    metricsTable,
   }
 }