@coji/durably 0.11.0 → 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -69,15 +69,6 @@ interface RunDeleteEvent extends BaseEvent {
69
69
  jobName: string;
70
70
  labels: Record<string, string>;
71
71
  }
72
- /**
73
- * Run retry event (emitted when a failed run is retried)
74
- */
75
- interface RunRetryEvent extends BaseEvent {
76
- type: 'run:retry';
77
- runId: string;
78
- jobName: string;
79
- labels: Record<string, string>;
80
- }
81
72
  /**
82
73
  * Progress data reported by step.progress()
83
74
  */
@@ -172,7 +163,7 @@ interface WorkerErrorEvent extends BaseEvent {
172
163
  /**
173
164
  * All event types as discriminated union
174
165
  */
175
- type DurablyEvent = RunTriggerEvent | RunStartEvent | RunCompleteEvent | RunFailEvent | RunCancelEvent | RunDeleteEvent | RunRetryEvent | RunProgressEvent | StepStartEvent | StepCompleteEvent | StepFailEvent | StepCancelEvent | LogWriteEvent | WorkerErrorEvent;
166
+ type DurablyEvent = RunTriggerEvent | RunStartEvent | RunCompleteEvent | RunFailEvent | RunCancelEvent | RunDeleteEvent | RunProgressEvent | StepStartEvent | StepCompleteEvent | StepFailEvent | StepCancelEvent | LogWriteEvent | WorkerErrorEvent;
176
167
  /**
177
168
  * Event types for type-safe event names
178
169
  */
@@ -190,7 +181,7 @@ type EventInput<T extends EventType> = Omit<EventByType<T>, 'timestamp' | 'seque
190
181
  /**
191
182
  * All possible event inputs as a union (properly distributed)
192
183
  */
193
- type AnyEventInput = EventInput<'run:trigger'> | EventInput<'run:start'> | EventInput<'run:complete'> | EventInput<'run:fail'> | EventInput<'run:cancel'> | EventInput<'run:delete'> | EventInput<'run:retry'> | EventInput<'run:progress'> | EventInput<'step:start'> | EventInput<'step:complete'> | EventInput<'step:fail'> | EventInput<'step:cancel'> | EventInput<'log:write'> | EventInput<'worker:error'>;
184
+ type AnyEventInput = EventInput<'run:trigger'> | EventInput<'run:start'> | EventInput<'run:complete'> | EventInput<'run:fail'> | EventInput<'run:cancel'> | EventInput<'run:delete'> | EventInput<'run:progress'> | EventInput<'step:start'> | EventInput<'step:complete'> | EventInput<'step:fail'> | EventInput<'step:cancel'> | EventInput<'log:write'> | EventInput<'worker:error'>;
194
185
  /**
195
186
  * Event listener function
196
187
  */
@@ -394,6 +385,7 @@ interface Storage {
394
385
  getRuns<T extends Run = Run>(filter?: RunFilter): Promise<T[]>;
395
386
  claimNextPendingRun(excludeConcurrencyKeys: string[]): Promise<Run | null>;
396
387
  createStep(input: CreateStepInput): Promise<Step>;
388
+ deleteSteps(runId: string): Promise<void>;
397
389
  getSteps(runId: string): Promise<Step[]>;
398
390
  getCompletedStep(runId: string, name: string): Promise<Step | null>;
399
391
  createLog(input: CreateLogInput): Promise<Log>;
@@ -561,6 +553,7 @@ interface DurablyOptions<TLabels extends Record<string, string> = Record<string,
561
553
  pollingInterval?: number;
562
554
  heartbeatInterval?: number;
563
555
  staleThreshold?: number;
556
+ cleanupSteps?: boolean;
564
557
  /**
565
558
  * Zod schema for labels. When provided:
566
559
  * - Labels are type-checked at compile time
@@ -661,10 +654,10 @@ interface Durably<TJobs extends Record<string, JobHandle<string, unknown, unknow
661
654
  */
662
655
  stop(): Promise<void>;
663
656
  /**
664
- * Retry a failed run by resetting it to pending
665
- * @throws Error if run is not in failed status
657
+ * Create a fresh run from a completed, failed, or cancelled run
658
+ * @throws Error if run is pending, running, or does not exist
666
659
  */
667
- retry(runId: string): Promise<void>;
660
+ retrigger(runId: string): Promise<Run<TLabels>>;
668
661
  /**
669
662
  * Cancel a pending or running run
670
663
  * @throws Error if run is already completed, failed, or cancelled
@@ -729,4 +722,4 @@ declare function createDurably<TLabels extends Record<string, string> = Record<s
729
722
  */
730
723
  declare function withLogPersistence(): DurablyPlugin;
731
724
 
732
- export { type StepStartEvent as A, type BatchTriggerInput as B, type ClientRun as C, type Durably as D, type ErrorHandler as E, type StepsTable as F, type TriggerAndWaitResult as G, type TriggerOptions as H, createDurably as I, type JobDefinition as J, defineJob as K, type Log as L, toClientRun as M, withLogPersistence as N, type ProgressData as P, type Run as R, type SchemaVersionsTable as S, type TriggerAndWaitOptions as T, type WorkerErrorEvent as W, type RunFilter as a, type Database as b, type DurablyEvent as c, type DurablyOptions as d, type DurablyPlugin as e, type EventType as f, type JobHandle as g, type JobInput as h, type JobOutput as i, type LogData as j, type LogWriteEvent as k, type LogsTable as l, type RunCancelEvent as m, type RunCompleteEvent as n, type RunDeleteEvent as o, type RunFailEvent as p, type RunProgressEvent as q, type RunRetryEvent as r, type RunStartEvent as s, type RunTriggerEvent as t, type RunsTable as u, type Step as v, type StepCancelEvent as w, type StepCompleteEvent as x, type StepContext as y, type StepFailEvent as z };
725
+ export { type StepsTable as A, type BatchTriggerInput as B, type ClientRun as C, type Durably as D, type ErrorHandler as E, type TriggerAndWaitResult as F, type TriggerOptions as G, createDurably as H, defineJob as I, type JobDefinition as J, toClientRun as K, type Log as L, withLogPersistence as M, type ProgressData as P, type Run as R, type SchemaVersionsTable as S, type TriggerAndWaitOptions as T, type WorkerErrorEvent as W, type RunFilter as a, type Database as b, type DurablyEvent as c, type DurablyOptions as d, type DurablyPlugin as e, type EventType as f, type JobHandle as g, type JobInput as h, type JobOutput as i, type LogData as j, type LogWriteEvent as k, type LogsTable as l, type RunCancelEvent as m, type RunCompleteEvent as n, type RunDeleteEvent as o, type RunFailEvent as p, type RunProgressEvent as q, type RunStartEvent as r, type RunTriggerEvent as s, type RunsTable as t, type Step as u, type StepCancelEvent as v, type StepCompleteEvent as w, type StepContext as x, type StepFailEvent as y, type StepStartEvent as z };
package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
1
- import { R as Run, a as RunFilter, D as Durably } from './index-fppJjkF-.js';
2
- export { B as BatchTriggerInput, C as ClientRun, b as Database, c as DurablyEvent, d as DurablyOptions, e as DurablyPlugin, E as ErrorHandler, f as EventType, J as JobDefinition, g as JobHandle, h as JobInput, i as JobOutput, L as Log, j as LogData, k as LogWriteEvent, l as LogsTable, P as ProgressData, m as RunCancelEvent, n as RunCompleteEvent, o as RunDeleteEvent, p as RunFailEvent, q as RunProgressEvent, r as RunRetryEvent, s as RunStartEvent, t as RunTriggerEvent, u as RunsTable, S as SchemaVersionsTable, v as Step, w as StepCancelEvent, x as StepCompleteEvent, y as StepContext, z as StepFailEvent, A as StepStartEvent, F as StepsTable, T as TriggerAndWaitOptions, G as TriggerAndWaitResult, H as TriggerOptions, W as WorkerErrorEvent, I as createDurably, K as defineJob, M as toClientRun, N as withLogPersistence } from './index-fppJjkF-.js';
1
+ import { R as Run, a as RunFilter, D as Durably } from './index-hM7-oiyj.js';
2
+ export { B as BatchTriggerInput, C as ClientRun, b as Database, c as DurablyEvent, d as DurablyOptions, e as DurablyPlugin, E as ErrorHandler, f as EventType, J as JobDefinition, g as JobHandle, h as JobInput, i as JobOutput, L as Log, j as LogData, k as LogWriteEvent, l as LogsTable, P as ProgressData, m as RunCancelEvent, n as RunCompleteEvent, o as RunDeleteEvent, p as RunFailEvent, q as RunProgressEvent, r as RunStartEvent, s as RunTriggerEvent, t as RunsTable, S as SchemaVersionsTable, u as Step, v as StepCancelEvent, w as StepCompleteEvent, x as StepContext, y as StepFailEvent, z as StepStartEvent, A as StepsTable, T as TriggerAndWaitOptions, F as TriggerAndWaitResult, G as TriggerOptions, W as WorkerErrorEvent, H as createDurably, I as defineJob, K as toClientRun, M as withLogPersistence } from './index-hM7-oiyj.js';
3
3
  import 'kysely';
4
4
  import 'zod';
5
5
 
@@ -15,7 +15,7 @@ declare class CancelledError extends Error {
15
15
  /**
16
16
  * Run operation types for onRunAccess
17
17
  */
18
- type RunOperation = 'read' | 'subscribe' | 'steps' | 'retry' | 'cancel' | 'delete';
18
+ type RunOperation = 'read' | 'subscribe' | 'steps' | 'retrigger' | 'cancel' | 'delete';
19
19
  /**
20
20
  * Subscription filter — only fields that SSE subscriptions actually support.
21
21
  */
@@ -70,7 +70,7 @@ interface DurablyHandler {
70
70
  * - GET {basePath}/run?runId=xxx - Get single run
71
71
  * - GET {basePath}/steps?runId=xxx - Get steps
72
72
  * - POST {basePath}/trigger - Trigger a job
73
- * - POST {basePath}/retry?runId=xxx - Retry a failed run
73
+ * - POST {basePath}/retrigger?runId=xxx - Create a fresh run from a terminal run
74
74
  * - POST {basePath}/cancel?runId=xxx - Cancel a run
75
75
  * - DELETE {basePath}/run?runId=xxx - Delete a run
76
76
  */
package/dist/index.js CHANGED
@@ -578,6 +578,9 @@ function createKyselyStorage(db) {
578
578
  await db.insertInto("durably_steps").values(step).execute();
579
579
  return rowToStep(step);
580
580
  },
581
+ async deleteSteps(runId) {
582
+ await db.deleteFrom("durably_steps").where("run_id", "=", runId).execute();
583
+ },
581
584
  async getSteps(runId) {
582
585
  const rows = await db.selectFrom("durably_steps").selectAll().where("run_id", "=", runId).orderBy("index", "asc").execute();
583
586
  return rows.map(rowToStep);
@@ -877,6 +880,12 @@ function createWorker(config, storage, eventEmitter, jobRegistry) {
877
880
  } catch (error) {
878
881
  await handleRunFailure(run.id, run.jobName, error);
879
882
  } finally {
883
+ if (config.cleanupSteps) {
884
+ try {
885
+ await storage.deleteSteps(run.id);
886
+ } catch {
887
+ }
888
+ }
880
889
  dispose();
881
890
  if (heartbeatInterval) {
882
891
  clearInterval(heartbeatInterval);
@@ -963,10 +972,18 @@ function createWorker(config, storage, eventEmitter, jobRegistry) {
963
972
  var DEFAULTS = {
964
973
  pollingInterval: 1e3,
965
974
  heartbeatInterval: 5e3,
966
- staleThreshold: 3e4
975
+ staleThreshold: 3e4,
976
+ cleanupSteps: true
967
977
  };
968
978
  function createDurablyInstance(state, jobs) {
969
979
  const { db, storage, eventEmitter, jobRegistry, worker } = state;
980
+ async function getRunOrThrow(runId) {
981
+ const run = await storage.getRun(runId);
982
+ if (!run) {
983
+ throw new Error(`Run not found: ${runId}`);
984
+ }
985
+ return run;
986
+ }
970
987
  const durably = {
971
988
  db,
972
989
  storage,
@@ -1018,7 +1035,6 @@ function createDurablyInstance(state, jobs) {
1018
1035
  "run:fail",
1019
1036
  "run:cancel",
1020
1037
  "run:delete",
1021
- "run:retry",
1022
1038
  "run:progress",
1023
1039
  "step:start",
1024
1040
  "step:complete",
@@ -1050,36 +1066,34 @@ function createDurablyInstance(state, jobs) {
1050
1066
  }
1051
1067
  });
1052
1068
  },
1053
- async retry(runId) {
1054
- const run = await storage.getRun(runId);
1055
- if (!run) {
1056
- throw new Error(`Run not found: ${runId}`);
1057
- }
1058
- if (run.status === "completed") {
1059
- throw new Error(`Cannot retry completed run: ${runId}`);
1060
- }
1069
+ async retrigger(runId) {
1070
+ const run = await getRunOrThrow(runId);
1061
1071
  if (run.status === "pending") {
1062
- throw new Error(`Cannot retry pending run: ${runId}`);
1072
+ throw new Error(`Cannot retrigger pending run: ${runId}`);
1063
1073
  }
1064
1074
  if (run.status === "running") {
1065
- throw new Error(`Cannot retry running run: ${runId}`);
1075
+ throw new Error(`Cannot retrigger running run: ${runId}`);
1066
1076
  }
1067
- await storage.updateRun(runId, {
1068
- status: "pending",
1069
- error: null
1077
+ if (!jobRegistry.get(run.jobName)) {
1078
+ throw new Error(`Unknown job: ${run.jobName}`);
1079
+ }
1080
+ const nextRun = await storage.createRun({
1081
+ jobName: run.jobName,
1082
+ input: run.input,
1083
+ concurrencyKey: run.concurrencyKey ?? void 0,
1084
+ labels: run.labels
1070
1085
  });
1071
1086
  eventEmitter.emit({
1072
- type: "run:retry",
1073
- runId,
1087
+ type: "run:trigger",
1088
+ runId: nextRun.id,
1074
1089
  jobName: run.jobName,
1090
+ input: run.input,
1075
1091
  labels: run.labels
1076
1092
  });
1093
+ return nextRun;
1077
1094
  },
1078
1095
  async cancel(runId) {
1079
- const run = await storage.getRun(runId);
1080
- if (!run) {
1081
- throw new Error(`Run not found: ${runId}`);
1082
- }
1096
+ const run = await getRunOrThrow(runId);
1083
1097
  if (run.status === "completed") {
1084
1098
  throw new Error(`Cannot cancel completed run: ${runId}`);
1085
1099
  }
@@ -1089,9 +1103,14 @@ function createDurablyInstance(state, jobs) {
1089
1103
  if (run.status === "cancelled") {
1090
1104
  throw new Error(`Cannot cancel already cancelled run: ${runId}`);
1091
1105
  }
1106
+ const wasPending = run.status === "pending";
1092
1107
  await storage.updateRun(runId, {
1093
- status: "cancelled"
1108
+ status: "cancelled",
1109
+ completedAt: (/* @__PURE__ */ new Date()).toISOString()
1094
1110
  });
1111
+ if (wasPending && state.cleanupSteps) {
1112
+ await storage.deleteSteps(runId);
1113
+ }
1095
1114
  eventEmitter.emit({
1096
1115
  type: "run:cancel",
1097
1116
  runId,
@@ -1100,10 +1119,7 @@ function createDurablyInstance(state, jobs) {
1100
1119
  });
1101
1120
  },
1102
1121
  async deleteRun(runId) {
1103
- const run = await storage.getRun(runId);
1104
- if (!run) {
1105
- throw new Error(`Run not found: ${runId}`);
1106
- }
1122
+ const run = await getRunOrThrow(runId);
1107
1123
  if (run.status === "pending") {
1108
1124
  throw new Error(`Cannot delete pending run: ${runId}`);
1109
1125
  }
@@ -1143,7 +1159,8 @@ function createDurably(options) {
1143
1159
  const config = {
1144
1160
  pollingInterval: options.pollingInterval ?? DEFAULTS.pollingInterval,
1145
1161
  heartbeatInterval: options.heartbeatInterval ?? DEFAULTS.heartbeatInterval,
1146
- staleThreshold: options.staleThreshold ?? DEFAULTS.staleThreshold
1162
+ staleThreshold: options.staleThreshold ?? DEFAULTS.staleThreshold,
1163
+ cleanupSteps: options.cleanupSteps ?? DEFAULTS.cleanupSteps
1147
1164
  };
1148
1165
  const db = new Kysely({ dialect: options.dialect });
1149
1166
  const storage = createKyselyStorage(db);
@@ -1157,6 +1174,7 @@ function createDurably(options) {
1157
1174
  jobRegistry,
1158
1175
  worker,
1159
1176
  labelsSchema: options.labels,
1177
+ cleanupSteps: config.cleanupSteps,
1160
1178
  migrating: null,
1161
1179
  migrated: false
1162
1180
  };
@@ -1564,12 +1582,12 @@ function createDurablyHandler(durably, options) {
1564
1582
  return jsonResponse(steps);
1565
1583
  });
1566
1584
  }
1567
- async function handleRetry(url, ctx) {
1585
+ async function handleRetrigger(url, ctx) {
1568
1586
  return withErrorHandling(async () => {
1569
- const result = await requireRunAccess(url, ctx, "retry");
1587
+ const result = await requireRunAccess(url, ctx, "retrigger");
1570
1588
  if (result instanceof Response) return result;
1571
- await durably.retry(result.runId);
1572
- return successResponse();
1589
+ const run = await durably.retrigger(result.runId);
1590
+ return jsonResponse({ success: true, runId: run.id });
1573
1591
  });
1574
1592
  }
1575
1593
  async function handleCancel(url, ctx) {
@@ -1688,16 +1706,6 @@ function createDurablyHandler(durably, options) {
1688
1706
  });
1689
1707
  }
1690
1708
  }),
1691
- durably.on("run:retry", (event) => {
1692
- if (matchesFilter(event.jobName, event.labels)) {
1693
- ctrl.enqueue({
1694
- type: "run:retry",
1695
- runId: event.runId,
1696
- jobName: event.jobName,
1697
- labels: event.labels
1698
- });
1699
- }
1700
- }),
1701
1709
  durably.on("run:progress", (event) => {
1702
1710
  if (matchesFilter(event.jobName, event.labels)) {
1703
1711
  ctrl.enqueue({
@@ -1801,7 +1809,7 @@ function createDurablyHandler(durably, options) {
1801
1809
  }
1802
1810
  if (method === "POST") {
1803
1811
  if (path === "/trigger") return await handleTrigger(request, ctx);
1804
- if (path === "/retry") return await handleRetry(url, ctx);
1812
+ if (path === "/retrigger") return await handleRetrigger(url, ctx);
1805
1813
  if (path === "/cancel") return await handleCancel(url, ctx);
1806
1814
  }
1807
1815
  if (method === "DELETE") {
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/durably.ts","../src/events.ts","../src/job.ts","../src/migrations.ts","../src/storage.ts","../src/worker.ts","../src/errors.ts","../src/context.ts","../src/define-job.ts","../src/http.ts","../src/sse.ts","../src/server.ts"],"sourcesContent":["import type { Dialect } from 'kysely'\nimport { Kysely } from 'kysely'\nimport type { z } from 'zod'\nimport type { JobDefinition } from './define-job'\nimport {\n type AnyEventInput,\n type DurablyEvent,\n type ErrorHandler,\n type EventEmitter,\n type EventListener,\n type EventType,\n type Unsubscribe,\n createEventEmitter,\n} from './events'\nimport {\n type JobHandle,\n type JobRegistry,\n createJobHandle,\n createJobRegistry,\n} from './job'\nimport { runMigrations } from './migrations'\nimport type { Database } from './schema'\nimport {\n type Run,\n type RunFilter,\n type Storage,\n createKyselyStorage,\n} from './storage'\nimport { type Worker, createWorker } from './worker'\n\n/**\n * Options for creating a Durably instance\n */\nexport interface DurablyOptions<\n TLabels extends Record<string, string> = Record<string, string>,\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n TJobs extends Record<string, JobDefinition<string, any, any>> = Record<\n string,\n never\n >,\n> {\n dialect: Dialect\n pollingInterval?: number\n heartbeatInterval?: number\n staleThreshold?: number\n /**\n * Zod schema for labels. When provided:\n * - Labels are type-checked at compile time\n * - Labels are validated at runtime on trigger()\n */\n labels?: z.ZodType<TLabels>\n /**\n * Job definitions to register. 
Shorthand for calling .register() after creation.\n * @example\n * ```ts\n * const durably = createDurably({\n * dialect,\n * jobs: { importCsv: importCsvJob, syncUsers: syncUsersJob },\n * })\n * ```\n */\n jobs?: TJobs\n}\n\n/**\n * Default configuration values\n */\nconst DEFAULTS = {\n pollingInterval: 1000,\n heartbeatInterval: 5000,\n staleThreshold: 30000,\n} as const\n\n/**\n * Plugin interface for extending Durably\n */\nexport interface DurablyPlugin {\n name: string\n // biome-ignore lint/suspicious/noExplicitAny: plugin needs to accept any Durably instance\n install(durably: Durably<any, any>): void\n}\n\n/**\n * Helper type to transform JobDefinition record to JobHandle record\n */\ntype TransformToHandles<\n TJobs extends Record<string, JobDefinition<string, unknown, unknown>>,\n TLabels extends Record<string, string> = Record<string, string>,\n> = {\n [K in keyof TJobs]: TJobs[K] extends JobDefinition<\n infer TName,\n infer TInput,\n infer TOutput\n >\n ? JobHandle<TName & string, TInput, TOutput, TLabels>\n : never\n}\n\n/**\n * Durably instance with type-safe jobs\n */\nexport interface Durably<\n TJobs extends Record<\n string,\n JobHandle<string, unknown, unknown, Record<string, string>>\n > = Record<string, never>,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n /**\n * Registered job handles (type-safe)\n */\n readonly jobs: TJobs\n\n /**\n * Initialize Durably: run migrations and start the worker\n * This is the recommended way to start Durably.\n * Equivalent to calling migrate() then start().\n * @example\n * ```ts\n * const durably = createDurably({ dialect }).register({ ... 
})\n * await durably.init()\n * ```\n */\n init(): Promise<void>\n\n /**\n * Run database migrations\n * This is idempotent and safe to call multiple times\n */\n migrate(): Promise<void>\n\n /**\n * Get the underlying Kysely database instance\n * Useful for testing and advanced use cases\n */\n readonly db: Kysely<Database>\n\n /**\n * Storage layer for database operations\n */\n readonly storage: Storage\n\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Register job definitions and return a new Durably instance with type-safe jobs\n * @example\n * ```ts\n * const durably = createDurably({ dialect })\n * .register({\n * importCsv: importCsvJob,\n * syncUsers: syncUsersJob,\n * })\n * await durably.migrate()\n * // Usage: durably.jobs.importCsv.trigger({ rows: [...] 
})\n * ```\n */\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n register<TNewJobs extends Record<string, JobDefinition<string, any, any>>>(\n jobDefs: TNewJobs,\n ): Durably<TJobs & TransformToHandles<TNewJobs, TLabels>, TLabels>\n\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Retry a failed run by resetting it to pending\n * @throws Error if run is not in failed status\n */\n retry(runId: string): Promise<void>\n\n /**\n * Cancel a pending or running run\n * @throws Error if run is already completed, failed, or cancelled\n */\n cancel(runId: string): Promise<void>\n\n /**\n * Delete a completed, failed, or cancelled run and its associated steps and logs\n * @throws Error if run is pending or running, or does not exist\n */\n deleteRun(runId: string): Promise<void>\n\n /**\n * Get a run by ID\n * @example\n * ```ts\n * // Untyped (returns Run)\n * const run = await durably.getRun(runId)\n *\n * // Typed (returns custom type)\n * type MyRun = Run & { input: { userId: string }; output: { count: number } | null }\n * const typedRun = await durably.getRun<MyRun>(runId)\n * ```\n */\n getRun<T extends Run<TLabels> = Run<TLabels>>(\n runId: string,\n ): Promise<T | null>\n\n /**\n * Get runs with optional filtering\n * @example\n * ```ts\n * // Untyped (returns Run[])\n * const runs = await durably.getRuns({ status: 'completed' })\n *\n * // Typed (returns custom type[])\n * type MyRun = Run & { input: { userId: string }; output: { count: number } | null }\n * const typedRuns = await durably.getRuns<MyRun>({ jobName: 'my-job' })\n * ```\n */\n getRuns<T extends Run<TLabels> = Run<TLabels>>(\n filter?: RunFilter<TLabels>,\n ): Promise<T[]>\n\n /**\n * Register a plugin\n */\n use(plugin: DurablyPlugin): void\n\n /**\n * Get a registered job handle by name\n * Returns undefined if job is not registered\n */\n 
getJob<TName extends string = string>(\n name: TName,\n ): JobHandle<TName, Record<string, unknown>, unknown, TLabels> | undefined\n\n /**\n * Subscribe to events for a specific run\n * Returns a ReadableStream that can be used for SSE\n */\n subscribe(runId: string): ReadableStream<DurablyEvent>\n}\n\n/**\n * Internal state shared across Durably instances\n */\ninterface DurablyState {\n db: Kysely<Database>\n storage: Storage\n eventEmitter: EventEmitter\n jobRegistry: JobRegistry\n worker: Worker\n labelsSchema: z.ZodType | undefined\n migrating: Promise<void> | null\n migrated: boolean\n}\n\n/**\n * Create a Durably instance implementation\n */\nfunction createDurablyInstance<\n TJobs extends Record<\n string,\n JobHandle<string, unknown, unknown, Record<string, string>>\n >,\n TLabels extends Record<string, string> = Record<string, string>,\n>(state: DurablyState, jobs: TJobs): Durably<TJobs, TLabels> {\n const { db, storage, eventEmitter, jobRegistry, worker } = state\n\n const durably: Durably<TJobs, TLabels> = {\n db,\n storage,\n jobs,\n on: eventEmitter.on,\n emit: eventEmitter.emit,\n onError: eventEmitter.onError,\n start: worker.start,\n stop: worker.stop,\n\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n register<TNewJobs extends Record<string, JobDefinition<string, any, any>>>(\n jobDefs: TNewJobs,\n ): Durably<TJobs & TransformToHandles<TNewJobs, TLabels>, TLabels> {\n const newHandles = {} as TransformToHandles<TNewJobs, TLabels>\n\n for (const key of Object.keys(jobDefs) as (keyof TNewJobs)[]) {\n const jobDef = jobDefs[key]\n const handle = createJobHandle(\n jobDef,\n storage,\n eventEmitter,\n jobRegistry,\n state.labelsSchema as z.ZodType<TLabels> | undefined,\n )\n newHandles[key] = handle as TransformToHandles<\n TNewJobs,\n TLabels\n >[typeof key]\n }\n\n // Create new instance with merged jobs\n const mergedJobs = { ...jobs, ...newHandles } as TJobs &\n TransformToHandles<TNewJobs, TLabels>\n 
return createDurablyInstance<typeof mergedJobs, TLabels>(\n state,\n mergedJobs,\n )\n },\n\n getRun: storage.getRun,\n getRuns: storage.getRuns,\n\n use(plugin: DurablyPlugin): void {\n plugin.install(durably)\n },\n\n getJob<TName extends string = string>(\n name: TName,\n ): JobHandle<TName, Record<string, unknown>, unknown, TLabels> | undefined {\n const registeredJob = jobRegistry.get(name)\n if (!registeredJob) {\n return undefined\n }\n return registeredJob.handle as JobHandle<\n TName,\n Record<string, unknown>,\n unknown,\n TLabels\n >\n },\n\n subscribe(runId: string): ReadableStream<DurablyEvent> {\n // Track closed state and cleanup function in outer scope for cancel handler\n let closed = false\n let cleanup: (() => void) | null = null\n\n // Events that close the stream after enqueuing\n const closeEvents = new Set<EventType>(['run:complete', 'run:delete'])\n // All event types to subscribe to for a run\n const subscribedEvents: EventType[] = [\n 'run:start',\n 'run:complete',\n 'run:fail',\n 'run:cancel',\n 'run:delete',\n 'run:retry',\n 'run:progress',\n 'step:start',\n 'step:complete',\n 'step:fail',\n 'log:write',\n ]\n\n return new ReadableStream<DurablyEvent>({\n start: (controller) => {\n const unsubscribes = subscribedEvents.map((type) =>\n eventEmitter.on(type, (event) => {\n if (closed || event.runId !== runId) return\n controller.enqueue(event)\n if (closeEvents.has(type)) {\n closed = true\n cleanup?.()\n controller.close()\n }\n }),\n )\n\n cleanup = () => {\n for (const unsub of unsubscribes) unsub()\n }\n },\n cancel: () => {\n // Clean up event listeners when stream is cancelled by consumer\n if (!closed) {\n closed = true\n cleanup?.()\n }\n },\n })\n },\n\n async retry(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'completed') {\n throw new Error(`Cannot retry completed run: ${runId}`)\n }\n if (run.status === 'pending') 
{\n throw new Error(`Cannot retry pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot retry running run: ${runId}`)\n }\n await storage.updateRun(runId, {\n status: 'pending',\n error: null,\n })\n\n // Emit run:retry event\n eventEmitter.emit({\n type: 'run:retry',\n runId,\n jobName: run.jobName,\n labels: run.labels,\n })\n },\n\n async cancel(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'completed') {\n throw new Error(`Cannot cancel completed run: ${runId}`)\n }\n if (run.status === 'failed') {\n throw new Error(`Cannot cancel failed run: ${runId}`)\n }\n if (run.status === 'cancelled') {\n throw new Error(`Cannot cancel already cancelled run: ${runId}`)\n }\n await storage.updateRun(runId, {\n status: 'cancelled',\n })\n\n // Emit run:cancel event\n eventEmitter.emit({\n type: 'run:cancel',\n runId,\n jobName: run.jobName,\n labels: run.labels,\n })\n },\n\n async deleteRun(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'pending') {\n throw new Error(`Cannot delete pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot delete running run: ${runId}`)\n }\n await storage.deleteRun(runId)\n\n // Emit run:delete event\n eventEmitter.emit({\n type: 'run:delete',\n runId,\n jobName: run.jobName,\n labels: run.labels,\n })\n },\n\n async migrate(): Promise<void> {\n if (state.migrated) {\n return\n }\n\n if (state.migrating) {\n return state.migrating\n }\n\n state.migrating = runMigrations(db)\n .then(() => {\n state.migrated = true\n })\n .finally(() => {\n state.migrating = null\n })\n\n return state.migrating\n },\n\n async init(): Promise<void> {\n await this.migrate()\n this.start()\n },\n }\n\n return durably\n}\n\n/**\n * Create a Durably instance\n */\n// Overload: with 
jobs\nexport function createDurably<\n TLabels extends Record<string, string> = Record<string, string>,\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n TJobs extends Record<string, JobDefinition<string, any, any>> = Record<\n string,\n never\n >,\n>(\n options: DurablyOptions<TLabels, TJobs> & { jobs: TJobs },\n): Durably<TransformToHandles<TJobs, TLabels>, TLabels>\n\n// Overload: without jobs\nexport function createDurably<\n TLabels extends Record<string, string> = Record<string, string>,\n>(options: DurablyOptions<TLabels>): Durably<Record<string, never>, TLabels>\n\n// Implementation\nexport function createDurably<\n TLabels extends Record<string, string> = Record<string, string>,\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n TJobs extends Record<string, JobDefinition<string, any, any>> = Record<\n string,\n never\n >,\n>(\n options: DurablyOptions<TLabels, TJobs>,\n):\n | Durably<TransformToHandles<TJobs, TLabels>, TLabels>\n | Durably<Record<string, never>, TLabels> {\n const config = {\n pollingInterval: options.pollingInterval ?? DEFAULTS.pollingInterval,\n heartbeatInterval: options.heartbeatInterval ?? DEFAULTS.heartbeatInterval,\n staleThreshold: options.staleThreshold ?? 
DEFAULTS.staleThreshold,\n }\n\n const db = new Kysely<Database>({ dialect: options.dialect })\n const storage = createKyselyStorage(db)\n const eventEmitter = createEventEmitter()\n const jobRegistry = createJobRegistry()\n const worker = createWorker(config, storage, eventEmitter, jobRegistry)\n\n const state: DurablyState = {\n db,\n storage,\n eventEmitter,\n jobRegistry,\n worker,\n labelsSchema: options.labels,\n migrating: null,\n migrated: false,\n }\n\n const instance = createDurablyInstance<Record<string, never>, TLabels>(\n state,\n {},\n )\n\n if (options.jobs) {\n return instance.register(options.jobs)\n }\n\n return instance\n}\n","/**\n * Base event interface\n */\nexport interface BaseEvent {\n type: string\n timestamp: string\n sequence: number\n}\n\n/**\n * Run trigger event (emitted when a job is triggered, before worker picks it up)\n */\nexport interface RunTriggerEvent extends BaseEvent {\n type: 'run:trigger'\n runId: string\n jobName: string\n input: unknown\n labels: Record<string, string>\n}\n\n/**\n * Run start event\n */\nexport interface RunStartEvent extends BaseEvent {\n type: 'run:start'\n runId: string\n jobName: string\n input: unknown\n labels: Record<string, string>\n}\n\n/**\n * Run complete event\n */\nexport interface RunCompleteEvent extends BaseEvent {\n type: 'run:complete'\n runId: string\n jobName: string\n output: unknown\n duration: number\n labels: Record<string, string>\n}\n\n/**\n * Run fail event\n */\nexport interface RunFailEvent extends BaseEvent {\n type: 'run:fail'\n runId: string\n jobName: string\n error: string\n failedStepName: string\n labels: Record<string, string>\n}\n\n/**\n * Run cancel event\n */\nexport interface RunCancelEvent extends BaseEvent {\n type: 'run:cancel'\n runId: string\n jobName: string\n labels: Record<string, string>\n}\n\n/**\n * Run delete event (emitted when a run is deleted)\n */\nexport interface RunDeleteEvent extends BaseEvent {\n type: 'run:delete'\n runId: string\n jobName: 
string\n labels: Record<string, string>\n}\n\n/**\n * Run retry event (emitted when a failed run is retried)\n */\nexport interface RunRetryEvent extends BaseEvent {\n type: 'run:retry'\n runId: string\n jobName: string\n labels: Record<string, string>\n}\n\n/**\n * Progress data reported by step.progress()\n */\nexport interface ProgressData {\n current: number\n total?: number\n message?: string\n}\n\n/**\n * Run progress event\n */\nexport interface RunProgressEvent extends BaseEvent {\n type: 'run:progress'\n runId: string\n jobName: string\n progress: ProgressData\n labels: Record<string, string>\n}\n\n/**\n * Step start event\n */\nexport interface StepStartEvent extends BaseEvent {\n type: 'step:start'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n labels: Record<string, string>\n}\n\n/**\n * Step complete event\n */\nexport interface StepCompleteEvent extends BaseEvent {\n type: 'step:complete'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n output: unknown\n duration: number\n labels: Record<string, string>\n}\n\n/**\n * Step fail event\n */\nexport interface StepFailEvent extends BaseEvent {\n type: 'step:fail'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n error: string\n labels: Record<string, string>\n}\n\nexport interface StepCancelEvent extends BaseEvent {\n type: 'step:cancel'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n labels: Record<string, string>\n}\n\n/**\n * Log data reported by step.log\n */\nexport interface LogData {\n level: 'info' | 'warn' | 'error'\n message: string\n data?: unknown\n stepName?: string | null\n}\n\n/**\n * Log write event\n */\nexport interface LogWriteEvent extends BaseEvent, LogData {\n type: 'log:write'\n runId: string\n jobName: string\n labels: Record<string, string>\n stepName: string | null\n data: unknown\n}\n\n/**\n * Worker error event (internal errors like heartbeat failures)\n */\nexport interface 
WorkerErrorEvent extends BaseEvent {\n type: 'worker:error'\n error: string\n context: string\n runId?: string\n}\n\n/**\n * All event types as discriminated union\n */\nexport type DurablyEvent =\n | RunTriggerEvent\n | RunStartEvent\n | RunCompleteEvent\n | RunFailEvent\n | RunCancelEvent\n | RunDeleteEvent\n | RunRetryEvent\n | RunProgressEvent\n | StepStartEvent\n | StepCompleteEvent\n | StepFailEvent\n | StepCancelEvent\n | LogWriteEvent\n | WorkerErrorEvent\n\n/**\n * Event types for type-safe event names\n */\nexport type EventType = DurablyEvent['type']\n\n/**\n * Extract event by type\n */\nexport type EventByType<T extends EventType> = Extract<\n DurablyEvent,\n { type: T }\n>\n\n/**\n * Event input (without auto-generated fields)\n */\nexport type EventInput<T extends EventType> = Omit<\n EventByType<T>,\n 'timestamp' | 'sequence'\n>\n\n/**\n * All possible event inputs as a union (properly distributed)\n */\nexport type AnyEventInput =\n | EventInput<'run:trigger'>\n | EventInput<'run:start'>\n | EventInput<'run:complete'>\n | EventInput<'run:fail'>\n | EventInput<'run:cancel'>\n | EventInput<'run:delete'>\n | EventInput<'run:retry'>\n | EventInput<'run:progress'>\n | EventInput<'step:start'>\n | EventInput<'step:complete'>\n | EventInput<'step:fail'>\n | EventInput<'step:cancel'>\n | EventInput<'log:write'>\n | EventInput<'worker:error'>\n\n/**\n * Event listener function\n */\nexport type EventListener<T extends EventType> = (event: EventByType<T>) => void\n\n/**\n * Unsubscribe function returned by on()\n */\nexport type Unsubscribe = () => void\n\n/**\n * Error handler function for listener exceptions\n */\nexport type ErrorHandler = (error: Error, event: DurablyEvent) => void\n\n/**\n * Event emitter interface\n */\nexport interface EventEmitter {\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Register an error handler for listener 
exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n}\n\n/**\n * Create an event emitter\n */\nexport function createEventEmitter(): EventEmitter {\n const listeners = new Map<EventType, Set<EventListener<EventType>>>()\n let sequence = 0\n let errorHandler: ErrorHandler | null = null\n\n return {\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe {\n if (!listeners.has(type)) {\n listeners.set(type, new Set())\n }\n\n const typeListeners = listeners.get(type)\n typeListeners?.add(listener as unknown as EventListener<EventType>)\n\n return () => {\n typeListeners?.delete(listener as unknown as EventListener<EventType>)\n }\n },\n\n onError(handler: ErrorHandler): void {\n errorHandler = handler\n },\n\n emit(event: AnyEventInput): void {\n sequence++\n const fullEvent = {\n ...event,\n timestamp: new Date().toISOString(),\n sequence,\n } as DurablyEvent\n\n const typeListeners = listeners.get(event.type)\n if (!typeListeners) {\n return\n }\n\n for (const listener of typeListeners) {\n try {\n listener(fullEvent)\n } catch (error) {\n if (errorHandler) {\n errorHandler(\n error instanceof Error ? error : new Error(String(error)),\n fullEvent,\n )\n }\n // Continue to next listener regardless of error\n }\n }\n },\n }\n}\n","import { type z, prettifyError } from 'zod'\nimport type { JobDefinition } from './define-job'\nimport type { EventEmitter, LogData, ProgressData } from './events'\nimport type { Run, RunFilter, Storage } from './storage'\n\n// eslint-disable-next-line @typescript-eslint/no-empty-function\nconst noop = () => {}\n\n/**\n * Validate job input and throw on failure\n */\nfunction validateJobInputOrThrow<T>(\n schema: z.ZodType<T>,\n input: unknown,\n context?: string,\n): T {\n const result = schema.safeParse(input)\n if (!result.success) {\n const prefix = context ? 
`${context}: ` : ''\n throw new Error(`${prefix}Invalid input: ${prettifyError(result.error)}`)\n }\n return result.data\n}\n\n/**\n * Step context passed to the job function\n */\nexport interface StepContext {\n /**\n * The ID of the current run\n */\n readonly runId: string\n\n /**\n * Execute a step with automatic persistence and replay\n */\n run<T>(name: string, fn: (signal: AbortSignal) => T | Promise<T>): Promise<T>\n\n /**\n * Report progress for the current run\n */\n progress(current: number, total?: number, message?: string): void\n\n /**\n * Log a message\n */\n log: {\n info(message: string, data?: unknown): void\n warn(message: string, data?: unknown): void\n error(message: string, data?: unknown): void\n }\n}\n\n/**\n * Job function type\n */\nexport type JobFunction<TInput, TOutput> = (\n step: StepContext,\n input: TInput,\n) => Promise<TOutput>\n\n/**\n * Trigger options for trigger() and batchTrigger()\n */\nexport interface TriggerOptions<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n idempotencyKey?: string\n concurrencyKey?: string\n labels?: TLabels\n}\n\n/**\n * Options for triggerAndWait() (extends TriggerOptions with wait-specific options)\n */\nexport interface TriggerAndWaitOptions<\n TLabels extends Record<string, string> = Record<string, string>,\n> extends TriggerOptions<TLabels> {\n /** Timeout in milliseconds */\n timeout?: number\n /** Called when step.progress() is invoked during execution */\n onProgress?: (progress: ProgressData) => void | Promise<void>\n /** Called when step.log is invoked during execution */\n onLog?: (log: LogData) => void | Promise<void>\n}\n\n/**\n * Typed run with output type\n */\nexport interface TypedRun<\n TOutput,\n TLabels extends Record<string, string> = Record<string, string>,\n> extends Omit<Run<TLabels>, 'output'> {\n output: TOutput | null\n}\n\n/**\n * Batch trigger input - either just the input or input with options\n */\nexport type BatchTriggerInput<\n TInput,\n 
TLabels extends Record<string, string> = Record<string, string>,\n> = TInput | { input: TInput; options?: TriggerOptions<TLabels> }\n\n/**\n * Result of triggerAndWait\n */\nexport interface TriggerAndWaitResult<TOutput> {\n id: string\n output: TOutput\n}\n\n/**\n * Job handle returned by defineJob\n */\nexport interface JobHandle<\n TName extends string,\n TInput,\n TOutput,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n readonly name: TName\n\n /**\n * Trigger a new run\n */\n trigger(\n input: TInput,\n options?: TriggerOptions<TLabels>,\n ): Promise<TypedRun<TOutput, TLabels>>\n\n /**\n * Trigger a new run and wait for completion\n * Returns the output directly, throws if the run fails\n */\n triggerAndWait(\n input: TInput,\n options?: TriggerAndWaitOptions<TLabels>,\n ): Promise<TriggerAndWaitResult<TOutput>>\n\n /**\n * Trigger multiple runs in a batch\n * All inputs are validated before any runs are created\n */\n batchTrigger(\n inputs: BatchTriggerInput<TInput, TLabels>[],\n ): Promise<TypedRun<TOutput, TLabels>[]>\n\n /**\n * Get a run by ID\n */\n getRun(id: string): Promise<TypedRun<TOutput, TLabels> | null>\n\n /**\n * Get runs with optional filter\n */\n getRuns(\n filter?: Omit<RunFilter<TLabels>, 'jobName'>,\n ): Promise<TypedRun<TOutput, TLabels>[]>\n}\n\n/**\n * Internal job registration\n */\nexport interface RegisteredJob<TInput, TOutput> {\n name: string\n inputSchema: z.ZodType\n outputSchema: z.ZodType | undefined\n labelsSchema: z.ZodType | undefined\n fn: JobFunction<TInput, TOutput>\n jobDef: JobDefinition<string, TInput, TOutput>\n // biome-ignore lint/suspicious/noExplicitAny: handle may have any labels type\n handle: JobHandle<string, TInput, TOutput, any>\n}\n\n/**\n * Job registry for managing registered jobs\n */\nexport interface JobRegistry {\n /**\n * Register a job (called internally by createJobHandle)\n */\n set<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void\n\n /**\n * Get a registered 
job by name\n */\n get(name: string): RegisteredJob<unknown, unknown> | undefined\n\n /**\n * Check if a job is registered\n */\n has(name: string): boolean\n}\n\n/**\n * Create a job registry\n */\nexport function createJobRegistry(): JobRegistry {\n const jobs = new Map<string, RegisteredJob<unknown, unknown>>()\n\n return {\n set<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void {\n jobs.set(job.name, job as RegisteredJob<unknown, unknown>)\n },\n\n get(name: string): RegisteredJob<unknown, unknown> | undefined {\n return jobs.get(name)\n },\n\n has(name: string): boolean {\n return jobs.has(name)\n },\n }\n}\n\n/**\n * Create a job handle from a JobDefinition\n */\nexport function createJobHandle<\n TName extends string,\n TInput,\n TOutput,\n TLabels extends Record<string, string> = Record<string, string>,\n>(\n jobDef: JobDefinition<TName, TInput, TOutput>,\n storage: Storage,\n eventEmitter: EventEmitter,\n registry: JobRegistry,\n labelsSchema?: z.ZodType<TLabels>,\n): JobHandle<TName, TInput, TOutput, TLabels> {\n // Check if same JobDefinition is already registered (idempotent)\n const existingJob = registry.get(jobDef.name)\n if (existingJob) {\n // If same JobDefinition (same reference), return existing handle\n if (existingJob.jobDef === jobDef) {\n return existingJob.handle as JobHandle<TName, TInput, TOutput, TLabels>\n }\n // Different JobDefinition with same name - error\n throw new Error(\n `Job \"${jobDef.name}\" is already registered with a different definition`,\n )\n }\n\n const inputSchema = jobDef.input as z.ZodType<TInput>\n const outputSchema = jobDef.output as z.ZodType<TOutput> | undefined\n\n const handle: JobHandle<TName, TInput, TOutput, TLabels> = {\n name: jobDef.name,\n\n async trigger(\n input: TInput,\n options?: TriggerOptions<TLabels>,\n ): Promise<TypedRun<TOutput, TLabels>> {\n // Validate input\n const validatedInput = validateJobInputOrThrow(inputSchema, input)\n\n // Validate labels if schema provided\n if 
(labelsSchema && options?.labels) {\n validateJobInputOrThrow(labelsSchema, options.labels, 'labels')\n }\n\n // Create the run\n const run = await storage.createRun({\n jobName: jobDef.name,\n input: validatedInput,\n idempotencyKey: options?.idempotencyKey,\n concurrencyKey: options?.concurrencyKey,\n labels: options?.labels,\n })\n\n // Emit run:trigger event\n eventEmitter.emit({\n type: 'run:trigger',\n runId: run.id,\n jobName: jobDef.name,\n input: validatedInput,\n labels: run.labels,\n })\n\n return run as TypedRun<TOutput, TLabels>\n },\n\n async triggerAndWait(\n input: TInput,\n options?: TriggerAndWaitOptions<TLabels>,\n ): Promise<TriggerAndWaitResult<TOutput>> {\n // Trigger the run\n const run = await this.trigger(input, options)\n\n // Wait for completion via event subscription\n return new Promise((resolve, reject) => {\n let timeoutId: ReturnType<typeof setTimeout> | undefined\n let resolved = false\n\n const unsubscribes: (() => void)[] = []\n\n const cleanup = () => {\n if (resolved) return\n resolved = true\n for (const unsub of unsubscribes) unsub()\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n }\n\n unsubscribes.push(\n eventEmitter.on('run:complete', (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n resolve({\n id: run.id,\n output: event.output as TOutput,\n })\n }\n }),\n )\n\n unsubscribes.push(\n eventEmitter.on('run:fail', (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n reject(new Error(event.error))\n }\n }),\n )\n\n if (options?.onProgress) {\n const onProgress = options.onProgress\n unsubscribes.push(\n eventEmitter.on('run:progress', (event) => {\n if (event.runId === run.id && !resolved) {\n void Promise.resolve(onProgress(event.progress)).catch(noop)\n }\n }),\n )\n }\n\n if (options?.onLog) {\n const onLog = options.onLog\n unsubscribes.push(\n eventEmitter.on('log:write', (event) => {\n if (event.runId === run.id && !resolved) {\n const { level, message, data, stepName } = 
event\n void Promise.resolve(\n onLog({ level, message, data, stepName }),\n ).catch(noop)\n }\n }),\n )\n }\n\n // Check current status after subscribing (race condition mitigation)\n // If the run completed before we subscribed, we need to handle it\n storage\n .getRun(run.id)\n .then((currentRun) => {\n if (resolved || !currentRun) return\n if (currentRun.status === 'completed') {\n cleanup()\n resolve({\n id: run.id,\n output: currentRun.output as TOutput,\n })\n } else if (currentRun.status === 'failed') {\n cleanup()\n reject(new Error(currentRun.error || 'Run failed'))\n }\n })\n .catch((error) => {\n if (resolved) return\n cleanup()\n reject(error instanceof Error ? error : new Error(String(error)))\n })\n\n // Set timeout if specified\n if (options?.timeout !== undefined) {\n timeoutId = setTimeout(() => {\n if (!resolved) {\n cleanup()\n reject(\n new Error(`triggerAndWait timeout after ${options.timeout}ms`),\n )\n }\n }, options.timeout)\n }\n })\n },\n\n async batchTrigger(\n inputs: (TInput | { input: TInput; options?: TriggerOptions<TLabels> })[],\n ): Promise<TypedRun<TOutput, TLabels>[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Normalize inputs to { input, options } format\n const normalized = inputs.map((item) => {\n if (item && typeof item === 'object' && 'input' in item) {\n return item as { input: TInput; options?: TriggerOptions<TLabels> }\n }\n return { input: item as TInput, options: undefined }\n })\n\n // Validate all inputs and labels first (before creating any runs)\n const validated: {\n input: unknown\n options?: TriggerOptions<TLabels>\n }[] = []\n for (let i = 0; i < normalized.length; i++) {\n const validatedInput = validateJobInputOrThrow(\n inputSchema,\n normalized[i].input,\n `at index ${i}`,\n )\n if (labelsSchema && normalized[i].options?.labels) {\n validateJobInputOrThrow(\n labelsSchema,\n normalized[i].options?.labels,\n `labels at index ${i}`,\n )\n }\n validated.push({\n input: validatedInput,\n options: 
/**
 * Migration definitions
 */
interface Migration {
  // Monotonically increasing schema version this migration brings the DB to.
  version: number
  // Applies the migration; receives the (possibly transactional) Kysely handle.
  up: (db: Kysely<Database>) => Promise<void>
}

// Highest schema version known to this build; bump when adding a migration.
export const LATEST_SCHEMA_VERSION = 1

// Ordered list of migrations. runMigrations() applies every entry whose
// version is greater than the version recorded in durably_schema_versions.
// All timestamp columns are TEXT because values are written as ISO-8601
// strings (see the `applied_at: new Date().toISOString()` insert below).
const migrations: Migration[] = [
  {
    version: 1,
    up: async (db) => {
      // Create runs table
      // JSON-ish payloads (input/labels/progress/output) are stored as TEXT.
      await db.schema
        .createTable('durably_runs')
        .ifNotExists()
        .addColumn('id', 'text', (col) => col.primaryKey())
        .addColumn('job_name', 'text', (col) => col.notNull())
        .addColumn('input', 'text', (col) => col.notNull())
        .addColumn('status', 'text', (col) => col.notNull())
        .addColumn('idempotency_key', 'text')
        .addColumn('concurrency_key', 'text')
        .addColumn('labels', 'text', (col) => col.notNull().defaultTo('{}'))
        .addColumn('current_step_index', 'integer', (col) =>
          col.notNull().defaultTo(0),
        )
        .addColumn('progress', 'text')
        .addColumn('output', 'text')
        .addColumn('error', 'text')
        .addColumn('heartbeat_at', 'text', (col) => col.notNull())
        .addColumn('started_at', 'text')
        .addColumn('completed_at', 'text')
        .addColumn('created_at', 'text', (col) => col.notNull())
        .addColumn('updated_at', 'text', (col) => col.notNull())
        .execute()

      // Create runs indexes
      // Unique (job_name, idempotency_key) backs idempotent run creation.
      await db.schema
        .createIndex('idx_durably_runs_job_idempotency')
        .ifNotExists()
        .on('durably_runs')
        .columns(['job_name', 'idempotency_key'])
        .unique()
        .execute()

      await db.schema
        .createIndex('idx_durably_runs_status_concurrency')
        .ifNotExists()
        .on('durably_runs')
        .columns(['status', 'concurrency_key'])
        .execute()

      await db.schema
        .createIndex('idx_durably_runs_status_created')
        .ifNotExists()
        .on('durably_runs')
        .columns(['status', 'created_at'])
        .execute()

      // Create steps table
      await db.schema
        .createTable('durably_steps')
        .ifNotExists()
        .addColumn('id', 'text', (col) => col.primaryKey())
        .addColumn('run_id', 'text', (col) => col.notNull())
        .addColumn('name', 'text', (col) => col.notNull())
        .addColumn('index', 'integer', (col) => col.notNull())
        .addColumn('status', 'text', (col) => col.notNull())
        .addColumn('output', 'text')
        .addColumn('error', 'text')
        .addColumn('started_at', 'text', (col) => col.notNull())
        .addColumn('completed_at', 'text')
        .execute()

      // Create steps index
      await db.schema
        .createIndex('idx_durably_steps_run_index')
        .ifNotExists()
        .on('durably_steps')
        .columns(['run_id', 'index'])
        .execute()

      // Create logs table
      await db.schema
        .createTable('durably_logs')
        .ifNotExists()
        .addColumn('id', 'text', (col) => col.primaryKey())
        .addColumn('run_id', 'text', (col) => col.notNull())
        .addColumn('step_name', 'text')
        .addColumn('level', 'text', (col) => col.notNull())
        .addColumn('message', 'text', (col) => col.notNull())
        .addColumn('data', 'text')
        .addColumn('created_at', 'text', (col) => col.notNull())
        .execute()

      // Create logs index
      await db.schema
        .createIndex('idx_durably_logs_run_created')
        .ifNotExists()
        .on('durably_logs')
        .columns(['run_id', 'created_at'])
        .execute()

      // Create schema_versions table
      // Tracks which migrations have been applied (one row per version).
      await db.schema
        .createTable('durably_schema_versions')
        .ifNotExists()
        .addColumn('version', 'integer', (col) => col.primaryKey())
        .addColumn('applied_at', 'text', (col) => col.notNull())
        .execute()
    },
  },
]
0\n } catch {\n // Table doesn't exist yet\n return 0\n }\n}\n\n/**\n * Run pending migrations\n */\nexport async function runMigrations(db: Kysely<Database>): Promise<void> {\n const currentVersion = await getCurrentVersion(db)\n\n for (const migration of migrations) {\n if (migration.version > currentVersion) {\n await db.transaction().execute(async (trx) => {\n await migration.up(trx)\n\n await trx\n .insertInto('durably_schema_versions')\n .values({\n version: migration.version,\n applied_at: new Date().toISOString(),\n })\n .execute()\n })\n }\n }\n}\n","import { type Kysely, sql } from 'kysely'\nimport { monotonicFactory } from 'ulidx'\nimport type { Database } from './schema'\n\nconst ulid = monotonicFactory()\n\n/**\n * Run data for creating a new run\n */\nexport interface CreateRunInput<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n jobName: string\n input: unknown\n idempotencyKey?: string\n concurrencyKey?: string\n labels?: TLabels\n}\n\n/**\n * Run data returned from storage\n */\nexport interface Run<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n id: string\n jobName: string\n input: unknown\n status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n idempotencyKey: string | null\n concurrencyKey: string | null\n currentStepIndex: number\n stepCount: number\n progress: { current: number; total?: number; message?: string } | null\n output: unknown | null\n error: string | null\n labels: TLabels\n heartbeatAt: string\n startedAt: string | null\n completedAt: string | null\n createdAt: string\n updatedAt: string\n}\n\n/**\n * Run update data\n */\nexport interface UpdateRunInput {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n currentStepIndex?: number\n progress?: { current: number; total?: number; message?: string } | null\n output?: unknown\n error?: string | null\n heartbeatAt?: string\n startedAt?: string\n completedAt?: string\n}\n\n/**\n * Run filter 
options\n */\nexport interface RunFilter<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n /** Filter by job name(s). Pass a string for one, or an array for multiple (OR). */\n jobName?: string | string[]\n /** Filter by labels (all specified labels must match) */\n labels?: { [K in keyof TLabels]?: TLabels[K] }\n /** Maximum number of runs to return */\n limit?: number\n /** Number of runs to skip (for pagination) */\n offset?: number\n}\n\n/**\n * Step data for creating a new step\n */\nexport interface CreateStepInput {\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed' | 'cancelled'\n output?: unknown\n error?: string\n startedAt: string // ISO8601 timestamp when step execution started\n}\n\n/**\n * Step data returned from storage\n */\nexport interface Step {\n id: string\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed' | 'cancelled'\n output: unknown | null\n error: string | null\n startedAt: string\n completedAt: string | null\n}\n\n/**\n * Log data for creating a new log\n */\nexport interface CreateLogInput {\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data?: unknown\n}\n\n/**\n * Log data returned from storage\n */\nexport interface Log {\n id: string\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data: unknown | null\n createdAt: string\n}\n\n/**\n * A client-safe subset of Run, excluding internal fields like\n * heartbeatAt, idempotencyKey, concurrencyKey, and updatedAt.\n */\nexport type ClientRun<\n TLabels extends Record<string, string> = Record<string, string>,\n> = Omit<\n Run<TLabels>,\n 'idempotencyKey' | 'concurrencyKey' | 'heartbeatAt' | 'updatedAt'\n>\n\n/**\n * Project a full Run to a ClientRun by stripping internal fields.\n */\nexport function toClientRun<\n TLabels extends 
Record<string, string> = Record<string, string>,\n>(run: Run<TLabels>): ClientRun<TLabels> {\n const {\n idempotencyKey,\n concurrencyKey,\n heartbeatAt,\n updatedAt,\n ...clientRun\n } = run\n return clientRun\n}\n\n/**\n * Storage interface for database operations\n */\nexport interface Storage {\n // Run operations\n createRun(input: CreateRunInput): Promise<Run>\n batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]>\n updateRun(runId: string, data: UpdateRunInput): Promise<void>\n deleteRun(runId: string): Promise<void>\n getRun<T extends Run = Run>(runId: string): Promise<T | null>\n getRuns<T extends Run = Run>(filter?: RunFilter): Promise<T[]>\n claimNextPendingRun(excludeConcurrencyKeys: string[]): Promise<Run | null>\n\n // Step operations\n createStep(input: CreateStepInput): Promise<Step>\n getSteps(runId: string): Promise<Step[]>\n getCompletedStep(runId: string, name: string): Promise<Step | null>\n\n // Log operations\n createLog(input: CreateLogInput): Promise<Log>\n getLogs(runId: string): Promise<Log[]>\n}\n\n/**\n * Convert database row to Run object\n */\n/**\n * Validate label keys: alphanumeric, dash, underscore, dot, slash only\n */\nconst LABEL_KEY_PATTERN = /^[a-zA-Z0-9\\-_./]+$/\n\nfunction validateLabels(labels: Record<string, string> | undefined): void {\n if (!labels) return\n for (const key of Object.keys(labels)) {\n if (!LABEL_KEY_PATTERN.test(key)) {\n throw new Error(\n `Invalid label key \"${key}\": must contain only alphanumeric characters, dashes, underscores, dots, and slashes`,\n )\n }\n }\n}\n\nfunction rowToRun(\n row: Database['durably_runs'] & { step_count?: number | bigint | null },\n): Run {\n return {\n id: row.id,\n jobName: row.job_name,\n input: JSON.parse(row.input),\n status: row.status,\n idempotencyKey: row.idempotency_key,\n concurrencyKey: row.concurrency_key,\n currentStepIndex: row.current_step_index,\n stepCount: Number(row.step_count ?? 0),\n progress: row.progress ? 
JSON.parse(row.progress) : null,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n labels: JSON.parse(row.labels),\n heartbeatAt: row.heartbeat_at,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n createdAt: row.created_at,\n updatedAt: row.updated_at,\n }\n}\n\n/**\n * Convert database row to Step object\n */\nfunction rowToStep(row: Database['durably_steps']): Step {\n return {\n id: row.id,\n runId: row.run_id,\n name: row.name,\n index: row.index,\n status: row.status,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n }\n}\n\n/**\n * Convert database row to Log object\n */\nfunction rowToLog(row: Database['durably_logs']): Log {\n return {\n id: row.id,\n runId: row.run_id,\n stepName: row.step_name,\n level: row.level,\n message: row.message,\n data: row.data ? JSON.parse(row.data) : null,\n createdAt: row.created_at,\n }\n}\n\n/**\n * Create a Kysely-based Storage implementation\n */\nexport function createKyselyStorage(db: Kysely<Database>): Storage {\n return {\n async createRun(input: CreateRunInput): Promise<Run> {\n const now = new Date().toISOString()\n\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await db\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n return rowToRun(existing)\n }\n }\n\n validateLabels(input.labels)\n\n const id = ulid()\n const run: Database['durably_runs'] = {\n id,\n job_name: input.jobName,\n input: JSON.stringify(input.input),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n labels: JSON.stringify(input.labels ?? 
{}),\n heartbeat_at: now,\n started_at: null,\n completed_at: null,\n created_at: now,\n updated_at: now,\n }\n\n await db.insertInto('durably_runs').values(run).execute()\n\n return rowToRun(run)\n },\n\n async batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Use transaction to ensure atomicity of idempotency checks and inserts\n return await db.transaction().execute(async (trx) => {\n const now = new Date().toISOString()\n const runs: Database['durably_runs'][] = []\n\n // Validate all labels upfront\n for (const input of inputs) {\n validateLabels(input.labels)\n }\n\n // Process inputs - check idempotency keys and create run objects\n for (const input of inputs) {\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await trx\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n runs.push(existing)\n continue\n }\n }\n\n const id = ulid()\n runs.push({\n id,\n job_name: input.jobName,\n input: JSON.stringify(input.input),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n labels: JSON.stringify(input.labels ?? 
{}),\n heartbeat_at: now,\n started_at: null,\n completed_at: null,\n created_at: now,\n updated_at: now,\n })\n }\n\n // Insert all new runs in a single batch\n const newRuns = runs.filter((r) => r.created_at === now)\n if (newRuns.length > 0) {\n await trx.insertInto('durably_runs').values(newRuns).execute()\n }\n\n return runs.map(rowToRun)\n })\n },\n\n async updateRun(runId: string, data: UpdateRunInput): Promise<void> {\n const now = new Date().toISOString()\n const updates: Partial<Database['durably_runs']> = {\n updated_at: now,\n }\n\n if (data.status !== undefined) updates.status = data.status\n if (data.currentStepIndex !== undefined)\n updates.current_step_index = data.currentStepIndex\n if (data.progress !== undefined)\n updates.progress = data.progress ? JSON.stringify(data.progress) : null\n if (data.output !== undefined)\n updates.output = JSON.stringify(data.output)\n if (data.error !== undefined) updates.error = data.error\n if (data.heartbeatAt !== undefined)\n updates.heartbeat_at = data.heartbeatAt\n if (data.startedAt !== undefined) updates.started_at = data.startedAt\n if (data.completedAt !== undefined)\n updates.completed_at = data.completedAt\n\n await db\n .updateTable('durably_runs')\n .set(updates)\n .where('id', '=', runId)\n .execute()\n },\n\n async deleteRun(runId: string): Promise<void> {\n // Delete in order: logs -> steps -> run (due to foreign key constraints)\n await db.deleteFrom('durably_logs').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_steps').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_runs').where('id', '=', runId).execute()\n },\n\n async getRun<T extends Run = Run>(runId: string): Promise<T | null> {\n const row = await db\n .selectFrom('durably_runs')\n .leftJoin('durably_steps', 'durably_runs.id', 'durably_steps.run_id')\n .selectAll('durably_runs')\n .select((eb) =>\n eb.fn.count<number>('durably_steps.id').as('step_count'),\n )\n .where('durably_runs.id', '=', 
    /**
     * List runs matching `filter`, newest first.
     * step_count is computed with a LEFT JOIN + COUNT per run.
     */
    async getRuns<T extends Run = Run>(filter?: RunFilter): Promise<T[]> {
      let query = db
        .selectFrom('durably_runs')
        .leftJoin('durably_steps', 'durably_runs.id', 'durably_steps.run_id')
        .selectAll('durably_runs')
        .select((eb) =>
          eb.fn.count<number>('durably_steps.id').as('step_count'),
        )
        .groupBy('durably_runs.id')

      if (filter?.status) {
        query = query.where('durably_runs.status', '=', filter.status)
      }
      if (filter?.jobName) {
        // String = exact match; non-empty array = IN (...); empty array is a no-op.
        if (Array.isArray(filter.jobName)) {
          if (filter.jobName.length > 0) {
            query = query.where('durably_runs.job_name', 'in', filter.jobName)
          }
        } else {
          query = query.where('durably_runs.job_name', '=', filter.jobName)
        }
      }
      if (filter?.labels) {
        const labels = filter.labels as Record<string, string>
        // Keys are interpolated into the JSON path below, so validate them
        // first (LABEL_KEY_PATTERN) to prevent path injection.
        validateLabels(labels)
        for (const [key, value] of Object.entries(labels)) {
          if (value === undefined) continue
          // SQLite JSON1: json_extract(labels, '$."key"') = value
          query = query.where(
            sql`json_extract(durably_runs.labels, ${`$."${key}"`})`,
            '=',
            value,
          )
        }
      }

      query = query.orderBy('durably_runs.created_at', 'desc')

      if (filter?.limit !== undefined) {
        query = query.limit(filter.limit)
      }
      if (filter?.offset !== undefined) {
        // SQLite requires LIMIT when using OFFSET
        if (filter.limit === undefined) {
          query = query.limit(-1) // -1 means unlimited in SQLite
        }
        query = query.offset(filter.offset)
      }

      const rows = await query.execute()
      return rows.map(rowToRun) as T[]
    },

    /**
     * Atomically claim the oldest pending run and mark it running.
     * Implemented as a single UPDATE with a correlated subquery (SELECT +
     * UPDATE in one statement) so concurrent workers cannot claim the same
     * run. Runs whose concurrency_key is in `excludeConcurrencyKeys` are
     * skipped. Returns null when nothing is claimable.
     */
    async claimNextPendingRun(
      excludeConcurrencyKeys: string[],
    ): Promise<Run | null> {
      const now = new Date().toISOString()

      // Oldest pending run, ordered by created_at then id for a stable tiebreak.
      let subquery = db
        .selectFrom('durably_runs')
        .select('id')
        .where('status', '=', 'pending')
        .orderBy('created_at', 'asc')
        .orderBy('id', 'asc')
        .limit(1)

      if (excludeConcurrencyKeys.length > 0) {
        // NULL keys are always eligible; 'not in' alone would drop them.
        subquery = subquery.where((eb) =>
          eb.or([
            eb('concurrency_key', 'is', null),
            eb('concurrency_key', 'not in', excludeConcurrencyKeys),
          ]),
        )
      }

      const row = await db
        .updateTable('durably_runs')
        .set({
          status: 'running',
          heartbeat_at: now,
          // Preserve the original started_at across stale-run re-claims.
          started_at: sql`COALESCE(started_at, ${now})`,
          updated_at: now,
        })
        .where('id', '=', (eb) =>
          eb.selectFrom(subquery.as('sub')).select('id'),
        )
        .returningAll()
        .executeTakeFirst()

      if (!row) return null
      // RETURNING has no join, so step_count is reported as 0 here;
      // callers needing the real count should use getRun().
      return rowToRun({ ...row, step_count: 0 })
    },

    /**
     * Persist a finished step. completed_at is stamped here; started_at is
     * supplied by the caller (when the step actually began executing).
     */
    async createStep(input: CreateStepInput): Promise<Step> {
      const completedAt = new Date().toISOString()
      const id = ulid()

      const step: Database['durably_steps'] = {
        id,
        run_id: input.runId,
        name: input.name,
        index: input.index,
        status: input.status,
        output:
          input.output !== undefined ? JSON.stringify(input.output) : null,
        error: input.error ?? null,
        started_at: input.startedAt,
        completed_at: completedAt,
      }

      await db.insertInto('durably_steps').values(step).execute()

      return rowToStep(step)
    },

    /**
     * All steps of a run, ordered by their execution index.
     */
    async getSteps(runId: string): Promise<Step[]> {
      const rows = await db
        .selectFrom('durably_steps')
        .selectAll()
        .where('run_id', '=', runId)
        .orderBy('index', 'asc')
        .execute()

      return rows.map(rowToStep)
    },

    /**
     * Look up a successfully completed step by name (used for step replay
     * during resume); failed/cancelled attempts are ignored.
     */
    async getCompletedStep(runId: string, name: string): Promise<Step | null> {
      const row = await db
        .selectFrom('durably_steps')
        .selectAll()
        .where('run_id', '=', runId)
        .where('name', '=', name)
        .where('status', '=', 'completed')
        .executeTakeFirst()

      return row ? rowToStep(row) : null
    },
JSON.stringify(input.data) : null,\n created_at: now,\n }\n\n await db.insertInto('durably_logs').values(log).execute()\n\n return rowToLog(log)\n },\n\n async getLogs(runId: string): Promise<Log[]> {\n const rows = await db\n .selectFrom('durably_logs')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('created_at', 'asc')\n .execute()\n\n return rows.map(rowToLog)\n },\n }\n}\n","import { prettifyError } from 'zod'\nimport { createStepContext } from './context'\nimport { CancelledError, getErrorMessage } from './errors'\nimport type { EventEmitter } from './events'\nimport type { JobRegistry } from './job'\nimport type { Storage } from './storage'\n\n/**\n * Worker configuration\n */\nexport interface WorkerConfig {\n pollingInterval: number\n heartbeatInterval: number\n staleThreshold: number\n}\n\n/**\n * Worker state\n */\nexport interface Worker {\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Check if worker is running\n */\n readonly isRunning: boolean\n}\n\n/**\n * Create a worker instance\n */\nexport function createWorker(\n config: WorkerConfig,\n storage: Storage,\n eventEmitter: EventEmitter,\n jobRegistry: JobRegistry,\n): Worker {\n let running = false\n let currentRunPromise: Promise<void> | null = null\n let pollingTimeout: ReturnType<typeof setTimeout> | null = null\n let stopResolver: (() => void) | null = null\n let heartbeatInterval: ReturnType<typeof setInterval> | null = null\n let currentRunId: string | null = null\n\n /**\n * Recover stale runs by resetting them to pending\n */\n async function recoverStaleRuns(): Promise<void> {\n const staleThreshold = new Date(\n Date.now() - config.staleThreshold,\n ).toISOString()\n const runningRuns = await storage.getRuns({ status: 'running' })\n\n for (const run of runningRuns) {\n if (run.heartbeatAt < staleThreshold) {\n // This run is stale - reset to pending\n await 
  /**
   * Update heartbeat for the currently executing run (no-op when idle).
   * Called periodically from the interval installed by executeRun.
   */
  async function updateHeartbeat(): Promise<void> {
    if (currentRunId) {
      await storage.updateRun(currentRunId, {
        heartbeatAt: new Date().toISOString(),
      })
    }
  }

  /**
   * Handle successful run completion: persist output/status and emit
   * run:complete. Skipped entirely if the run was cancelled meanwhile.
   */
  async function handleRunSuccess(
    runId: string,
    jobName: string,
    output: unknown,
    startTime: number,
  ): Promise<void> {
    // Check if run was cancelled during execution - don't overwrite cancelled status
    const currentRun = await storage.getRun(runId)
    if (!currentRun || currentRun.status === 'cancelled') {
      return
    }

    await storage.updateRun(runId, {
      status: 'completed',
      output,
      completedAt: new Date().toISOString(),
    })

    eventEmitter.emit({
      type: 'run:complete',
      runId,
      jobName,
      output,
      duration: Date.now() - startTime,
      labels: currentRun.labels,
    })
  }

  /**
   * Handle failed run: persist error/status and emit run:fail.
   * Cancellation (CancelledError or a 'cancelled' DB status) is not treated
   * as failure - the run already carries its final status.
   */
  async function handleRunFailure(
    runId: string,
    jobName: string,
    error: unknown,
  ): Promise<void> {
    // If the error is CancelledError, don't treat it as a failure
    // The run status is already 'cancelled'
    if (error instanceof CancelledError) {
      return
    }

    // Check if run was cancelled during execution - don't overwrite cancelled status
    const currentRun = await storage.getRun(runId)
    if (!currentRun || currentRun.status === 'cancelled') {
      return
    }

    const errorMessage = getErrorMessage(error)

    // Get the failed step name if available
    const steps = await storage.getSteps(runId)
    const failedStep = steps.find((s) => s.status === 'failed')

    await storage.updateRun(runId, {
      status: 'failed',
      error: errorMessage,
      completedAt: new Date().toISOString(),
    })

    eventEmitter.emit({
      type: 'run:fail',
      runId,
      jobName,
      error: errorMessage,
      failedStepName: failedStep?.name ?? 'unknown',
      labels: currentRun.labels,
    })
  }

  /**
   * Execute a single claimed run end to end:
   * install the heartbeat interval, emit run:start, run the job function
   * with a step context, validate output against the job schema (if any),
   * then route to handleRunSuccess / handleRunFailure. The finally block
   * always disposes the step context and clears heartbeat state.
   */
  async function executeRun(
    run: Awaited<ReturnType<typeof storage.getRun>> & { id: string },
    job: NonNullable<ReturnType<typeof jobRegistry.get>>,
  ): Promise<void> {
    // Track current run for heartbeat updates
    currentRunId = run.id

    // Start heartbeat interval
    // Errors are emitted as events but don't stop execution
    heartbeatInterval = setInterval(() => {
      updateHeartbeat().catch((error) => {
        eventEmitter.emit({
          type: 'worker:error',
          error: getErrorMessage(error),
          context: 'heartbeat',
          runId: run.id,
        })
      })
    }, config.heartbeatInterval)

    // Emit run:start event
    eventEmitter.emit({
      type: 'run:start',
      runId: run.id,
      jobName: run.jobName,
      input: run.input,
      labels: run.labels,
    })

    const startTime = Date.now()

    const { step, dispose } = createStepContext(
      run,
      run.jobName,
      storage,
      eventEmitter,
    )

    try {
      // Execute job with step context
      const output = await job.fn(step, run.input)

      // Validate output if schema exists
      if (job.outputSchema) {
        const parseResult = job.outputSchema.safeParse(output)
        if (!parseResult.success) {
          throw new Error(`Invalid output: ${prettifyError(parseResult.error)}`)
        }
      }

      await handleRunSuccess(run.id, run.jobName, output, startTime)
    } catch (error) {
      await handleRunFailure(run.id, run.jobName, error)
    } finally {
      dispose()
      // Stop heartbeat interval
      if (heartbeatInterval) {
        clearInterval(heartbeatInterval)
        heartbeatInterval = null
      }
      currentRunId = null
    }
  }
next pending run (SELECT + UPDATE in one statement)\n const run = await storage.claimNextPendingRun(excludeConcurrencyKeys)\n if (!run) {\n return false\n }\n\n // Get the job definition\n const job = jobRegistry.get(run.jobName)\n if (!job) {\n // Unknown job - mark as failed\n await storage.updateRun(run.id, {\n status: 'failed',\n error: `Unknown job: ${run.jobName}`,\n })\n return true\n }\n\n await executeRun(run, job)\n\n return true\n }\n\n async function poll(): Promise<void> {\n if (!running) {\n return\n }\n\n const doWork = async () => {\n // Recover stale runs before processing\n await recoverStaleRuns()\n await processNextRun()\n }\n\n try {\n currentRunPromise = doWork()\n await currentRunPromise\n } finally {\n currentRunPromise = null\n }\n\n if (running) {\n pollingTimeout = setTimeout(() => poll(), config.pollingInterval)\n } else if (stopResolver) {\n stopResolver()\n stopResolver = null\n }\n }\n\n return {\n get isRunning(): boolean {\n return running\n },\n\n start(): void {\n if (running) {\n return\n }\n running = true\n poll()\n },\n\n async stop(): Promise<void> {\n if (!running) {\n return\n }\n\n running = false\n\n if (pollingTimeout) {\n clearTimeout(pollingTimeout)\n pollingTimeout = null\n }\n\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n\n if (currentRunPromise) {\n // Wait for current run to complete\n return new Promise<void>((resolve) => {\n stopResolver = resolve\n })\n }\n },\n }\n}\n","/**\n * Error thrown when a run is cancelled during execution.\n * The worker catches this error and treats it specially - it does not\n * mark the run as failed, as the run status is already 'cancelled'.\n */\nexport class CancelledError extends Error {\n constructor(runId: string) {\n super(`Run was cancelled: ${runId}`)\n this.name = 'CancelledError'\n }\n}\n\n/**\n * Extract error message from unknown error\n */\nexport function getErrorMessage(error: unknown): string {\n return error 
instanceof Error ? error.message : String(error)\n}\n","import { CancelledError } from './errors'\nimport type { EventEmitter } from './events'\nimport type { StepContext } from './job'\nimport type { Run, Storage } from './storage'\n\n/**\n * Create a step context for executing a run\n */\nexport function createStepContext(\n run: Run,\n jobName: string,\n storage: Storage,\n eventEmitter: EventEmitter,\n): { step: StepContext; dispose: () => void } {\n let stepIndex = run.currentStepIndex\n let currentStepName: string | null = null\n\n const controller = new AbortController()\n\n const unsubscribe = eventEmitter.on('run:cancel', (event) => {\n if (event.runId === run.id) {\n controller.abort()\n }\n })\n\n const step: StepContext = {\n get runId(): string {\n return run.id\n },\n\n async run<T>(\n name: string,\n fn: (signal: AbortSignal) => T | Promise<T>,\n ): Promise<T> {\n // Fast path: check in-memory signal first (set by run:cancel event)\n if (controller.signal.aborted) {\n throw new CancelledError(run.id)\n }\n\n // Slow path: DB check for cases where event wasn't received\n // (e.g., run cancelled while worker was down, then resumed)\n const currentRun = await storage.getRun(run.id)\n if (currentRun?.status === 'cancelled') {\n controller.abort()\n throw new CancelledError(run.id)\n }\n\n // Check cancellation before replaying cached steps\n if (controller.signal.aborted) {\n throw new CancelledError(run.id)\n }\n\n // Check if step was already completed\n const existingStep = await storage.getCompletedStep(run.id, name)\n if (existingStep) {\n stepIndex++\n return existingStep.output as T\n }\n\n // Track current step for log attribution\n currentStepName = name\n\n // Record step start time\n const startedAt = new Date().toISOString()\n const startTime = Date.now()\n\n // Emit step:start event\n eventEmitter.emit({\n type: 'step:start',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n labels: run.labels,\n })\n\n try {\n // Execute the step 
with the abort signal\n const result = await fn(controller.signal)\n\n // Save step result\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: 'completed',\n output: result,\n startedAt,\n })\n\n // Update run's current step index\n stepIndex++\n await storage.updateRun(run.id, { currentStepIndex: stepIndex })\n\n // Emit step:complete event\n eventEmitter.emit({\n type: 'step:complete',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex: stepIndex - 1,\n output: result,\n duration: Date.now() - startTime,\n labels: run.labels,\n })\n\n return result\n } catch (error) {\n const isCancelled = controller.signal.aborted\n const errorMessage =\n error instanceof Error ? error.message : String(error)\n\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: isCancelled ? 'cancelled' : 'failed',\n error: errorMessage,\n startedAt,\n })\n\n eventEmitter.emit({\n ...(isCancelled\n ? { type: 'step:cancel' as const }\n : { type: 'step:fail' as const, error: errorMessage }),\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n labels: run.labels,\n })\n\n if (isCancelled) {\n throw new CancelledError(run.id)\n }\n throw error\n } finally {\n // Clear current step after execution\n currentStepName = null\n }\n },\n\n progress(current: number, total?: number, message?: string): void {\n const progressData = { current, total, message }\n // Fire and forget - don't await\n storage.updateRun(run.id, { progress: progressData })\n // Emit progress event\n eventEmitter.emit({\n type: 'run:progress',\n runId: run.id,\n jobName,\n progress: progressData,\n labels: run.labels,\n })\n },\n\n log: {\n info(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n jobName,\n labels: run.labels,\n stepName: currentStepName,\n level: 'info',\n message,\n data,\n })\n },\n\n warn(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: 
run.id,\n jobName,\n labels: run.labels,\n stepName: currentStepName,\n level: 'warn',\n message,\n data,\n })\n },\n\n error(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n jobName,\n labels: run.labels,\n stepName: currentStepName,\n level: 'error',\n message,\n data,\n })\n },\n },\n }\n\n return { step, dispose: unsubscribe }\n}\n","import type { z } from 'zod'\nimport type { StepContext } from './job'\n\n/**\n * Job run function type\n */\nexport type JobRunFunction<TInput, TOutput> = (\n step: StepContext,\n input: TInput,\n) => Promise<TOutput>\n\n/**\n * Job definition - a standalone description of a job\n * This is the result of calling defineJob() and can be passed to durably.register()\n */\nexport interface JobDefinition<TName extends string, TInput, TOutput> {\n readonly name: TName\n readonly input: z.ZodType<TInput>\n readonly output: z.ZodType<TOutput> | undefined\n readonly run: JobRunFunction<TInput, TOutput>\n}\n\n/**\n * Extract input type from a JobDefinition\n * @example\n * ```ts\n * type Input = JobInput<typeof myJob> // { userId: string }\n * ```\n */\nexport type JobInput<T> =\n T extends JobDefinition<string, infer TInput, unknown> ? TInput : never\n\n/**\n * Extract output type from a JobDefinition\n * @example\n * ```ts\n * type Output = JobOutput<typeof myJob> // { count: number }\n * ```\n */\nexport type JobOutput<T> =\n T extends JobDefinition<string, unknown, infer TOutput> ? TOutput : never\n\n/**\n * Configuration for defining a job\n */\nexport interface DefineJobConfig<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined,\n> {\n name: TName\n input: TInputSchema\n output?: TOutputSchema\n run: JobRunFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n >\n}\n\n/**\n * Define a job - creates a JobDefinition that can be registered with durably.register()\n *\n * @example\n * ```ts\n * import { defineJob } from '@coji/durably'\n * import { z } from 'zod'\n *\n * export const syncUsers = defineJob({\n * name: 'sync-users',\n * input: z.object({ orgId: z.string() }),\n * output: z.object({ syncedCount: z.number() }),\n * run: async (step, input) => {\n * const users = await step.run('fetch-users', () => fetchUsers(input.orgId))\n * return { syncedCount: users.length }\n * },\n * })\n * ```\n */\nexport function defineJob<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined = undefined,\n>(\n config: DefineJobConfig<TName, TInputSchema, TOutputSchema>,\n): JobDefinition<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n> {\n return {\n name: config.name,\n input: config.input,\n output: config.output,\n run: config.run,\n } as JobDefinition<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n >\n}\n","/**\n * HTTP response utilities for the Durably HTTP handler.\n * Extracted to eliminate duplication in server.ts handlers.\n */\n\nimport { getErrorMessage } from './errors'\n\nexport { getErrorMessage }\n\n/**\n * JSON response headers\n */\nconst JSON_HEADERS = {\n 'Content-Type': 'application/json',\n} as const\n\n/**\n * Create a JSON response\n */\nexport function jsonResponse(data: unknown, status = 200): Response {\n return new Response(JSON.stringify(data), {\n status,\n headers: JSON_HEADERS,\n })\n}\n\n/**\n * Create an error response with consistent format\n */\nexport function errorResponse(\n message: string,\n status: 400 | 404 | 500 = 500,\n): Response {\n return jsonResponse({ error: message }, status)\n}\n\n/**\n * Create a success response with { success: true }\n */\nexport function successResponse(): Response {\n return jsonResponse({ success: true })\n}\n\n/**\n * Get required query parameter or return error response\n */\nexport function getRequiredQueryParam(\n url: URL,\n paramName: string,\n): string | Response {\n const value = url.searchParams.get(paramName)\n if (!value) {\n return errorResponse(`${paramName} query parameter is required`, 400)\n }\n return value\n}\n","/**\n * SSE (Server-Sent Events) utilities for streaming events to clients.\n * Extracted to eliminate duplication between subscribe and runsSubscribe handlers.\n */\n\nimport type { Unsubscribe } from './events'\n\n/**\n * SSE response headers\n */\nconst SSE_HEADERS = {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n} as const\n\n/**\n * Encode data as SSE format: `data: ${json}\\n\\n`\n */\nfunction formatSSE(data: unknown): string {\n return `data: ${JSON.stringify(data)}\\n\\n`\n}\n\n/**\n * Create a TextEncoder for SSE streams\n */\nfunction createSSEEncoder(): TextEncoder {\n return new TextEncoder()\n}\n\n/**\n * Encode and format data for SSE streaming\n */\nfunction 
encodeSSE(encoder: TextEncoder, data: unknown): Uint8Array {\n return encoder.encode(formatSSE(data))\n}\n\n/**\n * Create an SSE Response from a ReadableStream\n */\nexport function createSSEResponse(stream: ReadableStream): Response {\n return new Response(stream, {\n status: 200,\n headers: SSE_HEADERS,\n })\n}\n\n/**\n * Transform a ReadableStream of events into an SSE-formatted stream\n */\nexport function createSSEStreamFromReader<T>(\n reader: ReadableStreamDefaultReader<T>,\n): ReadableStream<Uint8Array> {\n const encoder = createSSEEncoder()\n\n return new ReadableStream({\n async start(controller) {\n try {\n while (true) {\n const { done, value } = await reader.read()\n if (done) {\n controller.close()\n break\n }\n\n controller.enqueue(encodeSSE(encoder, value))\n }\n } catch (error) {\n controller.error(error)\n }\n },\n cancel() {\n reader.releaseLock()\n },\n })\n}\n\n/**\n * Transform a ReadableStream of events into an SSE-formatted stream with\n * throttling for `run:progress` events.\n */\nexport function createThrottledSSEStreamFromReader<T>(\n reader: ReadableStreamDefaultReader<T>,\n throttleMs: number,\n): ReadableStream<Uint8Array> {\n if (throttleMs <= 0) {\n return createSSEStreamFromReader(reader)\n }\n\n const encoder = createSSEEncoder()\n let closed = false\n let throttle: {\n controller: SSEStreamController\n dispose: () => void\n } | null = null\n\n return new ReadableStream({\n async start(controller) {\n const innerCtrl: SSEStreamController = {\n enqueue: (data: unknown) =>\n controller.enqueue(encodeSSE(encoder, data)),\n close: () => {\n closed = true\n controller.close()\n },\n get closed() {\n return closed\n },\n }\n throttle = createThrottledSSEController(innerCtrl, throttleMs)\n\n try {\n while (true) {\n const { done, value } = await reader.read()\n if (done) {\n throttle.controller.close()\n break\n }\n throttle.controller.enqueue(value)\n }\n } catch (error) {\n throttle.dispose()\n reader.releaseLock()\n 
controller.error(error)\n }\n },\n cancel() {\n closed = true\n throttle?.dispose()\n reader.releaseLock()\n },\n })\n}\n\n/**\n * SSE stream controller with cleanup support\n */\nexport interface SSEStreamController {\n enqueue: (data: unknown) => void\n close: () => void\n readonly closed: boolean\n}\n\n/**\n * Create an SSE stream from event subscriptions.\n * Handles the common pattern of subscribing to multiple events and streaming them.\n *\n * @param setup - Function to set up event subscriptions, returns cleanup functions\n * @returns SSE Response\n */\nexport function createSSEStreamFromSubscriptions(\n setup: (controller: SSEStreamController) => Unsubscribe[],\n): ReadableStream<Uint8Array> {\n const encoder = createSSEEncoder()\n let closed = false\n let unsubscribes: Unsubscribe[] = []\n\n return new ReadableStream({\n start(controller) {\n const sseController: SSEStreamController = {\n enqueue: (data: unknown) => {\n if (closed) return\n controller.enqueue(encodeSSE(encoder, data))\n },\n close: () => {\n if (closed) return\n closed = true\n controller.close()\n },\n get closed() {\n return closed\n },\n }\n\n unsubscribes = setup(sseController)\n },\n cancel() {\n closed = true\n for (const unsubscribe of unsubscribes) {\n unsubscribe()\n }\n },\n })\n}\n\nconst TERMINAL_EVENT_TYPES = new Set([\n 'run:complete',\n 'run:fail',\n 'run:cancel',\n 'run:delete',\n])\n\n/**\n * Create an SSE stream controller that throttles `run:progress` events.\n *\n * - First progress event per run is delivered immediately\n * - Subsequent events within the throttle window are coalesced (latest wins)\n * - A trailing flush ensures the last progress is always delivered\n * - Non-progress events pass through immediately\n */\nexport function createThrottledSSEController(\n inner: SSEStreamController,\n throttleMs: number,\n): { controller: SSEStreamController; dispose: () => void } {\n if (throttleMs <= 0) {\n return { controller: inner, dispose: () => {} }\n }\n\n // 
Per-run throttle state\n const pending = new Map<\n string,\n { data: unknown; timer: ReturnType<typeof setTimeout> }\n >()\n\n // Track last send time per run for leading-edge delivery\n const lastSent = new Map<string, number>()\n\n const controller: SSEStreamController = {\n enqueue(data: unknown) {\n if (inner.closed) return\n\n const event =\n typeof data === 'object' && data !== null\n ? (data as { type?: string; runId?: string })\n : null\n\n // Flush and clean up throttle state for terminal run events\n if (event?.runId && TERMINAL_EVENT_TYPES.has(event.type ?? '')) {\n lastSent.delete(event.runId)\n const entry = pending.get(event.runId)\n if (entry) {\n clearTimeout(entry.timer)\n if (!inner.closed) inner.enqueue(entry.data)\n pending.delete(event.runId)\n }\n }\n\n if (event?.type !== 'run:progress' || !event?.runId) {\n inner.enqueue(data)\n return\n }\n\n const runId = event.runId\n const now = Date.now()\n const last = lastSent.get(runId) ?? 0\n\n // Leading edge: send immediately if enough time has passed\n if (now - last >= throttleMs) {\n lastSent.set(runId, now)\n // Clear any pending flush for this run\n const entry = pending.get(runId)\n if (entry) {\n clearTimeout(entry.timer)\n pending.delete(runId)\n }\n inner.enqueue(data)\n return\n }\n\n // Trailing edge: buffer latest and schedule flush\n const existing = pending.get(runId)\n if (existing) {\n clearTimeout(existing.timer)\n }\n\n const delay = Math.max(0, throttleMs - (now - last))\n const timer = setTimeout(() => {\n const current = pending.get(runId)\n if (!current || current.timer !== timer) return\n\n pending.delete(runId)\n if (!inner.closed) {\n lastSent.set(runId, Date.now())\n inner.enqueue(current.data)\n }\n }, delay)\n\n pending.set(runId, { data, timer })\n },\n close() {\n // Flush all pending progress events before closing\n for (const [, entry] of pending) {\n clearTimeout(entry.timer)\n if (!inner.closed) {\n inner.enqueue(entry.data)\n }\n }\n pending.clear()\n 
lastSent.clear()\n inner.close()\n },\n get closed() {\n return inner.closed\n },\n }\n\n const dispose = () => {\n for (const [, entry] of pending) {\n clearTimeout(entry.timer)\n }\n pending.clear()\n lastSent.clear()\n }\n\n return { controller, dispose }\n}\n","import type { Durably } from './durably'\nimport type { AnyEventInput } from './events'\nimport {\n errorResponse,\n getErrorMessage,\n getRequiredQueryParam,\n jsonResponse,\n successResponse,\n} from './http'\nimport {\n createSSEResponse,\n createSSEStreamFromSubscriptions,\n createThrottledSSEController,\n createThrottledSSEStreamFromReader,\n type SSEStreamController,\n} from './sse'\nimport type { Run, RunFilter } from './storage'\nimport { toClientRun } from './storage'\n\n/**\n * Run operation types for onRunAccess\n */\nexport type RunOperation =\n | 'read'\n | 'subscribe'\n | 'steps'\n | 'retry'\n | 'cancel'\n | 'delete'\n\n/**\n * Subscription filter — only fields that SSE subscriptions actually support.\n */\nexport type RunsSubscribeFilter<\n TLabels extends Record<string, string> = Record<string, string>,\n> = Pick<RunFilter<TLabels>, 'jobName' | 'labels'>\n\n/**\n * Request body for triggering a job\n */\nexport interface TriggerRequest<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n jobName: string\n input: unknown\n idempotencyKey?: string\n concurrencyKey?: string\n labels?: TLabels\n}\n\n/**\n * Response for trigger endpoint\n */\nexport interface TriggerResponse {\n runId: string\n}\n\n/**\n * Auth middleware configuration.\n * When `auth` is set, `authenticate` is required.\n * TContext is inferred from authenticate's return type.\n * TLabels is inferred from the Durably instance.\n */\nexport interface AuthConfig<\n TContext,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n /** Authenticate every request. Return context or throw Response to reject. 
*/\n authenticate: (request: Request) => Promise<TContext> | TContext\n\n /** Guard before trigger. Called after body validation and job resolution. */\n onTrigger?: (\n ctx: TContext,\n trigger: TriggerRequest<TLabels>,\n ) => Promise<void> | void\n\n /** Guard before run-level operations. Run is pre-fetched. */\n onRunAccess?: (\n ctx: TContext,\n run: Run<TLabels>,\n info: { operation: RunOperation },\n ) => Promise<void> | void\n\n /** Scope runs list queries (GET /runs). */\n scopeRuns?: (\n ctx: TContext,\n filter: RunFilter<TLabels>,\n ) => RunFilter<TLabels> | Promise<RunFilter<TLabels>>\n\n /** Scope runs subscribe stream (GET /runs/subscribe). Falls back to scopeRuns if not set. */\n scopeRunsSubscribe?: (\n ctx: TContext,\n filter: RunsSubscribeFilter<TLabels>,\n ) => RunsSubscribeFilter<TLabels> | Promise<RunsSubscribeFilter<TLabels>>\n}\n\n/**\n * Handler interface for HTTP endpoints\n */\nexport interface DurablyHandler {\n /**\n * Handle all Durably HTTP requests with automatic routing + auth\n *\n * Routes:\n * - GET {basePath}/subscribe?runId=xxx - SSE stream\n * - GET {basePath}/runs - List runs\n * - GET {basePath}/runs/subscribe - SSE stream of run updates\n * - GET {basePath}/run?runId=xxx - Get single run\n * - GET {basePath}/steps?runId=xxx - Get steps\n * - POST {basePath}/trigger - Trigger a job\n * - POST {basePath}/retry?runId=xxx - Retry a failed run\n * - POST {basePath}/cancel?runId=xxx - Cancel a run\n * - DELETE {basePath}/run?runId=xxx - Delete a run\n */\n handle(request: Request, basePath: string): Promise<Response>\n}\n\n/**\n * Options for createDurablyHandler\n */\nexport interface CreateDurablyHandlerOptions<\n TContext = undefined,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n /**\n * Called before handling each request (after authentication).\n * Use this to initialize Durably (migrate, start worker, etc.)\n */\n onRequest?: () => Promise<void> | void\n\n /**\n * Throttle interval in milliseconds 
for SSE progress events.\n * @default 100\n */\n sseThrottleMs?: number\n\n /**\n * Auth middleware. When set, authenticate is required and auth applies to ALL endpoints.\n */\n auth?: AuthConfig<TContext, TLabels>\n}\n\n/**\n * Valid status values for runs\n */\nconst VALID_STATUSES = [\n 'pending',\n 'running',\n 'completed',\n 'failed',\n 'cancelled',\n] as const satisfies readonly RunFilter['status'][]\n\nconst VALID_STATUSES_SET: ReadonlySet<string> = new Set(VALID_STATUSES)\n\n/**\n * Parse label.* query params into a Record<string, string>\n */\nfunction parseLabelsFromParams(\n searchParams: URLSearchParams,\n): Record<string, string> | undefined {\n const labels: Record<string, string> = {}\n for (const [key, value] of searchParams.entries()) {\n if (key.startsWith('label.')) {\n labels[key.slice(6)] = value\n }\n }\n return Object.keys(labels).length > 0 ? labels : undefined\n}\n\n/**\n * Parse and validate RunFilter from query params.\n * Returns the filter or an error Response.\n */\nfunction parseRunFilter(url: URL): RunFilter | Response {\n const jobNames = url.searchParams.getAll('jobName')\n const statusParam = url.searchParams.get('status')\n const limitParam = url.searchParams.get('limit')\n const offsetParam = url.searchParams.get('offset')\n const labels = parseLabelsFromParams(url.searchParams)\n\n // Validate status\n if (statusParam && !VALID_STATUSES_SET.has(statusParam)) {\n return errorResponse(\n `Invalid status: ${statusParam}. 
Must be one of: ${VALID_STATUSES.join(', ')}`,\n 400,\n )\n }\n\n // Validate limit\n let limit: number | undefined\n if (limitParam) {\n limit = Number.parseInt(limitParam, 10)\n if (Number.isNaN(limit) || limit < 0) {\n return errorResponse('Invalid limit: must be a non-negative integer', 400)\n }\n }\n\n // Validate offset\n let offset: number | undefined\n if (offsetParam) {\n offset = Number.parseInt(offsetParam, 10)\n if (Number.isNaN(offset) || offset < 0) {\n return errorResponse(\n 'Invalid offset: must be a non-negative integer',\n 400,\n )\n }\n }\n\n return {\n jobName: jobNames.length > 0 ? jobNames : undefined,\n status: statusParam as RunFilter['status'],\n labels,\n limit,\n offset,\n }\n}\n\n/**\n * Parse RunsSubscribeFilter from query params.\n */\nfunction parseRunsSubscribeFilter(url: URL): RunsSubscribeFilter {\n const jobNames = url.searchParams.getAll('jobName')\n const labels = parseLabelsFromParams(url.searchParams)\n\n return {\n jobName: jobNames.length > 0 ? jobNames : undefined,\n labels,\n }\n}\n\n/**\n * Check if event labels match filter labels (all filter labels must match)\n */\nfunction matchesLabels(\n eventLabels: Record<string, string>,\n filterLabels: Record<string, string>,\n): boolean {\n for (const [key, value] of Object.entries(filterLabels)) {\n if (eventLabels[key] !== value) return false\n }\n return true\n}\n\n/**\n * Create HTTP handlers for Durably\n * Uses Web Standard Request/Response for framework-agnostic usage\n */\n// biome-ignore lint/suspicious/noExplicitAny: TLabels must be inferred from Durably instance\nexport function createDurablyHandler<\n TContext = undefined,\n TLabels extends Record<string, string> = Record<string, string>,\n>(\n durably: Durably<any, TLabels>,\n options?: CreateDurablyHandlerOptions<TContext, TLabels>,\n): DurablyHandler {\n const throttleMs = options?.sseThrottleMs ?? 
100\n const auth = options?.auth\n\n // Validate: auth requires authenticate\n if (auth && !auth.authenticate) {\n throw new Error(\n 'createDurablyHandler: auth.authenticate is required when auth is provided',\n )\n }\n\n // --- Shared helpers ---\n\n /** Wrap handler with try/catch that re-throws Response and catches everything else as 500 */\n async function withErrorHandling(\n fn: () => Promise<Response>,\n ): Promise<Response> {\n try {\n return await fn()\n } catch (error) {\n if (error instanceof Response) throw error\n return errorResponse(getErrorMessage(error), 500)\n }\n }\n\n /** Fetch run, check auth, return run or error Response */\n async function requireRunAccess(\n url: URL,\n ctx: TContext | undefined,\n operation: RunOperation,\n ): Promise<{ run: Run<TLabels>; runId: string } | Response> {\n const runId = getRequiredQueryParam(url, 'runId')\n if (runId instanceof Response) return runId\n\n const run = await durably.getRun(runId)\n if (!run) return errorResponse('Run not found', 404)\n\n if (auth?.onRunAccess && ctx !== undefined) {\n await auth.onRunAccess(ctx as TContext, run as Run<TLabels>, {\n operation,\n })\n }\n\n return { run: run as Run<TLabels>, runId }\n }\n\n // --- Private endpoint handlers (closure-scoped, not exposed on returned object) ---\n\n async function handleTrigger(\n request: Request,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const body = (await request.json()) as TriggerRequest<TLabels>\n\n if (!body.jobName) {\n return errorResponse('jobName is required', 400)\n }\n\n const job = durably.getJob(body.jobName)\n if (!job) {\n return errorResponse(`Job not found: ${body.jobName}`, 404)\n }\n\n // Auth hook: onTrigger (after validation)\n if (auth?.onTrigger && ctx !== undefined) {\n await auth.onTrigger(ctx as TContext, body)\n }\n\n const run = await job.trigger(\n (body.input ?? 
{}) as Record<string, unknown>,\n {\n idempotencyKey: body.idempotencyKey,\n concurrencyKey: body.concurrencyKey,\n labels: body.labels,\n },\n )\n\n const response: TriggerResponse = { runId: run.id }\n return jsonResponse(response)\n })\n }\n\n async function handleSubscribe(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n const result = await requireRunAccess(url, ctx, 'subscribe')\n if (result instanceof Response) return result\n\n const stream = durably.subscribe(result.runId)\n const sseStream = createThrottledSSEStreamFromReader(\n stream.getReader() as ReadableStreamDefaultReader<AnyEventInput>,\n throttleMs,\n )\n return createSSEResponse(sseStream)\n }\n\n async function handleRuns(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const filterOrError = parseRunFilter(url)\n if (filterOrError instanceof Response) return filterOrError\n\n let filter: RunFilter<TLabels> = filterOrError as RunFilter<TLabels>\n\n // Auth hook: scopeRuns\n if (auth?.scopeRuns && ctx !== undefined) {\n filter = await auth.scopeRuns(ctx as TContext, filter)\n }\n\n const runs = await durably.getRuns(filter)\n return jsonResponse(runs.map(toClientRun))\n })\n }\n\n async function handleRun(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'read')\n if (result instanceof Response) return result\n\n return jsonResponse(toClientRun(result.run))\n })\n }\n\n async function handleSteps(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'steps')\n if (result instanceof Response) return result\n\n const steps = await durably.storage.getSteps(result.runId)\n return jsonResponse(steps)\n })\n }\n\n async function handleRetry(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return 
withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'retry')\n if (result instanceof Response) return result\n\n await durably.retry(result.runId)\n return successResponse()\n })\n }\n\n async function handleCancel(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'cancel')\n if (result instanceof Response) return result\n\n await durably.cancel(result.runId)\n return successResponse()\n })\n }\n\n async function handleDelete(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'delete')\n if (result instanceof Response) return result\n\n await durably.deleteRun(result.runId)\n return successResponse()\n })\n }\n\n async function handleRunsSubscribe(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n let filter: RunsSubscribeFilter<TLabels>\n\n if (ctx !== undefined && auth?.scopeRunsSubscribe) {\n const parsed = parseRunsSubscribeFilter(\n url,\n ) as RunsSubscribeFilter<TLabels>\n filter = await auth.scopeRunsSubscribe(ctx as TContext, parsed)\n } else if (ctx !== undefined && auth?.scopeRuns) {\n // Fallback: use scopeRuns with subscribe-compatible filter\n const parsed = parseRunsSubscribeFilter(\n url,\n ) as RunsSubscribeFilter<TLabels>\n const scoped = await auth.scopeRuns(\n ctx as TContext,\n {\n ...parsed,\n } as RunFilter<TLabels>,\n )\n filter = { jobName: scoped.jobName, labels: scoped.labels }\n } else {\n filter = parseRunsSubscribeFilter(url) as RunsSubscribeFilter<TLabels>\n }\n\n return createRunsSSEStream(filter)\n }\n\n function createRunsSSEStream(filter: RunsSubscribeFilter): Response {\n const jobNameFilter = Array.isArray(filter.jobName)\n ? filter.jobName\n : filter.jobName\n ? 
[filter.jobName]\n : []\n const labelsFilter = filter.labels\n\n const matchesFilter = (\n jobName: string,\n labels?: Record<string, string>,\n ) => {\n if (jobNameFilter.length > 0 && !jobNameFilter.includes(jobName))\n return false\n if (\n labelsFilter &&\n (!labels ||\n !matchesLabels(labels, labelsFilter as Record<string, string>))\n )\n return false\n return true\n }\n\n const sseStream = createSSEStreamFromSubscriptions(\n (innerCtrl: SSEStreamController) => {\n const { controller: ctrl, dispose } = createThrottledSSEController(\n innerCtrl,\n throttleMs,\n )\n\n const unsubscribes = [\n durably.on('run:trigger', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:trigger',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:start', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:start',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:complete', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:complete',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:fail', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:fail',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:cancel', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:cancel',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:delete', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:delete',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:retry', (event) => {\n if (matchesFilter(event.jobName, 
event.labels)) {\n ctrl.enqueue({\n type: 'run:retry',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:progress', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:progress',\n runId: event.runId,\n jobName: event.jobName,\n progress: event.progress,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:start', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:start',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:complete', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:complete',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:fail', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:fail',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n error: event.error,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:cancel', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:cancel',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('log:write', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'log:write',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n stepName: event.stepName,\n level: event.level,\n message: event.message,\n data: event.data,\n })\n }\n }),\n ]\n\n return [...unsubscribes, dispose]\n },\n )\n\n return createSSEResponse(sseStream)\n }\n\n // --- Public API: only handle() ---\n\n return {\n 
async handle(request: Request, basePath: string): Promise<Response> {\n try {\n // 1. Authenticate (fail fast before anything else)\n let ctx: TContext | undefined\n if (auth?.authenticate) {\n ctx = await auth.authenticate(request)\n }\n\n // 2. Run onRequest hook (lazy init: migrations, worker start)\n if (options?.onRequest) {\n await options.onRequest()\n }\n\n // 3. Route by path + method\n const url = new URL(request.url)\n const path = url.pathname.replace(basePath, '')\n const method = request.method\n\n // GET routes\n if (method === 'GET') {\n if (path === '/subscribe') return await handleSubscribe(url, ctx)\n if (path === '/runs') return await handleRuns(url, ctx)\n if (path === '/run') return await handleRun(url, ctx)\n if (path === '/steps') return await handleSteps(url, ctx)\n if (path === '/runs/subscribe')\n return await handleRunsSubscribe(url, ctx)\n }\n\n // POST routes\n if (method === 'POST') {\n if (path === '/trigger') return await handleTrigger(request, ctx)\n if (path === '/retry') return await handleRetry(url, ctx)\n if (path === '/cancel') return await handleCancel(url, ctx)\n }\n\n // DELETE routes\n if (method === 'DELETE') {\n if (path === '/run') return await handleDelete(url, ctx)\n }\n\n return new Response('Not Found', { status: 404 })\n } catch (error) {\n // Auth hooks throw Response to reject — return as-is\n if (error instanceof Response) return error\n return errorResponse(getErrorMessage(error), 500)\n }\n },\n 
}\n}\n"],"mappings":";;;;;AACA,SAAS,cAAc;;;AC0RhB,SAAS,qBAAmC;AACjD,QAAM,YAAY,oBAAI,IAA8C;AACpE,MAAI,WAAW;AACf,MAAI,eAAoC;AAExC,SAAO;AAAA,IACL,GAAwB,MAAS,UAAyC;AACxE,UAAI,CAAC,UAAU,IAAI,IAAI,GAAG;AACxB,kBAAU,IAAI,MAAM,oBAAI,IAAI,CAAC;AAAA,MAC/B;AAEA,YAAM,gBAAgB,UAAU,IAAI,IAAI;AACxC,qBAAe,IAAI,QAA+C;AAElE,aAAO,MAAM;AACX,uBAAe,OAAO,QAA+C;AAAA,MACvE;AAAA,IACF;AAAA,IAEA,QAAQ,SAA6B;AACnC,qBAAe;AAAA,IACjB;AAAA,IAEA,KAAK,OAA4B;AAC/B;AACA,YAAM,YAAY;AAAA,QAChB,GAAG;AAAA,QACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC;AAAA,MACF;AAEA,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAC9C,UAAI,CAAC,eAAe;AAClB;AAAA,MACF;AAEA,iBAAW,YAAY,eAAe;AACpC,YAAI;AACF,mBAAS,SAAS;AAAA,QACpB,SAAS,OAAO;AACd,cAAI,cAAc;AAChB;AAAA,cACE,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,cACxD;AAAA,YACF;AAAA,UACF;AAAA,QAEF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AC9UA,SAAiB,qBAAqB;AAMtC,IAAM,OAAO,MAAM;AAAC;AAKpB,SAAS,wBACP,QACA,OACA,SACG;AACH,QAAM,SAAS,OAAO,UAAU,KAAK;AACrC,MAAI,CAAC,OAAO,SAAS;AACnB,UAAM,SAAS,UAAU,GAAG,OAAO,OAAO;AAC1C,UAAM,IAAI,MAAM,GAAG,MAAM,kBAAkB,cAAc,OAAO,KAAK,CAAC,EAAE;AAAA,EAC1E;AACA,SAAO,OAAO;AAChB;AAgLO,SAAS,oBAAiC;AAC/C,QAAM,OAAO,oBAAI,IAA6C;AAE9D,SAAO;AAAA,IACL,IAAqB,KAA2C;AAC9D,WAAK,IAAI,IAAI,MAAM,GAAsC;AAAA,IAC3D;AAAA,IAEA,IAAI,MAA2D;AAC7D,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,IAEA,IAAI,MAAuB;AACzB,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,EACF;AACF;AAKO,SAAS,gBAMd,QACA,SACA,cACA,UACA,cAC4C;AAE5C,QAAM,cAAc,SAAS,IAAI,OAAO,IAAI;AAC5C,MAAI,aAAa;AAEf,QAAI,YAAY,WAAW,QAAQ;AACjC,aAAO,YAAY;AAAA,IACrB;AAEA,UAAM,IAAI;AAAA,MACR,QAAQ,OAAO,IAAI;AAAA,IACrB;AAAA,EACF;AAEA,QAAM,cAAc,OAAO;AAC3B,QAAM,eAAe,OAAO;AAE5B,QAAM,SAAqD;AAAA,IACzD,MAAM,OAAO;AAAA,IAEb,MAAM,QACJ,OACA,SACqC;AAErC,YAAM,iBAAiB,wBAAwB,aAAa,KAAK;AAGjE,UAAI,gBAAgB,SAAS,QAAQ;AACnC,gCAAwB,cAAc,QAAQ,QAAQ,QAAQ;AAAA,MAChE;AAGA,YAAM,MAAM,MAAM,QAAQ,UAAU;AAAA,QAClC,SAAS,OAAO;AAAA,QAChB,OAAO;AAAA,QACP,gBAAgB,SAAS;AAAA,QACzB,gBAAgB,SAAS;AAAA,QACzB,QAAQ,SAAS;AAAA,MACnB,CAAC;AAGD,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX,SAAS,OAAO;AAAA,QAChB,OAAO;AAAA,QACP,
QAAQ,IAAI;AAAA,MACd,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,eACJ,OACA,SACwC;AAExC,YAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,OAAO;AAG7C,aAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAI;AACJ,YAAI,WAAW;AAEf,cAAM,eAA+B,CAAC;AAEtC,cAAM,UAAU,MAAM;AACpB,cAAI,SAAU;AACd,qBAAW;AACX,qBAAW,SAAS,aAAc,OAAM;AACxC,cAAI,WAAW;AACb,yBAAa,SAAS;AAAA,UACxB;AAAA,QACF;AAEA,qBAAa;AAAA,UACX,aAAa,GAAG,gBAAgB,CAAC,UAAU;AACzC,gBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAQ;AACR,sBAAQ;AAAA,gBACN,IAAI,IAAI;AAAA,gBACR,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAEA,qBAAa;AAAA,UACX,aAAa,GAAG,YAAY,CAAC,UAAU;AACrC,gBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAQ;AACR,qBAAO,IAAI,MAAM,MAAM,KAAK,CAAC;AAAA,YAC/B;AAAA,UACF,CAAC;AAAA,QACH;AAEA,YAAI,SAAS,YAAY;AACvB,gBAAM,aAAa,QAAQ;AAC3B,uBAAa;AAAA,YACX,aAAa,GAAG,gBAAgB,CAAC,UAAU;AACzC,kBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,qBAAK,QAAQ,QAAQ,WAAW,MAAM,QAAQ,CAAC,EAAE,MAAM,IAAI;AAAA,cAC7D;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF;AAEA,YAAI,SAAS,OAAO;AAClB,gBAAM,QAAQ,QAAQ;AACtB,uBAAa;AAAA,YACX,aAAa,GAAG,aAAa,CAAC,UAAU;AACtC,kBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAM,EAAE,OAAO,SAAS,MAAM,SAAS,IAAI;AAC3C,qBAAK,QAAQ;AAAA,kBACX,MAAM,EAAE,OAAO,SAAS,MAAM,SAAS,CAAC;AAAA,gBAC1C,EAAE,MAAM,IAAI;AAAA,cACd;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF;AAIA,gBACG,OAAO,IAAI,EAAE,EACb,KAAK,CAAC,eAAe;AACpB,cAAI,YAAY,CAAC,WAAY;AAC7B,cAAI,WAAW,WAAW,aAAa;AACrC,oBAAQ;AACR,oBAAQ;AAAA,cACN,IAAI,IAAI;AAAA,cACR,QAAQ,WAAW;AAAA,YACrB,CAAC;AAAA,UACH,WAAW,WAAW,WAAW,UAAU;AACzC,oBAAQ;AACR,mBAAO,IAAI,MAAM,WAAW,SAAS,YAAY,CAAC;AAAA,UACpD;AAAA,QACF,CAAC,EACA,MAAM,CAAC,UAAU;AAChB,cAAI,SAAU;AACd,kBAAQ;AACR,iBAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,QAClE,CAAC;AAGH,YAAI,SAAS,YAAY,QAAW;AAClC,sBAAY,WAAW,MAAM;AAC3B,gBAAI,CAAC,UAAU;AACb,sBAAQ;AACR;AAAA,gBACE,IAAI,MAAM,gCAAgC,QAAQ,OAAO,IAAI;AAAA,cAC/D;AAAA,YACF;AAAA,UACF,GAAG,QAAQ,OAAO;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,aACJ,QACuC;AACvC,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,YAAM,aAAa,OAAO,IAAI,CAAC,SAAS;AACtC,YAAI,QAAQ,OAAO,
SAAS,YAAY,WAAW,MAAM;AACvD,iBAAO;AAAA,QACT;AACA,eAAO,EAAE,OAAO,MAAgB,SAAS,OAAU;AAAA,MACrD,CAAC;AAGD,YAAM,YAGA,CAAC;AACP,eAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,cAAM,iBAAiB;AAAA,UACrB;AAAA,UACA,WAAW,CAAC,EAAE;AAAA,UACd,YAAY,CAAC;AAAA,QACf;AACA,YAAI,gBAAgB,WAAW,CAAC,EAAE,SAAS,QAAQ;AACjD;AAAA,YACE;AAAA,YACA,WAAW,CAAC,EAAE,SAAS;AAAA,YACvB,mBAAmB,CAAC;AAAA,UACtB;AAAA,QACF;AACA,kBAAU,KAAK;AAAA,UACb,OAAO;AAAA,UACP,SAAS,WAAW,CAAC,EAAE;AAAA,QACzB,CAAC;AAAA,MACH;AAGA,YAAM,OAAO,MAAM,QAAQ;AAAA,QACzB,UAAU,IAAI,CAAC,OAAO;AAAA,UACpB,SAAS,OAAO;AAAA,UAChB,OAAO,EAAE;AAAA,UACT,gBAAgB,EAAE,SAAS;AAAA,UAC3B,gBAAgB,EAAE,SAAS;AAAA,UAC3B,QAAQ,EAAE,SAAS;AAAA,QACrB,EAAE;AAAA,MACJ;AAGA,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,KAAK,CAAC,EAAE;AAAA,UACf,SAAS,OAAO;AAAA,UAChB,OAAO,UAAU,CAAC,EAAE;AAAA,UACpB,QAAQ,KAAK,CAAC,EAAE;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,OAAO,IAAwD;AACnE,YAAM,MAAM,MAAM,QAAQ,OAAO,EAAE;AACnC,UAAI,CAAC,OAAO,IAAI,YAAY,OAAO,MAAM;AACvC,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QACJ,QACuC;AACvC,YAAM,OAAO,MAAM,QAAQ,QAAQ;AAAA,QACjC,GAAG;AAAA,QACH,SAAS,OAAO;AAAA,MAClB,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAGA,WAAS,IAAI;AAAA,IACX,MAAM,OAAO;AAAA,IACb;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI,OAAO;AAAA,IACX;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO;AACT;;;ACvdA,IAAM,aAA0B;AAAA,EAC9B;AAAA,IACE,SAAS;AAAA,IACT,IAAI,OAAO,OAAO;AAEhB,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,YAAY,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,SAAS,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,mBAAmB,MAAM,EACnC,UAAU,mBAAmB,MAAM,EACnC,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,EAAE,UAAU,IAAI,CAAC,EAClE;AAAA,QAAU;AAAA,QAAsB;AAAA,QAAW,CAAC,QAC3C,IAAI,QAAQ,EAAE,UAAU,CAAC;AAAA,MAC3B,EACC,UAAU,YAAY,MAAM,EAC5B,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,gBAAgB,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACxD,UAAU,cAAc,MAAM,EAC9B,UAAU,gBAAgB,MAAM,EAChC,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAC
tD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,kCAAkC,EAC9C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,YAAY,iBAAiB,CAAC,EACvC,OAAO,EACP,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,qCAAqC,EACjD,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,iBAAiB,CAAC,EACrC,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,iCAAiC,EAC7C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,eAAe,EAC3B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,QAAQ,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAChD,UAAU,SAAS,WAAW,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,UAAU,gBAAgB,MAAM,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,6BAA6B,EACzC,YAAY,EACZ,GAAG,eAAe,EAClB,QAAQ,CAAC,UAAU,OAAO,CAAC,EAC3B,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,aAAa,MAAM,EAC7B,UAAU,SAAS,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACjD,UAAU,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACnD,UAAU,QAAQ,MAAM,EACxB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,8BAA8B,EAC1C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,yBAAyB,EACrC,YAAY,EACZ,UAAU,WAAW,WAAW,CAAC,QAAQ,IAAI,WAAW,CAAC,EACzD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAAA,IACb;AAAA,EACF;AACF;AAKA,eAAe,kBAAkB,IAAuC;AACtE,MAAI;AACF,UAAM,SAAS,MAAM,GAClB,WAAW,yBAAyB,EACpC,OAAO,SAAS,EAChB,QAAQ,WAAW,MAAM,EACzB,MAAM,CAAC,EACP,iBAAiB;AAEpB,WAAO,QAAQ,WAAW;AAAA,EAC5B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,cAAc,IAAqC;AACvE,QAAM,iBAAiB,MAAM,kBAAkB,EAAE;AAEjD,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,UAAU,gBAAgB;AACtC,YAAM,GAAG,YAAY,EAAE,QAAQ,OAAO,QAAQ;AAC5C,cAAM,UAAU,GAAG,GAAG;AAEtB,cAAM,IACH,WAAW,yBAAyB,EACpC,OAAO;AAAA,UACN,SAAS,UAAU;AAAA,UACnB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,QACrC,CAAC,EACA,QAAQ;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC/JA,SAAsB
,WAAW;AACjC,SAAS,wBAAwB;AAGjC,IAAM,OAAO,iBAAiB;AAyIvB,SAAS,YAEd,KAAuC;AACvC,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL,IAAI;AACJ,SAAO;AACT;AA+BA,IAAM,oBAAoB;AAE1B,SAAS,eAAe,QAAkD;AACxE,MAAI,CAAC,OAAQ;AACb,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,CAAC,kBAAkB,KAAK,GAAG,GAAG;AAChC,YAAM,IAAI;AAAA,QACR,sBAAsB,GAAG;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,SACP,KACK;AACL,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,SAAS,IAAI;AAAA,IACb,OAAO,KAAK,MAAM,IAAI,KAAK;AAAA,IAC3B,QAAQ,IAAI;AAAA,IACZ,gBAAgB,IAAI;AAAA,IACpB,gBAAgB,IAAI;AAAA,IACpB,kBAAkB,IAAI;AAAA,IACtB,WAAW,OAAO,IAAI,cAAc,CAAC;AAAA,IACrC,UAAU,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ,IAAI;AAAA,IACpD,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,QAAQ,KAAK,MAAM,IAAI,MAAM;AAAA,IAC7B,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,EACjB;AACF;AAKA,SAAS,UAAU,KAAsC;AACvD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAM,IAAI;AAAA,IACV,OAAO,IAAI;AAAA,IACX,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,EACnB;AACF;AAKA,SAAS,SAAS,KAAoC;AACpD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,UAAU,IAAI;AAAA,IACd,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,MAAM,IAAI,OAAO,KAAK,MAAM,IAAI,IAAI,IAAI;AAAA,IACxC,WAAW,IAAI;AAAA,EACjB;AACF;AAKO,SAAS,oBAAoB,IAA+B;AACjE,SAAO;AAAA,IACL,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAGnC,UAAI,MAAM,gBAAgB;AACxB,cAAM,WAAW,MAAM,GACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,YAAI,UAAU;AACZ,iBAAO,SAAS,QAAQ;AAAA,QAC1B;AAAA,MACF;AAEA,qBAAe,MAAM,MAAM;AAE3B,YAAM,KAAK,KAAK;AAChB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,UAAU,MAAM;AAAA,QAChB,OAAO,KAAK,UAAU,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,oBAAoB;AAAA,QACpB,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,QAAQ,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,QACzC,cAAc;AAAA,QACd,YAA
Y;AAAA,QACZ,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,gBAAgB,QAA0C;AAC9D,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,aAAO,MAAM,GAAG,YAAY,EAAE,QAAQ,OAAO,QAAQ;AACnD,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,OAAmC,CAAC;AAG1C,mBAAW,SAAS,QAAQ;AAC1B,yBAAe,MAAM,MAAM;AAAA,QAC7B;AAGA,mBAAW,SAAS,QAAQ;AAE1B,cAAI,MAAM,gBAAgB;AACxB,kBAAM,WAAW,MAAM,IACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,gBAAI,UAAU;AACZ,mBAAK,KAAK,QAAQ;AAClB;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK,KAAK;AAChB,eAAK,KAAK;AAAA,YACR;AAAA,YACA,UAAU,MAAM;AAAA,YAChB,OAAO,KAAK,UAAU,MAAM,KAAK;AAAA,YACjC,QAAQ;AAAA,YACR,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,oBAAoB;AAAA,YACpB,UAAU;AAAA,YACV,QAAQ;AAAA,YACR,OAAO;AAAA,YACP,QAAQ,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,YACzC,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,YAAY;AAAA,UACd,CAAC;AAAA,QACH;AAGA,cAAM,UAAU,KAAK,OAAO,CAAC,MAAM,EAAE,eAAe,GAAG;AACvD,YAAI,QAAQ,SAAS,GAAG;AACtB,gBAAM,IAAI,WAAW,cAAc,EAAE,OAAO,OAAO,EAAE,QAAQ;AAAA,QAC/D;AAEA,eAAO,KAAK,IAAI,QAAQ;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAAe,MAAqC;AAClE,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,UAA6C;AAAA,QACjD,YAAY;AAAA,MACd;AAEA,UAAI,KAAK,WAAW,OAAW,SAAQ,SAAS,KAAK;AACrD,UAAI,KAAK,qBAAqB;AAC5B,gBAAQ,qBAAqB,KAAK;AACpC,UAAI,KAAK,aAAa;AACpB,gBAAQ,WAAW,KAAK,WAAW,KAAK,UAAU,KAAK,QAAQ,IAAI;AACrE,UAAI,KAAK,WAAW;AAClB,gBAAQ,SAAS,KAAK,UAAU,KAAK,MAAM;AAC7C,UAAI,KAAK,UAAU,OAAW,SAAQ,QAAQ,KAAK;AACnD,UAAI,KAAK,gBAAgB;AACvB,gBAAQ,eAAe,KAAK;AAC9B,UAAI,KAAK,cAAc,OAAW,SAAQ,aAAa,KAAK;AAC5D,UAAI,KAAK,gBAAgB;AACvB,gBAAQ,eAAe,KAAK;AAE9B,YAAM,GACH,YAAY,cAAc,EAC1B,IAAI,OAAO,EACX,MAAM,MAAM,KAAK,KAAK,EACtB,QAAQ;AAAA,IACb;AAAA,IAEA,MAAM,UAAU,OAA8B;AAE5C,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACxE,YAAM,GAAG,WAAW,eAAe,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACzE,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,MAAM,KAAK,KAAK,EAAE,QAAQ;AAAA,IACtE;AA
AA,IAEA,MAAM,OAA4B,OAAkC;AAClE,YAAM,MAAM,MAAM,GACf,WAAW,cAAc,EACzB,SAAS,iBAAiB,mBAAmB,sBAAsB,EACnE,UAAU,cAAc,EACxB;AAAA,QAAO,CAAC,OACP,GAAG,GAAG,MAAc,kBAAkB,EAAE,GAAG,YAAY;AAAA,MACzD,EACC,MAAM,mBAAmB,KAAK,KAAK,EACnC,QAAQ,iBAAiB,EACzB,iBAAiB;AAEpB,aAAO,MAAO,SAAS,GAAG,IAAU;AAAA,IACtC;AAAA,IAEA,MAAM,QAA6B,QAAkC;AACnE,UAAI,QAAQ,GACT,WAAW,cAAc,EACzB,SAAS,iBAAiB,mBAAmB,sBAAsB,EACnE,UAAU,cAAc,EACxB;AAAA,QAAO,CAAC,OACP,GAAG,GAAG,MAAc,kBAAkB,EAAE,GAAG,YAAY;AAAA,MACzD,EACC,QAAQ,iBAAiB;AAE5B,UAAI,QAAQ,QAAQ;AAClB,gBAAQ,MAAM,MAAM,uBAAuB,KAAK,OAAO,MAAM;AAAA,MAC/D;AACA,UAAI,QAAQ,SAAS;AACnB,YAAI,MAAM,QAAQ,OAAO,OAAO,GAAG;AACjC,cAAI,OAAO,QAAQ,SAAS,GAAG;AAC7B,oBAAQ,MAAM,MAAM,yBAAyB,MAAM,OAAO,OAAO;AAAA,UACnE;AAAA,QACF,OAAO;AACL,kBAAQ,MAAM,MAAM,yBAAyB,KAAK,OAAO,OAAO;AAAA,QAClE;AAAA,MACF;AACA,UAAI,QAAQ,QAAQ;AAClB,cAAM,SAAS,OAAO;AACtB,uBAAe,MAAM;AACrB,mBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,cAAI,UAAU,OAAW;AACzB,kBAAQ,MAAM;AAAA,YACZ,wCAAwC,MAAM,GAAG,GAAG;AAAA,YACpD;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,cAAQ,MAAM,QAAQ,2BAA2B,MAAM;AAEvD,UAAI,QAAQ,UAAU,QAAW;AAC/B,gBAAQ,MAAM,MAAM,OAAO,KAAK;AAAA,MAClC;AACA,UAAI,QAAQ,WAAW,QAAW;AAEhC,YAAI,OAAO,UAAU,QAAW;AAC9B,kBAAQ,MAAM,MAAM,EAAE;AAAA,QACxB;AACA,gBAAQ,MAAM,OAAO,OAAO,MAAM;AAAA,MACpC;AAEA,YAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,IAEA,MAAM,oBACJ,wBACqB;AACrB,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,UAAI,WAAW,GACZ,WAAW,cAAc,EACzB,OAAO,IAAI,EACX,MAAM,UAAU,KAAK,SAAS,EAC9B,QAAQ,cAAc,KAAK,EAC3B,QAAQ,MAAM,KAAK,EACnB,MAAM,CAAC;AAEV,UAAI,uBAAuB,SAAS,GAAG;AACrC,mBAAW,SAAS;AAAA,UAAM,CAAC,OACzB,GAAG,GAAG;AAAA,YACJ,GAAG,mBAAmB,MAAM,IAAI;AAAA,YAChC,GAAG,mBAAmB,UAAU,sBAAsB;AAAA,UACxD,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,MAAM,MAAM,GACf,YAAY,cAAc,EAC1B,IAAI;AAAA,QACH,QAAQ;AAAA,QACR,cAAc;AAAA,QACd,YAAY,2BAA2B,GAAG;AAAA,QAC1C,YAAY;AAAA,MACd,CAAC,EACA;AAAA,QAAM;AAAA,QAAM;AAAA,QAAK,CAAC,OACjB,GAAG,WAAW,SAAS,GAAG,KAAK,CAAC,EAAE,OAAO,IAAI;AAAA,MAC/C,EACC,aAAa,EACb,iBAAiB;AAEpB,UAAI,CAAC,IAAK,QAAO;AACjB,aAAO,SAAS,EAAE,GAAG,KAAK,YAAY,
EAAE,CAAC;AAAA,IAC3C;AAAA,IAEA,MAAM,WAAW,OAAuC;AACtD,YAAM,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC3C,YAAM,KAAK,KAAK;AAEhB,YAAM,OAAkC;AAAA,QACtC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,MAAM,MAAM;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM;AAAA,QACd,QACE,MAAM,WAAW,SAAY,KAAK,UAAU,MAAM,MAAM,IAAI;AAAA,QAC9D,OAAO,MAAM,SAAS;AAAA,QACtB,YAAY,MAAM;AAAA,QAClB,cAAc;AAAA,MAChB;AAEA,YAAM,GAAG,WAAW,eAAe,EAAE,OAAO,IAAI,EAAE,QAAQ;AAE1D,aAAO,UAAU,IAAI;AAAA,IACvB;AAAA,IAEA,MAAM,SAAS,OAAgC;AAC7C,YAAM,OAAO,MAAM,GAChB,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,SAAS,KAAK,EACtB,QAAQ;AAEX,aAAO,KAAK,IAAI,SAAS;AAAA,IAC3B;AAAA,IAEA,MAAM,iBAAiB,OAAe,MAAoC;AACxE,YAAM,MAAM,MAAM,GACf,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,MAAM,QAAQ,KAAK,IAAI,EACvB,MAAM,UAAU,KAAK,WAAW,EAChC,iBAAiB;AAEpB,aAAO,MAAM,UAAU,GAAG,IAAI;AAAA,IAChC;AAAA,IAEA,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,KAAK,KAAK;AAEhB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,MAAM,MAAM,SAAS,SAAY,KAAK,UAAU,MAAM,IAAI,IAAI;AAAA,QAC9D,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,QAAQ,OAA+B;AAC3C,YAAM,OAAO,MAAM,GAChB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,cAAc,KAAK,EAC3B,QAAQ;AAEX,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,EACF;AACF;;;ACpkBA,SAAS,iBAAAA,sBAAqB;;;ACKvB,IAAM,iBAAN,cAA6B,MAAM;AAAA,EACxC,YAAY,OAAe;AACzB,UAAM,sBAAsB,KAAK,EAAE;AACnC,SAAK,OAAO;AAAA,EACd;AACF;AAKO,SAAS,gBAAgB,OAAwB;AACtD,SAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC9D;;;ACTO,SAAS,kBACd,KACA,SACA,SACA,cAC4C;AAC5C,MAAI,YAAY,IAAI;AACpB,MAAI,kBAAiC;AAErC,QAAM,aAAa,IAAI,gBAAgB;AAEvC,QAAM,cAAc,aAAa,GAAG,cAAc,CAAC,UAAU;AAC3D,QAAI,MAAM,UAAU,IAAI,IAAI;AAC1B,iBAAW,MAAM;AAAA,IACnB;AAAA,EACF,CAAC;AAED,QAAM,OAAoB;AAAA,IACxB,IAAI,QAAgB;AAClB,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,MAAM,IACJ,MACA,IACY;AAEZ,UAAI,WAAW,OAAO,SAAS;AAC7B,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAIA,YAAM,aAAa,MAAM,QAAQ,OAAO,IAAI,EAAE;AAC9C,UAAI,YAAY,WAAW,aAAa;A
ACtC,mBAAW,MAAM;AACjB,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAGA,UAAI,WAAW,OAAO,SAAS;AAC7B,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAGA,YAAM,eAAe,MAAM,QAAQ,iBAAiB,IAAI,IAAI,IAAI;AAChE,UAAI,cAAc;AAChB;AACA,eAAO,aAAa;AAAA,MACtB;AAGA,wBAAkB;AAGlB,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY,KAAK,IAAI;AAG3B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA,QAAQ,IAAI;AAAA,MACd,CAAC;AAED,UAAI;AAEF,cAAM,SAAS,MAAM,GAAG,WAAW,MAAM;AAGzC,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR;AAAA,QACF,CAAC;AAGD;AACA,cAAM,QAAQ,UAAU,IAAI,IAAI,EAAE,kBAAkB,UAAU,CAAC;AAG/D,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV,WAAW,YAAY;AAAA,UACvB,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,UACvB,QAAQ,IAAI;AAAA,QACd,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,cAAM,cAAc,WAAW,OAAO;AACtC,cAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAEvD,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ,cAAc,cAAc;AAAA,UACpC,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAED,qBAAa,KAAK;AAAA,UAChB,GAAI,cACA,EAAE,MAAM,cAAuB,IAC/B,EAAE,MAAM,aAAsB,OAAO,aAAa;AAAA,UACtD,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA,QAAQ,IAAI;AAAA,QACd,CAAC;AAED,YAAI,aAAa;AACf,gBAAM,IAAI,eAAe,IAAI,EAAE;AAAA,QACjC;AACA,cAAM;AAAA,MACR,UAAE;AAEA,0BAAkB;AAAA,MACpB;AAAA,IACF;AAAA,IAEA,SAAS,SAAiB,OAAgB,SAAwB;AAChE,YAAM,eAAe,EAAE,SAAS,OAAO,QAAQ;AAE/C,cAAQ,UAAU,IAAI,IAAI,EAAE,UAAU,aAAa,CAAC;AAEpD,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX;AAAA,QACA,UAAU;AAAA,QACV,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,KAAK;AAAA,MACH,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,QAAQ,IAAI;AAAA,UACZ,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,QAAQ,IAAI;AAAA,UACZ,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,SAAiB,MAAsB;A
AC3C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,QAAQ,IAAI;AAAA,UACZ,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,SAAS,YAAY;AACtC;;;AFhKO,SAAS,aACd,QACA,SACA,cACA,aACQ;AACR,MAAI,UAAU;AACd,MAAI,oBAA0C;AAC9C,MAAI,iBAAuD;AAC3D,MAAI,eAAoC;AACxC,MAAI,oBAA2D;AAC/D,MAAI,eAA8B;AAKlC,iBAAe,mBAAkC;AAC/C,UAAM,iBAAiB,IAAI;AAAA,MACzB,KAAK,IAAI,IAAI,OAAO;AAAA,IACtB,EAAE,YAAY;AACd,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAE/D,eAAW,OAAO,aAAa;AAC7B,UAAI,IAAI,cAAc,gBAAgB;AAEpC,cAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,UAC9B,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAKA,iBAAe,kBAAiC;AAC9C,QAAI,cAAc;AAChB,YAAM,QAAQ,UAAU,cAAc;AAAA,QACpC,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,EACF;AAKA,iBAAe,iBACb,OACA,SACA,QACA,WACe;AAEf,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,CAAC,cAAc,WAAW,WAAW,aAAa;AACpD;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR;AAAA,MACA,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACtC,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,KAAK,IAAI,IAAI;AAAA,MACvB,QAAQ,WAAW;AAAA,IACrB,CAAC;AAAA,EACH;AAKA,iBAAe,iBACb,OACA,SACA,OACe;AAGf,QAAI,iBAAiB,gBAAgB;AACnC;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,CAAC,cAAc,WAAW,WAAW,aAAa;AACpD;AAAA,IACF;AAEA,UAAM,eAAe,gBAAgB,KAAK;AAG1C,UAAM,QAAQ,MAAM,QAAQ,SAAS,KAAK;AAC1C,UAAM,aAAa,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,QAAQ;AAE1D,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACtC,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP,gBAAgB,YAAY,QAAQ;AAAA,MACpC,QAAQ,WAAW;AAAA,IACrB,CAAC;AAAA,EACH;AAKA,iBAAe,WACb,KACA,KACe;AAEf,mBAAe,IAAI;AAInB,wBAAoB,YAAY,MAAM;AACpC,sBAAgB,EAAE,MAAM,CAAC,UAAU;AACjC,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,gBAAgB,KAAK;AAAA,UAC5B,SAAS;AAAA,UACT,OAAO,IAAI;AAAA,QACb,CAAC;AAAA,MACH,CAAC;AAAA,IACH,GAAG,OAAO,iBAAiB;AAG3B,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN,OAAO,IAAI;AAAA,MACX,SAAS,IAAI;AAAA,MACb,OAAO,I
AAI;AAAA,MACX,QAAQ,IAAI;AAAA,IACd,CAAC;AAED,UAAM,YAAY,KAAK,IAAI;AAE3B,UAAM,EAAE,MAAM,QAAQ,IAAI;AAAA,MACxB;AAAA,MACA,IAAI;AAAA,MACJ;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,SAAS,MAAM,IAAI,GAAG,MAAM,IAAI,KAAK;AAG3C,UAAI,IAAI,cAAc;AACpB,cAAM,cAAc,IAAI,aAAa,UAAU,MAAM;AACrD,YAAI,CAAC,YAAY,SAAS;AACxB,gBAAM,IAAI,MAAM,mBAAmBC,eAAc,YAAY,KAAK,CAAC,EAAE;AAAA,QACvE;AAAA,MACF;AAEA,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,QAAQ,SAAS;AAAA,IAC/D,SAAS,OAAO;AACd,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,KAAK;AAAA,IACnD,UAAE;AACA,cAAQ;AAER,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AACA,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,iBAAe,iBAAmC;AAEhD,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAC/D,UAAM,yBAAyB,YAC5B;AAAA,MACC,CAAC,MACC,EAAE,mBAAmB;AAAA,IACzB,EACC,IAAI,CAAC,MAAM,EAAE,cAAc;AAG9B,UAAM,MAAM,MAAM,QAAQ,oBAAoB,sBAAsB;AACpE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AAGA,UAAM,MAAM,YAAY,IAAI,IAAI,OAAO;AACvC,QAAI,CAAC,KAAK;AAER,YAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,QAC9B,QAAQ;AAAA,QACR,OAAO,gBAAgB,IAAI,OAAO;AAAA,MACpC,CAAC;AACD,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,KAAK,GAAG;AAEzB,WAAO;AAAA,EACT;AAEA,iBAAe,OAAsB;AACnC,QAAI,CAAC,SAAS;AACZ;AAAA,IACF;AAEA,UAAM,SAAS,YAAY;AAEzB,YAAM,iBAAiB;AACvB,YAAM,eAAe;AAAA,IACvB;AAEA,QAAI;AACF,0BAAoB,OAAO;AAC3B,YAAM;AAAA,IACR,UAAE;AACA,0BAAoB;AAAA,IACtB;AAEA,QAAI,SAAS;AACX,uBAAiB,WAAW,MAAM,KAAK,GAAG,OAAO,eAAe;AAAA,IAClE,WAAW,cAAc;AACvB,mBAAa;AACb,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,YAAqB;AACvB,aAAO;AAAA,IACT;AAAA,IAEA,QAAc;AACZ,UAAI,SAAS;AACX;AAAA,MACF;AACA,gBAAU;AACV,WAAK;AAAA,IACP;AAAA,IAEA,MAAM,OAAsB;AAC1B,UAAI,CAAC,SAAS;AACZ;AAAA,MACF;AAEA,gBAAU;AAEV,UAAI,gBAAgB;AAClB,qBAAa,cAAc;AAC3B,yBAAiB;AAAA,MACnB;AAEA,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AAEA,UAAI,mBAAmB;AAErB,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,yBAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;AL3PA,IAAM,WAAW;AAAA,EACf,iBAAiB;AAAA,EACjB,mBAAmB;AAAA,EACnB,gBAAgB;AAClB;AA0MA,SAAS,sBAMP,OAAqB,MAAsC;AAC3D,QAAM,EAAE,IAAI,SAAS,cAAc,aAAa,OAAO,IAAI;AAE3D,QAAM,UAAmC;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAA
A,IACA,IAAI,aAAa;AAAA,IACjB,MAAM,aAAa;AAAA,IACnB,SAAS,aAAa;AAAA,IACtB,OAAO,OAAO;AAAA,IACd,MAAM,OAAO;AAAA;AAAA,IAGb,SACE,SACiE;AACjE,YAAM,aAAa,CAAC;AAEpB,iBAAW,OAAO,OAAO,KAAK,OAAO,GAAyB;AAC5D,cAAM,SAAS,QAAQ,GAAG;AAC1B,cAAM,SAAS;AAAA,UACb;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM;AAAA,QACR;AACA,mBAAW,GAAG,IAAI;AAAA,MAIpB;AAGA,YAAM,aAAa,EAAE,GAAG,MAAM,GAAG,WAAW;AAE5C,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IAEA,QAAQ,QAAQ;AAAA,IAChB,SAAS,QAAQ;AAAA,IAEjB,IAAI,QAA6B;AAC/B,aAAO,QAAQ,OAAO;AAAA,IACxB;AAAA,IAEA,OACE,MACyE;AACzE,YAAM,gBAAgB,YAAY,IAAI,IAAI;AAC1C,UAAI,CAAC,eAAe;AAClB,eAAO;AAAA,MACT;AACA,aAAO,cAAc;AAAA,IAMvB;AAAA,IAEA,UAAU,OAA6C;AAErD,UAAI,SAAS;AACb,UAAI,UAA+B;AAGnC,YAAM,cAAc,oBAAI,IAAe,CAAC,gBAAgB,YAAY,CAAC;AAErE,YAAM,mBAAgC;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,aAAO,IAAI,eAA6B;AAAA,QACtC,OAAO,CAAC,eAAe;AACrB,gBAAM,eAAe,iBAAiB;AAAA,YAAI,CAAC,SACzC,aAAa,GAAG,MAAM,CAAC,UAAU;AAC/B,kBAAI,UAAU,MAAM,UAAU,MAAO;AACrC,yBAAW,QAAQ,KAAK;AACxB,kBAAI,YAAY,IAAI,IAAI,GAAG;AACzB,yBAAS;AACT,0BAAU;AACV,2BAAW,MAAM;AAAA,cACnB;AAAA,YACF,CAAC;AAAA,UACH;AAEA,oBAAU,MAAM;AACd,uBAAW,SAAS,aAAc,OAAM;AAAA,UAC1C;AAAA,QACF;AAAA,QACA,QAAQ,MAAM;AAEZ,cAAI,CAAC,QAAQ;AACX,qBAAS;AACT,sBAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,MAAM,OAA8B;AACxC,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,+BAA+B,KAAK,EAAE;AAAA,MACxD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,QACR,OAAO;AAAA,MACT,CAAC;AAGD,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN;AAAA,QACA,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,OAAO,OAA8B;AACzC,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WA
AW,aAAa;AAC9B,cAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,MACzD;AACA,UAAI,IAAI,WAAW,UAAU;AAC3B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,wCAAwC,KAAK,EAAE;AAAA,MACjE;AACA,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,MACV,CAAC;AAGD,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN;AAAA,QACA,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAA8B;AAC5C,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AACA,YAAM,QAAQ,UAAU,KAAK;AAG7B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN;AAAA,QACA,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAyB;AAC7B,UAAI,MAAM,UAAU;AAClB;AAAA,MACF;AAEA,UAAI,MAAM,WAAW;AACnB,eAAO,MAAM;AAAA,MACf;AAEA,YAAM,YAAY,cAAc,EAAE,EAC/B,KAAK,MAAM;AACV,cAAM,WAAW;AAAA,MACnB,CAAC,EACA,QAAQ,MAAM;AACb,cAAM,YAAY;AAAA,MACpB,CAAC;AAEH,aAAO,MAAM;AAAA,IACf;AAAA,IAEA,MAAM,OAAsB;AAC1B,YAAM,KAAK,QAAQ;AACnB,WAAK,MAAM;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AACT;AAuBO,SAAS,cAQd,SAG0C;AAC1C,QAAM,SAAS;AAAA,IACb,iBAAiB,QAAQ,mBAAmB,SAAS;AAAA,IACrD,mBAAmB,QAAQ,qBAAqB,SAAS;AAAA,IACzD,gBAAgB,QAAQ,kBAAkB,SAAS;AAAA,EACrD;AAEA,QAAM,KAAK,IAAI,OAAiB,EAAE,SAAS,QAAQ,QAAQ,CAAC;AAC5D,QAAM,UAAU,oBAAoB,EAAE;AACtC,QAAM,eAAe,mBAAmB;AACxC,QAAM,cAAc,kBAAkB;AACtC,QAAM,SAAS,aAAa,QAAQ,SAAS,cAAc,WAAW;AAEtE,QAAM,QAAsB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc,QAAQ;AAAA,IACtB,WAAW;AAAA,IACX,UAAU;AAAA,EACZ;AAEA,QAAM,WAAW;AAAA,IACf;AAAA,IACA,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ,MAAM;AAChB,WAAO,SAAS,SAAS,QAAQ,IAAI;AAAA,EACvC;AAEA,SAAO;AACT;;;AQxeO,SAAS,UAKd,QAKA;AACA,SAAO;AAAA,IACL,MAAM,OAAO;AAAA,IACb,OAAO,OAAO;AAAA,IACd,QAAQ,OAAO;AAAA,IACf,KAAK,OAAO;AAAA,EACd;AAKF;;;ACvFA,IAAM,eAAe;AAAA,EACnB,gBAAgB;AAClB;AAKO,SAAS,aAAa,MAAe,SAAS,KAAe;AAClE,SAAO,IAAI,SAAS,KAAK,UAAU,IAAI,GAAG;AAAA,IACxC;AAAA,IACA,SAAS;AAAA,EACX,CAAC;AACH;AAKO,SAAS,cACd,SAC
A,SAA0B,KAChB;AACV,SAAO,aAAa,EAAE,OAAO,QAAQ,GAAG,MAAM;AAChD;AAKO,SAAS,kBAA4B;AAC1C,SAAO,aAAa,EAAE,SAAS,KAAK,CAAC;AACvC;AAKO,SAAS,sBACd,KACA,WACmB;AACnB,QAAM,QAAQ,IAAI,aAAa,IAAI,SAAS;AAC5C,MAAI,CAAC,OAAO;AACV,WAAO,cAAc,GAAG,SAAS,gCAAgC,GAAG;AAAA,EACtE;AACA,SAAO;AACT;;;AC7CA,IAAM,cAAc;AAAA,EAClB,gBAAgB;AAAA,EAChB,iBAAiB;AAAA,EACjB,YAAY;AACd;AAKA,SAAS,UAAU,MAAuB;AACxC,SAAO,SAAS,KAAK,UAAU,IAAI,CAAC;AAAA;AAAA;AACtC;AAKA,SAAS,mBAAgC;AACvC,SAAO,IAAI,YAAY;AACzB;AAKA,SAAS,UAAU,SAAsB,MAA2B;AAClE,SAAO,QAAQ,OAAO,UAAU,IAAI,CAAC;AACvC;AAKO,SAAS,kBAAkB,QAAkC;AAClE,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,QAAQ;AAAA,IACR,SAAS;AAAA,EACX,CAAC;AACH;AAKO,SAAS,0BACd,QAC4B;AAC5B,QAAM,UAAU,iBAAiB;AAEjC,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,MAAM,YAAY;AACtB,UAAI;AACF,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR,uBAAW,MAAM;AACjB;AAAA,UACF;AAEA,qBAAW,QAAQ,UAAU,SAAS,KAAK,CAAC;AAAA,QAC9C;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,IACA,SAAS;AACP,aAAO,YAAY;AAAA,IACrB;AAAA,EACF,CAAC;AACH;AAMO,SAAS,mCACd,QACA,YAC4B;AAC5B,MAAI,cAAc,GAAG;AACnB,WAAO,0BAA0B,MAAM;AAAA,EACzC;AAEA,QAAM,UAAU,iBAAiB;AACjC,MAAI,SAAS;AACb,MAAI,WAGO;AAEX,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,MAAM,YAAY;AACtB,YAAM,YAAiC;AAAA,QACrC,SAAS,CAAC,SACR,WAAW,QAAQ,UAAU,SAAS,IAAI,CAAC;AAAA,QAC7C,OAAO,MAAM;AACX,mBAAS;AACT,qBAAW,MAAM;AAAA,QACnB;AAAA,QACA,IAAI,SAAS;AACX,iBAAO;AAAA,QACT;AAAA,MACF;AACA,iBAAW,6BAA6B,WAAW,UAAU;AAE7D,UAAI;AACF,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR,qBAAS,WAAW,MAAM;AAC1B;AAAA,UACF;AACA,mBAAS,WAAW,QAAQ,KAAK;AAAA,QACnC;AAAA,MACF,SAAS,OAAO;AACd,iBAAS,QAAQ;AACjB,eAAO,YAAY;AACnB,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,IACA,SAAS;AACP,eAAS;AACT,gBAAU,QAAQ;AAClB,aAAO,YAAY;AAAA,IACrB;AAAA,EACF,CAAC;AACH;AAkBO,SAAS,iCACd,OAC4B;AAC5B,QAAM,UAAU,iBAAiB;AACjC,MAAI,SAAS;AACb,MAAI,eAA8B,CAAC;AAEnC,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,YAAY;AAChB,YAAM,gBAAqC;AAAA,QACzC,SAAS,CAAC,SAAkB;AAC1B,cAAI,OAAQ;AACZ,qBAAW,QAAQ,UAAU,SAAS,IAAI,CAAC;AAAA,QAC7C;AAAA,QACA,OAAO,MAAM;A
ACX,cAAI,OAAQ;AACZ,mBAAS;AACT,qBAAW,MAAM;AAAA,QACnB;AAAA,QACA,IAAI,SAAS;AACX,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,qBAAe,MAAM,aAAa;AAAA,IACpC;AAAA,IACA,SAAS;AACP,eAAS;AACT,iBAAW,eAAe,cAAc;AACtC,oBAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,IAAM,uBAAuB,oBAAI,IAAI;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAUM,SAAS,6BACd,OACA,YAC0D;AAC1D,MAAI,cAAc,GAAG;AACnB,WAAO,EAAE,YAAY,OAAO,SAAS,MAAM;AAAA,IAAC,EAAE;AAAA,EAChD;AAGA,QAAM,UAAU,oBAAI,IAGlB;AAGF,QAAM,WAAW,oBAAI,IAAoB;AAEzC,QAAM,aAAkC;AAAA,IACtC,QAAQ,MAAe;AACrB,UAAI,MAAM,OAAQ;AAElB,YAAM,QACJ,OAAO,SAAS,YAAY,SAAS,OAChC,OACD;AAGN,UAAI,OAAO,SAAS,qBAAqB,IAAI,MAAM,QAAQ,EAAE,GAAG;AAC9D,iBAAS,OAAO,MAAM,KAAK;AAC3B,cAAM,QAAQ,QAAQ,IAAI,MAAM,KAAK;AACrC,YAAI,OAAO;AACT,uBAAa,MAAM,KAAK;AACxB,cAAI,CAAC,MAAM,OAAQ,OAAM,QAAQ,MAAM,IAAI;AAC3C,kBAAQ,OAAO,MAAM,KAAK;AAAA,QAC5B;AAAA,MACF;AAEA,UAAI,OAAO,SAAS,kBAAkB,CAAC,OAAO,OAAO;AACnD,cAAM,QAAQ,IAAI;AAClB;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM;AACpB,YAAM,MAAM,KAAK,IAAI;AACrB,YAAM,OAAO,SAAS,IAAI,KAAK,KAAK;AAGpC,UAAI,MAAM,QAAQ,YAAY;AAC5B,iBAAS,IAAI,OAAO,GAAG;AAEvB,cAAM,QAAQ,QAAQ,IAAI,KAAK;AAC/B,YAAI,OAAO;AACT,uBAAa,MAAM,KAAK;AACxB,kBAAQ,OAAO,KAAK;AAAA,QACtB;AACA,cAAM,QAAQ,IAAI;AAClB;AAAA,MACF;AAGA,YAAM,WAAW,QAAQ,IAAI,KAAK;AAClC,UAAI,UAAU;AACZ,qBAAa,SAAS,KAAK;AAAA,MAC7B;AAEA,YAAM,QAAQ,KAAK,IAAI,GAAG,cAAc,MAAM,KAAK;AACnD,YAAM,QAAQ,WAAW,MAAM;AAC7B,cAAM,UAAU,QAAQ,IAAI,KAAK;AACjC,YAAI,CAAC,WAAW,QAAQ,UAAU,MAAO;AAEzC,gBAAQ,OAAO,KAAK;AACpB,YAAI,CAAC,MAAM,QAAQ;AACjB,mBAAS,IAAI,OAAO,KAAK,IAAI,CAAC;AAC9B,gBAAM,QAAQ,QAAQ,IAAI;AAAA,QAC5B;AAAA,MACF,GAAG,KAAK;AAER,cAAQ,IAAI,OAAO,EAAE,MAAM,MAAM,CAAC;AAAA,IACpC;AAAA,IACA,QAAQ;AAEN,iBAAW,CAAC,EAAE,KAAK,KAAK,SAAS;AAC/B,qBAAa,MAAM,KAAK;AACxB,YAAI,CAAC,MAAM,QAAQ;AACjB,gBAAM,QAAQ,MAAM,IAAI;AAAA,QAC1B;AAAA,MACF;AACA,cAAQ,MAAM;AACd,eAAS,MAAM;AACf,YAAM,MAAM;AAAA,IACd;AAAA,IACA,IAAI,SAAS;AACX,aAAO,MAAM;AAAA,IACf;AAAA,EACF;AAEA,QAAM,UAAU,MAAM;AACpB,eAAW,CAAC,EAAE,KAAK,KAAK,SAAS;AAC/B,mBAAa,MAAM,KAAK;AAAA,IAC1B;AACA,YAAQ,MAAM;AACd,aAAS,MAAM;AAAA,EACjB;AAEA,SAAO,EAAE,YAAY,QAAQ
;AAC/B;;;AChKA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,qBAA0C,IAAI,IAAI,cAAc;AAKtE,SAAS,sBACP,cACoC;AACpC,QAAM,SAAiC,CAAC;AACxC,aAAW,CAAC,KAAK,KAAK,KAAK,aAAa,QAAQ,GAAG;AACjD,QAAI,IAAI,WAAW,QAAQ,GAAG;AAC5B,aAAO,IAAI,MAAM,CAAC,CAAC,IAAI;AAAA,IACzB;AAAA,EACF;AACA,SAAO,OAAO,KAAK,MAAM,EAAE,SAAS,IAAI,SAAS;AACnD;AAMA,SAAS,eAAe,KAAgC;AACtD,QAAM,WAAW,IAAI,aAAa,OAAO,SAAS;AAClD,QAAM,cAAc,IAAI,aAAa,IAAI,QAAQ;AACjD,QAAM,aAAa,IAAI,aAAa,IAAI,OAAO;AAC/C,QAAM,cAAc,IAAI,aAAa,IAAI,QAAQ;AACjD,QAAM,SAAS,sBAAsB,IAAI,YAAY;AAGrD,MAAI,eAAe,CAAC,mBAAmB,IAAI,WAAW,GAAG;AACvD,WAAO;AAAA,MACL,mBAAmB,WAAW,qBAAqB,eAAe,KAAK,IAAI,CAAC;AAAA,MAC5E;AAAA,IACF;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,YAAY;AACd,YAAQ,OAAO,SAAS,YAAY,EAAE;AACtC,QAAI,OAAO,MAAM,KAAK,KAAK,QAAQ,GAAG;AACpC,aAAO,cAAc,iDAAiD,GAAG;AAAA,IAC3E;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,aAAa;AACf,aAAS,OAAO,SAAS,aAAa,EAAE;AACxC,QAAI,OAAO,MAAM,MAAM,KAAK,SAAS,GAAG;AACtC,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,SAAS,SAAS,IAAI,WAAW;AAAA,IAC1C,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,yBAAyB,KAA+B;AAC/D,QAAM,WAAW,IAAI,aAAa,OAAO,SAAS;AAClD,QAAM,SAAS,sBAAsB,IAAI,YAAY;AAErD,SAAO;AAAA,IACL,SAAS,SAAS,SAAS,IAAI,WAAW;AAAA,IAC1C;AAAA,EACF;AACF;AAKA,SAAS,cACP,aACA,cACS;AACT,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,YAAY,GAAG;AACvD,QAAI,YAAY,GAAG,MAAM,MAAO,QAAO;AAAA,EACzC;AACA,SAAO;AACT;AAOO,SAAS,qBAId,SACA,SACgB;AAChB,QAAM,aAAa,SAAS,iBAAiB;AAC7C,QAAM,OAAO,SAAS;AAGtB,MAAI,QAAQ,CAAC,KAAK,cAAc;AAC9B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAKA,iBAAe,kBACb,IACmB;AACnB,QAAI;AACF,aAAO,MAAM,GAAG;AAAA,IAClB,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAU,OAAM;AACrC,aAAO,cAAc,gBAAgB,KAAK,GAAG,GAAG;AAAA,IAClD;AAAA,EACF;AAGA,iBAAe,iBACb,KACA,KACA,WAC0D;AAC1D,UAAM,QAAQ,sBAAsB,KAAK,OAAO;AAChD,QAAI,iBAAiB,SAAU,QAAO;AAEtC,UAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,QAAI,CAAC,IAAK,QAAO,cAAc,iBAAiB,GAAG;AAEnD,QAAI,MAAM,eAAe,QAAQ,QAAW;AAC1C,YAAM,KAAK,YAAY,KAAiB,KAAqB;AAAA,QAC3D;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,EAAE,KAA0B,MAAM;A
AAA,EAC3C;AAIA,iBAAe,cACb,SACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,OAAQ,MAAM,QAAQ,KAAK;AAEjC,UAAI,CAAC,KAAK,SAAS;AACjB,eAAO,cAAc,uBAAuB,GAAG;AAAA,MACjD;AAEA,YAAM,MAAM,QAAQ,OAAO,KAAK,OAAO;AACvC,UAAI,CAAC,KAAK;AACR,eAAO,cAAc,kBAAkB,KAAK,OAAO,IAAI,GAAG;AAAA,MAC5D;AAGA,UAAI,MAAM,aAAa,QAAQ,QAAW;AACxC,cAAM,KAAK,UAAU,KAAiB,IAAI;AAAA,MAC5C;AAEA,YAAM,MAAM,MAAM,IAAI;AAAA,QACnB,KAAK,SAAS,CAAC;AAAA,QAChB;AAAA,UACE,gBAAgB,KAAK;AAAA,UACrB,gBAAgB,KAAK;AAAA,UACrB,QAAQ,KAAK;AAAA,QACf;AAAA,MACF;AAEA,YAAM,WAA4B,EAAE,OAAO,IAAI,GAAG;AAClD,aAAO,aAAa,QAAQ;AAAA,IAC9B,CAAC;AAAA,EACH;AAEA,iBAAe,gBACb,KACA,KACmB;AACnB,UAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,WAAW;AAC3D,QAAI,kBAAkB,SAAU,QAAO;AAEvC,UAAM,SAAS,QAAQ,UAAU,OAAO,KAAK;AAC7C,UAAM,YAAY;AAAA,MAChB,OAAO,UAAU;AAAA,MACjB;AAAA,IACF;AACA,WAAO,kBAAkB,SAAS;AAAA,EACpC;AAEA,iBAAe,WACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,gBAAgB,eAAe,GAAG;AACxC,UAAI,yBAAyB,SAAU,QAAO;AAE9C,UAAI,SAA6B;AAGjC,UAAI,MAAM,aAAa,QAAQ,QAAW;AACxC,iBAAS,MAAM,KAAK,UAAU,KAAiB,MAAM;AAAA,MACvD;AAEA,YAAM,OAAO,MAAM,QAAQ,QAAQ,MAAM;AACzC,aAAO,aAAa,KAAK,IAAI,WAAW,CAAC;AAAA,IAC3C,CAAC;AAAA,EACH;AAEA,iBAAe,UACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,MAAM;AACtD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,aAAO,aAAa,YAAY,OAAO,GAAG,CAAC;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,iBAAe,YACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,OAAO;AACvD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,QAAQ,MAAM,QAAQ,QAAQ,SAAS,OAAO,KAAK;AACzD,aAAO,aAAa,KAAK;AAAA,IAC3B,CAAC;AAAA,EACH;AAEA,iBAAe,YACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,OAAO;AACvD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,QAAQ,MAAM,OAAO,KAAK;AAChC,aAAO,gBAAgB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,iBAAe,aACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,QAAQ;AACxD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,QAAQ,OAAO,OAAO,KAAK;AACjC,aAAO,gBAAgB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,iBAAe,aACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,QAAQ;AACxD,UAAI,kBAAkB,SAAU,
QAAO;AAEvC,YAAM,QAAQ,UAAU,OAAO,KAAK;AACpC,aAAO,gBAAgB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,iBAAe,oBACb,KACA,KACmB;AACnB,QAAI;AAEJ,QAAI,QAAQ,UAAa,MAAM,oBAAoB;AACjD,YAAM,SAAS;AAAA,QACb;AAAA,MACF;AACA,eAAS,MAAM,KAAK,mBAAmB,KAAiB,MAAM;AAAA,IAChE,WAAW,QAAQ,UAAa,MAAM,WAAW;AAE/C,YAAM,SAAS;AAAA,QACb;AAAA,MACF;AACA,YAAM,SAAS,MAAM,KAAK;AAAA,QACxB;AAAA,QACA;AAAA,UACE,GAAG;AAAA,QACL;AAAA,MACF;AACA,eAAS,EAAE,SAAS,OAAO,SAAS,QAAQ,OAAO,OAAO;AAAA,IAC5D,OAAO;AACL,eAAS,yBAAyB,GAAG;AAAA,IACvC;AAEA,WAAO,oBAAoB,MAAM;AAAA,EACnC;AAEA,WAAS,oBAAoB,QAAuC;AAClE,UAAM,gBAAgB,MAAM,QAAQ,OAAO,OAAO,IAC9C,OAAO,UACP,OAAO,UACL,CAAC,OAAO,OAAO,IACf,CAAC;AACP,UAAM,eAAe,OAAO;AAE5B,UAAM,gBAAgB,CACpB,SACA,WACG;AACH,UAAI,cAAc,SAAS,KAAK,CAAC,cAAc,SAAS,OAAO;AAC7D,eAAO;AACT,UACE,iBACC,CAAC,UACA,CAAC,cAAc,QAAQ,YAAsC;AAE/D,eAAO;AACT,aAAO;AAAA,IACT;AAEA,UAAM,YAAY;AAAA,MAChB,CAAC,cAAmC;AAClC,cAAM,EAAE,YAAY,MAAM,QAAQ,IAAI;AAAA,UACpC;AAAA,UACA;AAAA,QACF;AAEA,cAAM,eAAe;AAAA,UACnB,QAAQ,GAAG,eAAe,CAAC,UAAU;AACnC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,gBAAgB,CAAC,UAAU;AACpC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,YAAY,CAAC,UAAU;AAChC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,cAAc,CAAC,UAAU;AAClC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAA
A,UAED,QAAQ,GAAG,cAAc,CAAC,UAAU;AAClC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,gBAAgB,CAAC,UAAU;AACpC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,cAAc,CAAC,UAAU;AAClC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,iBAAiB,CAAC,UAAU;AACrC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,OAAO,MAAM;AAAA,gBACb,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,eAAe,CAAC,UAAU;AACnC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,gBACd,UAAU,MAAM;AAAA,gBAChB,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH
;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO,CAAC,GAAG,cAAc,OAAO;AAAA,MAClC;AAAA,IACF;AAEA,WAAO,kBAAkB,SAAS;AAAA,EACpC;AAIA,SAAO;AAAA,IACL,MAAM,OAAO,SAAkB,UAAqC;AAClE,UAAI;AAEF,YAAI;AACJ,YAAI,MAAM,cAAc;AACtB,gBAAM,MAAM,KAAK,aAAa,OAAO;AAAA,QACvC;AAGA,YAAI,SAAS,WAAW;AACtB,gBAAM,QAAQ,UAAU;AAAA,QAC1B;AAGA,cAAM,MAAM,IAAI,IAAI,QAAQ,GAAG;AAC/B,cAAM,OAAO,IAAI,SAAS,QAAQ,UAAU,EAAE;AAC9C,cAAM,SAAS,QAAQ;AAGvB,YAAI,WAAW,OAAO;AACpB,cAAI,SAAS,aAAc,QAAO,MAAM,gBAAgB,KAAK,GAAG;AAChE,cAAI,SAAS,QAAS,QAAO,MAAM,WAAW,KAAK,GAAG;AACtD,cAAI,SAAS,OAAQ,QAAO,MAAM,UAAU,KAAK,GAAG;AACpD,cAAI,SAAS,SAAU,QAAO,MAAM,YAAY,KAAK,GAAG;AACxD,cAAI,SAAS;AACX,mBAAO,MAAM,oBAAoB,KAAK,GAAG;AAAA,QAC7C;AAGA,YAAI,WAAW,QAAQ;AACrB,cAAI,SAAS,WAAY,QAAO,MAAM,cAAc,SAAS,GAAG;AAChE,cAAI,SAAS,SAAU,QAAO,MAAM,YAAY,KAAK,GAAG;AACxD,cAAI,SAAS,UAAW,QAAO,MAAM,aAAa,KAAK,GAAG;AAAA,QAC5D;AAGA,YAAI,WAAW,UAAU;AACvB,cAAI,SAAS,OAAQ,QAAO,MAAM,aAAa,KAAK,GAAG;AAAA,QACzD;AAEA,eAAO,IAAI,SAAS,aAAa,EAAE,QAAQ,IAAI,CAAC;AAAA,MAClD,SAAS,OAAO;AAEd,YAAI,iBAAiB,SAAU,QAAO;AACtC,eAAO,cAAc,gBAAgB,KAAK,GAAG,GAAG;AAAA,MAClD;AAAA,IACF;AAAA,EACF;AACF;","names":["prettifyError","prettifyError"]}
1
+ {"version":3,"sources":["../src/durably.ts","../src/events.ts","../src/job.ts","../src/migrations.ts","../src/storage.ts","../src/worker.ts","../src/errors.ts","../src/context.ts","../src/define-job.ts","../src/http.ts","../src/sse.ts","../src/server.ts"],"sourcesContent":["import type { Dialect } from 'kysely'\nimport { Kysely } from 'kysely'\nimport type { z } from 'zod'\nimport type { JobDefinition } from './define-job'\nimport {\n type AnyEventInput,\n type DurablyEvent,\n type ErrorHandler,\n type EventEmitter,\n type EventListener,\n type EventType,\n type Unsubscribe,\n createEventEmitter,\n} from './events'\nimport {\n type JobHandle,\n type JobRegistry,\n createJobHandle,\n createJobRegistry,\n} from './job'\nimport { runMigrations } from './migrations'\nimport type { Database } from './schema'\nimport {\n type Run,\n type RunFilter,\n type Storage,\n createKyselyStorage,\n} from './storage'\nimport { type Worker, createWorker } from './worker'\n\n/**\n * Options for creating a Durably instance\n */\nexport interface DurablyOptions<\n TLabels extends Record<string, string> = Record<string, string>,\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n TJobs extends Record<string, JobDefinition<string, any, any>> = Record<\n string,\n never\n >,\n> {\n dialect: Dialect\n pollingInterval?: number\n heartbeatInterval?: number\n staleThreshold?: number\n cleanupSteps?: boolean\n /**\n * Zod schema for labels. When provided:\n * - Labels are type-checked at compile time\n * - Labels are validated at runtime on trigger()\n */\n labels?: z.ZodType<TLabels>\n /**\n * Job definitions to register. 
Shorthand for calling .register() after creation.\n * @example\n * ```ts\n * const durably = createDurably({\n * dialect,\n * jobs: { importCsv: importCsvJob, syncUsers: syncUsersJob },\n * })\n * ```\n */\n jobs?: TJobs\n}\n\n/**\n * Default configuration values\n */\nconst DEFAULTS = {\n pollingInterval: 1000,\n heartbeatInterval: 5000,\n staleThreshold: 30000,\n cleanupSteps: true,\n} as const\n\n/**\n * Plugin interface for extending Durably\n */\nexport interface DurablyPlugin {\n name: string\n // biome-ignore lint/suspicious/noExplicitAny: plugin needs to accept any Durably instance\n install(durably: Durably<any, any>): void\n}\n\n/**\n * Helper type to transform JobDefinition record to JobHandle record\n */\ntype TransformToHandles<\n TJobs extends Record<string, JobDefinition<string, unknown, unknown>>,\n TLabels extends Record<string, string> = Record<string, string>,\n> = {\n [K in keyof TJobs]: TJobs[K] extends JobDefinition<\n infer TName,\n infer TInput,\n infer TOutput\n >\n ? JobHandle<TName & string, TInput, TOutput, TLabels>\n : never\n}\n\n/**\n * Durably instance with type-safe jobs\n */\nexport interface Durably<\n TJobs extends Record<\n string,\n JobHandle<string, unknown, unknown, Record<string, string>>\n > = Record<string, never>,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n /**\n * Registered job handles (type-safe)\n */\n readonly jobs: TJobs\n\n /**\n * Initialize Durably: run migrations and start the worker\n * This is the recommended way to start Durably.\n * Equivalent to calling migrate() then start().\n * @example\n * ```ts\n * const durably = createDurably({ dialect }).register({ ... 
})\n * await durably.init()\n * ```\n */\n init(): Promise<void>\n\n /**\n * Run database migrations\n * This is idempotent and safe to call multiple times\n */\n migrate(): Promise<void>\n\n /**\n * Get the underlying Kysely database instance\n * Useful for testing and advanced use cases\n */\n readonly db: Kysely<Database>\n\n /**\n * Storage layer for database operations\n */\n readonly storage: Storage\n\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Register job definitions and return a new Durably instance with type-safe jobs\n * @example\n * ```ts\n * const durably = createDurably({ dialect })\n * .register({\n * importCsv: importCsvJob,\n * syncUsers: syncUsersJob,\n * })\n * await durably.migrate()\n * // Usage: durably.jobs.importCsv.trigger({ rows: [...] 
})\n * ```\n */\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n register<TNewJobs extends Record<string, JobDefinition<string, any, any>>>(\n jobDefs: TNewJobs,\n ): Durably<TJobs & TransformToHandles<TNewJobs, TLabels>, TLabels>\n\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Create a fresh run from a completed, failed, or cancelled run\n * @throws Error if run is pending, running, or does not exist\n */\n retrigger(runId: string): Promise<Run<TLabels>>\n\n /**\n * Cancel a pending or running run\n * @throws Error if run is already completed, failed, or cancelled\n */\n cancel(runId: string): Promise<void>\n\n /**\n * Delete a completed, failed, or cancelled run and its associated steps and logs\n * @throws Error if run is pending or running, or does not exist\n */\n deleteRun(runId: string): Promise<void>\n\n /**\n * Get a run by ID\n * @example\n * ```ts\n * // Untyped (returns Run)\n * const run = await durably.getRun(runId)\n *\n * // Typed (returns custom type)\n * type MyRun = Run & { input: { userId: string }; output: { count: number } | null }\n * const typedRun = await durably.getRun<MyRun>(runId)\n * ```\n */\n getRun<T extends Run<TLabels> = Run<TLabels>>(\n runId: string,\n ): Promise<T | null>\n\n /**\n * Get runs with optional filtering\n * @example\n * ```ts\n * // Untyped (returns Run[])\n * const runs = await durably.getRuns({ status: 'completed' })\n *\n * // Typed (returns custom type[])\n * type MyRun = Run & { input: { userId: string }; output: { count: number } | null }\n * const typedRuns = await durably.getRuns<MyRun>({ jobName: 'my-job' })\n * ```\n */\n getRuns<T extends Run<TLabels> = Run<TLabels>>(\n filter?: RunFilter<TLabels>,\n ): Promise<T[]>\n\n /**\n * Register a plugin\n */\n use(plugin: DurablyPlugin): void\n\n /**\n * Get a registered job handle by name\n * Returns 
undefined if job is not registered\n */\n getJob<TName extends string = string>(\n name: TName,\n ): JobHandle<TName, Record<string, unknown>, unknown, TLabels> | undefined\n\n /**\n * Subscribe to events for a specific run\n * Returns a ReadableStream that can be used for SSE\n */\n subscribe(runId: string): ReadableStream<DurablyEvent>\n}\n\n/**\n * Internal state shared across Durably instances\n */\ninterface DurablyState {\n db: Kysely<Database>\n storage: Storage\n eventEmitter: EventEmitter\n jobRegistry: JobRegistry\n worker: Worker\n labelsSchema: z.ZodType | undefined\n cleanupSteps: boolean\n migrating: Promise<void> | null\n migrated: boolean\n}\n\n/**\n * Create a Durably instance implementation\n */\nfunction createDurablyInstance<\n TJobs extends Record<\n string,\n JobHandle<string, unknown, unknown, Record<string, string>>\n >,\n TLabels extends Record<string, string> = Record<string, string>,\n>(state: DurablyState, jobs: TJobs): Durably<TJobs, TLabels> {\n const { db, storage, eventEmitter, jobRegistry, worker } = state\n\n async function getRunOrThrow(runId: string): Promise<Run<TLabels>> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n return run as Run<TLabels>\n }\n\n const durably: Durably<TJobs, TLabels> = {\n db,\n storage,\n jobs,\n on: eventEmitter.on,\n emit: eventEmitter.emit,\n onError: eventEmitter.onError,\n start: worker.start,\n stop: worker.stop,\n\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n register<TNewJobs extends Record<string, JobDefinition<string, any, any>>>(\n jobDefs: TNewJobs,\n ): Durably<TJobs & TransformToHandles<TNewJobs, TLabels>, TLabels> {\n const newHandles = {} as TransformToHandles<TNewJobs, TLabels>\n\n for (const key of Object.keys(jobDefs) as (keyof TNewJobs)[]) {\n const jobDef = jobDefs[key]\n const handle = createJobHandle(\n jobDef,\n storage,\n eventEmitter,\n jobRegistry,\n 
state.labelsSchema as z.ZodType<TLabels> | undefined,\n )\n newHandles[key] = handle as TransformToHandles<\n TNewJobs,\n TLabels\n >[typeof key]\n }\n\n // Create new instance with merged jobs\n const mergedJobs = { ...jobs, ...newHandles } as TJobs &\n TransformToHandles<TNewJobs, TLabels>\n return createDurablyInstance<typeof mergedJobs, TLabels>(\n state,\n mergedJobs,\n )\n },\n\n getRun: storage.getRun,\n getRuns: storage.getRuns,\n\n use(plugin: DurablyPlugin): void {\n plugin.install(durably)\n },\n\n getJob<TName extends string = string>(\n name: TName,\n ): JobHandle<TName, Record<string, unknown>, unknown, TLabels> | undefined {\n const registeredJob = jobRegistry.get(name)\n if (!registeredJob) {\n return undefined\n }\n return registeredJob.handle as JobHandle<\n TName,\n Record<string, unknown>,\n unknown,\n TLabels\n >\n },\n\n subscribe(runId: string): ReadableStream<DurablyEvent> {\n // Track closed state and cleanup function in outer scope for cancel handler\n let closed = false\n let cleanup: (() => void) | null = null\n\n // Events that close the stream after enqueuing\n const closeEvents = new Set<EventType>(['run:complete', 'run:delete'])\n // All event types to subscribe to for a run\n const subscribedEvents: EventType[] = [\n 'run:start',\n 'run:complete',\n 'run:fail',\n 'run:cancel',\n 'run:delete',\n 'run:progress',\n 'step:start',\n 'step:complete',\n 'step:fail',\n 'log:write',\n ]\n\n return new ReadableStream<DurablyEvent>({\n start: (controller) => {\n const unsubscribes = subscribedEvents.map((type) =>\n eventEmitter.on(type, (event) => {\n if (closed || event.runId !== runId) return\n controller.enqueue(event)\n if (closeEvents.has(type)) {\n closed = true\n cleanup?.()\n controller.close()\n }\n }),\n )\n\n cleanup = () => {\n for (const unsub of unsubscribes) unsub()\n }\n },\n cancel: () => {\n // Clean up event listeners when stream is cancelled by consumer\n if (!closed) {\n closed = true\n cleanup?.()\n }\n },\n })\n },\n\n 
async retrigger(runId: string): Promise<Run<TLabels>> {\n const run = await getRunOrThrow(runId)\n if (run.status === 'pending') {\n throw new Error(`Cannot retrigger pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot retrigger running run: ${runId}`)\n }\n if (!jobRegistry.get(run.jobName)) {\n throw new Error(`Unknown job: ${run.jobName}`)\n }\n\n const nextRun = await storage.createRun({\n jobName: run.jobName,\n input: run.input,\n concurrencyKey: run.concurrencyKey ?? undefined,\n labels: run.labels,\n })\n\n eventEmitter.emit({\n type: 'run:trigger',\n runId: nextRun.id,\n jobName: run.jobName,\n input: run.input,\n labels: run.labels,\n })\n\n return nextRun as Run<TLabels>\n },\n\n async cancel(runId: string): Promise<void> {\n const run = await getRunOrThrow(runId)\n if (run.status === 'completed') {\n throw new Error(`Cannot cancel completed run: ${runId}`)\n }\n if (run.status === 'failed') {\n throw new Error(`Cannot cancel failed run: ${runId}`)\n }\n if (run.status === 'cancelled') {\n throw new Error(`Cannot cancel already cancelled run: ${runId}`)\n }\n const wasPending = run.status === 'pending'\n await storage.updateRun(runId, {\n status: 'cancelled',\n completedAt: new Date().toISOString(),\n })\n\n // For pending runs, no worker will clean up steps, so do it here\n if (wasPending && state.cleanupSteps) {\n await storage.deleteSteps(runId)\n }\n\n // Emit run:cancel event\n eventEmitter.emit({\n type: 'run:cancel',\n runId,\n jobName: run.jobName,\n labels: run.labels,\n })\n },\n\n async deleteRun(runId: string): Promise<void> {\n const run = await getRunOrThrow(runId)\n if (run.status === 'pending') {\n throw new Error(`Cannot delete pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot delete running run: ${runId}`)\n }\n await storage.deleteRun(runId)\n\n // Emit run:delete event\n eventEmitter.emit({\n type: 'run:delete',\n runId,\n jobName: run.jobName,\n labels: 
run.labels,\n })\n },\n\n async migrate(): Promise<void> {\n if (state.migrated) {\n return\n }\n\n if (state.migrating) {\n return state.migrating\n }\n\n state.migrating = runMigrations(db)\n .then(() => {\n state.migrated = true\n })\n .finally(() => {\n state.migrating = null\n })\n\n return state.migrating\n },\n\n async init(): Promise<void> {\n await this.migrate()\n this.start()\n },\n }\n\n return durably\n}\n\n/**\n * Create a Durably instance\n */\n// Overload: with jobs\nexport function createDurably<\n TLabels extends Record<string, string> = Record<string, string>,\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n TJobs extends Record<string, JobDefinition<string, any, any>> = Record<\n string,\n never\n >,\n>(\n options: DurablyOptions<TLabels, TJobs> & { jobs: TJobs },\n): Durably<TransformToHandles<TJobs, TLabels>, TLabels>\n\n// Overload: without jobs\nexport function createDurably<\n TLabels extends Record<string, string> = Record<string, string>,\n>(options: DurablyOptions<TLabels>): Durably<Record<string, never>, TLabels>\n\n// Implementation\nexport function createDurably<\n TLabels extends Record<string, string> = Record<string, string>,\n // biome-ignore lint/suspicious/noExplicitAny: flexible type constraint for job definitions\n TJobs extends Record<string, JobDefinition<string, any, any>> = Record<\n string,\n never\n >,\n>(\n options: DurablyOptions<TLabels, TJobs>,\n):\n | Durably<TransformToHandles<TJobs, TLabels>, TLabels>\n | Durably<Record<string, never>, TLabels> {\n const config = {\n pollingInterval: options.pollingInterval ?? DEFAULTS.pollingInterval,\n heartbeatInterval: options.heartbeatInterval ?? DEFAULTS.heartbeatInterval,\n staleThreshold: options.staleThreshold ?? DEFAULTS.staleThreshold,\n cleanupSteps: options.cleanupSteps ?? 
DEFAULTS.cleanupSteps,\n }\n\n const db = new Kysely<Database>({ dialect: options.dialect })\n const storage = createKyselyStorage(db)\n const eventEmitter = createEventEmitter()\n const jobRegistry = createJobRegistry()\n const worker = createWorker(config, storage, eventEmitter, jobRegistry)\n\n const state: DurablyState = {\n db,\n storage,\n eventEmitter,\n jobRegistry,\n worker,\n labelsSchema: options.labels,\n cleanupSteps: config.cleanupSteps,\n migrating: null,\n migrated: false,\n }\n\n const instance = createDurablyInstance<Record<string, never>, TLabels>(\n state,\n {},\n )\n\n if (options.jobs) {\n return instance.register(options.jobs)\n }\n\n return instance\n}\n","/**\n * Base event interface\n */\nexport interface BaseEvent {\n type: string\n timestamp: string\n sequence: number\n}\n\n/**\n * Run trigger event (emitted when a job is triggered, before worker picks it up)\n */\nexport interface RunTriggerEvent extends BaseEvent {\n type: 'run:trigger'\n runId: string\n jobName: string\n input: unknown\n labels: Record<string, string>\n}\n\n/**\n * Run start event\n */\nexport interface RunStartEvent extends BaseEvent {\n type: 'run:start'\n runId: string\n jobName: string\n input: unknown\n labels: Record<string, string>\n}\n\n/**\n * Run complete event\n */\nexport interface RunCompleteEvent extends BaseEvent {\n type: 'run:complete'\n runId: string\n jobName: string\n output: unknown\n duration: number\n labels: Record<string, string>\n}\n\n/**\n * Run fail event\n */\nexport interface RunFailEvent extends BaseEvent {\n type: 'run:fail'\n runId: string\n jobName: string\n error: string\n failedStepName: string\n labels: Record<string, string>\n}\n\n/**\n * Run cancel event\n */\nexport interface RunCancelEvent extends BaseEvent {\n type: 'run:cancel'\n runId: string\n jobName: string\n labels: Record<string, string>\n}\n\n/**\n * Run delete event (emitted when a run is deleted)\n */\nexport interface RunDeleteEvent extends BaseEvent {\n type: 
'run:delete'\n runId: string\n jobName: string\n labels: Record<string, string>\n}\n\n/**\n * Progress data reported by step.progress()\n */\nexport interface ProgressData {\n current: number\n total?: number\n message?: string\n}\n\n/**\n * Run progress event\n */\nexport interface RunProgressEvent extends BaseEvent {\n type: 'run:progress'\n runId: string\n jobName: string\n progress: ProgressData\n labels: Record<string, string>\n}\n\n/**\n * Step start event\n */\nexport interface StepStartEvent extends BaseEvent {\n type: 'step:start'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n labels: Record<string, string>\n}\n\n/**\n * Step complete event\n */\nexport interface StepCompleteEvent extends BaseEvent {\n type: 'step:complete'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n output: unknown\n duration: number\n labels: Record<string, string>\n}\n\n/**\n * Step fail event\n */\nexport interface StepFailEvent extends BaseEvent {\n type: 'step:fail'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n error: string\n labels: Record<string, string>\n}\n\nexport interface StepCancelEvent extends BaseEvent {\n type: 'step:cancel'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n labels: Record<string, string>\n}\n\n/**\n * Log data reported by step.log\n */\nexport interface LogData {\n level: 'info' | 'warn' | 'error'\n message: string\n data?: unknown\n stepName?: string | null\n}\n\n/**\n * Log write event\n */\nexport interface LogWriteEvent extends BaseEvent, LogData {\n type: 'log:write'\n runId: string\n jobName: string\n labels: Record<string, string>\n stepName: string | null\n data: unknown\n}\n\n/**\n * Worker error event (internal errors like heartbeat failures)\n */\nexport interface WorkerErrorEvent extends BaseEvent {\n type: 'worker:error'\n error: string\n context: string\n runId?: string\n}\n\n/**\n * All event types as discriminated union\n 
*/\nexport type DurablyEvent =\n | RunTriggerEvent\n | RunStartEvent\n | RunCompleteEvent\n | RunFailEvent\n | RunCancelEvent\n | RunDeleteEvent\n | RunProgressEvent\n | StepStartEvent\n | StepCompleteEvent\n | StepFailEvent\n | StepCancelEvent\n | LogWriteEvent\n | WorkerErrorEvent\n\n/**\n * Event types for type-safe event names\n */\nexport type EventType = DurablyEvent['type']\n\n/**\n * Extract event by type\n */\nexport type EventByType<T extends EventType> = Extract<\n DurablyEvent,\n { type: T }\n>\n\n/**\n * Event input (without auto-generated fields)\n */\nexport type EventInput<T extends EventType> = Omit<\n EventByType<T>,\n 'timestamp' | 'sequence'\n>\n\n/**\n * All possible event inputs as a union (properly distributed)\n */\nexport type AnyEventInput =\n | EventInput<'run:trigger'>\n | EventInput<'run:start'>\n | EventInput<'run:complete'>\n | EventInput<'run:fail'>\n | EventInput<'run:cancel'>\n | EventInput<'run:delete'>\n | EventInput<'run:progress'>\n | EventInput<'step:start'>\n | EventInput<'step:complete'>\n | EventInput<'step:fail'>\n | EventInput<'step:cancel'>\n | EventInput<'log:write'>\n | EventInput<'worker:error'>\n\n/**\n * Event listener function\n */\nexport type EventListener<T extends EventType> = (event: EventByType<T>) => void\n\n/**\n * Unsubscribe function returned by on()\n */\nexport type Unsubscribe = () => void\n\n/**\n * Error handler function for listener exceptions\n */\nexport type ErrorHandler = (error: Error, event: DurablyEvent) => void\n\n/**\n * Event emitter interface\n */\nexport interface EventEmitter {\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n}\n\n/**\n * Create an event emitter\n */\nexport 
function createEventEmitter(): EventEmitter {\n const listeners = new Map<EventType, Set<EventListener<EventType>>>()\n let sequence = 0\n let errorHandler: ErrorHandler | null = null\n\n return {\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe {\n if (!listeners.has(type)) {\n listeners.set(type, new Set())\n }\n\n const typeListeners = listeners.get(type)\n typeListeners?.add(listener as unknown as EventListener<EventType>)\n\n return () => {\n typeListeners?.delete(listener as unknown as EventListener<EventType>)\n }\n },\n\n onError(handler: ErrorHandler): void {\n errorHandler = handler\n },\n\n emit(event: AnyEventInput): void {\n sequence++\n const fullEvent = {\n ...event,\n timestamp: new Date().toISOString(),\n sequence,\n } as DurablyEvent\n\n const typeListeners = listeners.get(event.type)\n if (!typeListeners) {\n return\n }\n\n for (const listener of typeListeners) {\n try {\n listener(fullEvent)\n } catch (error) {\n if (errorHandler) {\n errorHandler(\n error instanceof Error ? error : new Error(String(error)),\n fullEvent,\n )\n }\n // Continue to next listener regardless of error\n }\n }\n },\n }\n}\n","import { type z, prettifyError } from 'zod'\nimport type { JobDefinition } from './define-job'\nimport type { EventEmitter, LogData, ProgressData } from './events'\nimport type { Run, RunFilter, Storage } from './storage'\n\n// eslint-disable-next-line @typescript-eslint/no-empty-function\nconst noop = () => {}\n\n/**\n * Validate job input and throw on failure\n */\nfunction validateJobInputOrThrow<T>(\n schema: z.ZodType<T>,\n input: unknown,\n context?: string,\n): T {\n const result = schema.safeParse(input)\n if (!result.success) {\n const prefix = context ? 
`${context}: ` : ''\n throw new Error(`${prefix}Invalid input: ${prettifyError(result.error)}`)\n }\n return result.data\n}\n\n/**\n * Step context passed to the job function\n */\nexport interface StepContext {\n /**\n * The ID of the current run\n */\n readonly runId: string\n\n /**\n * Execute a step with automatic persistence and replay\n */\n run<T>(name: string, fn: (signal: AbortSignal) => T | Promise<T>): Promise<T>\n\n /**\n * Report progress for the current run\n */\n progress(current: number, total?: number, message?: string): void\n\n /**\n * Log a message\n */\n log: {\n info(message: string, data?: unknown): void\n warn(message: string, data?: unknown): void\n error(message: string, data?: unknown): void\n }\n}\n\n/**\n * Job function type\n */\nexport type JobFunction<TInput, TOutput> = (\n step: StepContext,\n input: TInput,\n) => Promise<TOutput>\n\n/**\n * Trigger options for trigger() and batchTrigger()\n */\nexport interface TriggerOptions<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n idempotencyKey?: string\n concurrencyKey?: string\n labels?: TLabels\n}\n\n/**\n * Options for triggerAndWait() (extends TriggerOptions with wait-specific options)\n */\nexport interface TriggerAndWaitOptions<\n TLabels extends Record<string, string> = Record<string, string>,\n> extends TriggerOptions<TLabels> {\n /** Timeout in milliseconds */\n timeout?: number\n /** Called when step.progress() is invoked during execution */\n onProgress?: (progress: ProgressData) => void | Promise<void>\n /** Called when step.log is invoked during execution */\n onLog?: (log: LogData) => void | Promise<void>\n}\n\n/**\n * Typed run with output type\n */\nexport interface TypedRun<\n TOutput,\n TLabels extends Record<string, string> = Record<string, string>,\n> extends Omit<Run<TLabels>, 'output'> {\n output: TOutput | null\n}\n\n/**\n * Batch trigger input - either just the input or input with options\n */\nexport type BatchTriggerInput<\n TInput,\n 
TLabels extends Record<string, string> = Record<string, string>,\n> = TInput | { input: TInput; options?: TriggerOptions<TLabels> }\n\n/**\n * Result of triggerAndWait\n */\nexport interface TriggerAndWaitResult<TOutput> {\n id: string\n output: TOutput\n}\n\n/**\n * Job handle returned by defineJob\n */\nexport interface JobHandle<\n TName extends string,\n TInput,\n TOutput,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n readonly name: TName\n\n /**\n * Trigger a new run\n */\n trigger(\n input: TInput,\n options?: TriggerOptions<TLabels>,\n ): Promise<TypedRun<TOutput, TLabels>>\n\n /**\n * Trigger a new run and wait for completion\n * Returns the output directly, throws if the run fails\n */\n triggerAndWait(\n input: TInput,\n options?: TriggerAndWaitOptions<TLabels>,\n ): Promise<TriggerAndWaitResult<TOutput>>\n\n /**\n * Trigger multiple runs in a batch\n * All inputs are validated before any runs are created\n */\n batchTrigger(\n inputs: BatchTriggerInput<TInput, TLabels>[],\n ): Promise<TypedRun<TOutput, TLabels>[]>\n\n /**\n * Get a run by ID\n */\n getRun(id: string): Promise<TypedRun<TOutput, TLabels> | null>\n\n /**\n * Get runs with optional filter\n */\n getRuns(\n filter?: Omit<RunFilter<TLabels>, 'jobName'>,\n ): Promise<TypedRun<TOutput, TLabels>[]>\n}\n\n/**\n * Internal job registration\n */\nexport interface RegisteredJob<TInput, TOutput> {\n name: string\n inputSchema: z.ZodType\n outputSchema: z.ZodType | undefined\n labelsSchema: z.ZodType | undefined\n fn: JobFunction<TInput, TOutput>\n jobDef: JobDefinition<string, TInput, TOutput>\n // biome-ignore lint/suspicious/noExplicitAny: handle may have any labels type\n handle: JobHandle<string, TInput, TOutput, any>\n}\n\n/**\n * Job registry for managing registered jobs\n */\nexport interface JobRegistry {\n /**\n * Register a job (called internally by createJobHandle)\n */\n set<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void\n\n /**\n * Get a registered 
job by name\n */\n get(name: string): RegisteredJob<unknown, unknown> | undefined\n\n /**\n * Check if a job is registered\n */\n has(name: string): boolean\n}\n\n/**\n * Create a job registry\n */\nexport function createJobRegistry(): JobRegistry {\n const jobs = new Map<string, RegisteredJob<unknown, unknown>>()\n\n return {\n set<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void {\n jobs.set(job.name, job as RegisteredJob<unknown, unknown>)\n },\n\n get(name: string): RegisteredJob<unknown, unknown> | undefined {\n return jobs.get(name)\n },\n\n has(name: string): boolean {\n return jobs.has(name)\n },\n }\n}\n\n/**\n * Create a job handle from a JobDefinition\n */\nexport function createJobHandle<\n TName extends string,\n TInput,\n TOutput,\n TLabels extends Record<string, string> = Record<string, string>,\n>(\n jobDef: JobDefinition<TName, TInput, TOutput>,\n storage: Storage,\n eventEmitter: EventEmitter,\n registry: JobRegistry,\n labelsSchema?: z.ZodType<TLabels>,\n): JobHandle<TName, TInput, TOutput, TLabels> {\n // Check if same JobDefinition is already registered (idempotent)\n const existingJob = registry.get(jobDef.name)\n if (existingJob) {\n // If same JobDefinition (same reference), return existing handle\n if (existingJob.jobDef === jobDef) {\n return existingJob.handle as JobHandle<TName, TInput, TOutput, TLabels>\n }\n // Different JobDefinition with same name - error\n throw new Error(\n `Job \"${jobDef.name}\" is already registered with a different definition`,\n )\n }\n\n const inputSchema = jobDef.input as z.ZodType<TInput>\n const outputSchema = jobDef.output as z.ZodType<TOutput> | undefined\n\n const handle: JobHandle<TName, TInput, TOutput, TLabels> = {\n name: jobDef.name,\n\n async trigger(\n input: TInput,\n options?: TriggerOptions<TLabels>,\n ): Promise<TypedRun<TOutput, TLabels>> {\n // Validate input\n const validatedInput = validateJobInputOrThrow(inputSchema, input)\n\n // Validate labels if schema provided\n if 
(labelsSchema && options?.labels) {\n validateJobInputOrThrow(labelsSchema, options.labels, 'labels')\n }\n\n // Create the run\n const run = await storage.createRun({\n jobName: jobDef.name,\n input: validatedInput,\n idempotencyKey: options?.idempotencyKey,\n concurrencyKey: options?.concurrencyKey,\n labels: options?.labels,\n })\n\n // Emit run:trigger event\n eventEmitter.emit({\n type: 'run:trigger',\n runId: run.id,\n jobName: jobDef.name,\n input: validatedInput,\n labels: run.labels,\n })\n\n return run as TypedRun<TOutput, TLabels>\n },\n\n async triggerAndWait(\n input: TInput,\n options?: TriggerAndWaitOptions<TLabels>,\n ): Promise<TriggerAndWaitResult<TOutput>> {\n // Trigger the run\n const run = await this.trigger(input, options)\n\n // Wait for completion via event subscription\n return new Promise((resolve, reject) => {\n let timeoutId: ReturnType<typeof setTimeout> | undefined\n let resolved = false\n\n const unsubscribes: (() => void)[] = []\n\n const cleanup = () => {\n if (resolved) return\n resolved = true\n for (const unsub of unsubscribes) unsub()\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n }\n\n unsubscribes.push(\n eventEmitter.on('run:complete', (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n resolve({\n id: run.id,\n output: event.output as TOutput,\n })\n }\n }),\n )\n\n unsubscribes.push(\n eventEmitter.on('run:fail', (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n reject(new Error(event.error))\n }\n }),\n )\n\n if (options?.onProgress) {\n const onProgress = options.onProgress\n unsubscribes.push(\n eventEmitter.on('run:progress', (event) => {\n if (event.runId === run.id && !resolved) {\n void Promise.resolve(onProgress(event.progress)).catch(noop)\n }\n }),\n )\n }\n\n if (options?.onLog) {\n const onLog = options.onLog\n unsubscribes.push(\n eventEmitter.on('log:write', (event) => {\n if (event.runId === run.id && !resolved) {\n const { level, message, data, stepName } = 
event\n void Promise.resolve(\n onLog({ level, message, data, stepName }),\n ).catch(noop)\n }\n }),\n )\n }\n\n // Check current status after subscribing (race condition mitigation)\n // If the run completed before we subscribed, we need to handle it\n storage\n .getRun(run.id)\n .then((currentRun) => {\n if (resolved || !currentRun) return\n if (currentRun.status === 'completed') {\n cleanup()\n resolve({\n id: run.id,\n output: currentRun.output as TOutput,\n })\n } else if (currentRun.status === 'failed') {\n cleanup()\n reject(new Error(currentRun.error || 'Run failed'))\n }\n })\n .catch((error) => {\n if (resolved) return\n cleanup()\n reject(error instanceof Error ? error : new Error(String(error)))\n })\n\n // Set timeout if specified\n if (options?.timeout !== undefined) {\n timeoutId = setTimeout(() => {\n if (!resolved) {\n cleanup()\n reject(\n new Error(`triggerAndWait timeout after ${options.timeout}ms`),\n )\n }\n }, options.timeout)\n }\n })\n },\n\n async batchTrigger(\n inputs: (TInput | { input: TInput; options?: TriggerOptions<TLabels> })[],\n ): Promise<TypedRun<TOutput, TLabels>[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Normalize inputs to { input, options } format\n const normalized = inputs.map((item) => {\n if (item && typeof item === 'object' && 'input' in item) {\n return item as { input: TInput; options?: TriggerOptions<TLabels> }\n }\n return { input: item as TInput, options: undefined }\n })\n\n // Validate all inputs and labels first (before creating any runs)\n const validated: {\n input: unknown\n options?: TriggerOptions<TLabels>\n }[] = []\n for (let i = 0; i < normalized.length; i++) {\n const validatedInput = validateJobInputOrThrow(\n inputSchema,\n normalized[i].input,\n `at index ${i}`,\n )\n if (labelsSchema && normalized[i].options?.labels) {\n validateJobInputOrThrow(\n labelsSchema,\n normalized[i].options?.labels,\n `labels at index ${i}`,\n )\n }\n validated.push({\n input: validatedInput,\n options: 
normalized[i].options,\n })\n }\n\n // Create all runs\n const runs = await storage.batchCreateRuns(\n validated.map((v) => ({\n jobName: jobDef.name,\n input: v.input,\n idempotencyKey: v.options?.idempotencyKey,\n concurrencyKey: v.options?.concurrencyKey,\n labels: v.options?.labels,\n })),\n )\n\n // Emit run:trigger events for all created runs\n for (let i = 0; i < runs.length; i++) {\n eventEmitter.emit({\n type: 'run:trigger',\n runId: runs[i].id,\n jobName: jobDef.name,\n input: validated[i].input,\n labels: runs[i].labels,\n })\n }\n\n return runs as TypedRun<TOutput, TLabels>[]\n },\n\n async getRun(id: string): Promise<TypedRun<TOutput, TLabels> | null> {\n const run = await storage.getRun(id)\n if (!run || run.jobName !== jobDef.name) {\n return null\n }\n return run as TypedRun<TOutput, TLabels>\n },\n\n async getRuns(\n filter?: Omit<RunFilter<TLabels>, 'jobName'>,\n ): Promise<TypedRun<TOutput, TLabels>[]> {\n const runs = await storage.getRuns({\n ...filter,\n jobName: jobDef.name,\n })\n return runs as TypedRun<TOutput, TLabels>[]\n },\n }\n\n // Register the job with the handle\n registry.set({\n name: jobDef.name,\n inputSchema,\n outputSchema,\n labelsSchema,\n fn: jobDef.run as JobFunction<unknown, unknown>,\n jobDef: jobDef as JobDefinition<string, TInput, TOutput>,\n handle,\n })\n\n return handle\n}\n","import type { Kysely } from 'kysely'\nimport type { Database } from './schema'\n\n/**\n * Migration definitions\n */\ninterface Migration {\n version: number\n up: (db: Kysely<Database>) => Promise<void>\n}\n\nexport const LATEST_SCHEMA_VERSION = 1\n\nconst migrations: Migration[] = [\n {\n version: 1,\n up: async (db) => {\n // Create runs table\n await db.schema\n .createTable('durably_runs')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('job_name', 'text', (col) => col.notNull())\n .addColumn('input', 'text', (col) => col.notNull())\n .addColumn('status', 'text', (col) => col.notNull())\n 
.addColumn('idempotency_key', 'text')\n .addColumn('concurrency_key', 'text')\n .addColumn('labels', 'text', (col) => col.notNull().defaultTo('{}'))\n .addColumn('current_step_index', 'integer', (col) =>\n col.notNull().defaultTo(0),\n )\n .addColumn('progress', 'text')\n .addColumn('output', 'text')\n .addColumn('error', 'text')\n .addColumn('heartbeat_at', 'text', (col) => col.notNull())\n .addColumn('started_at', 'text')\n .addColumn('completed_at', 'text')\n .addColumn('created_at', 'text', (col) => col.notNull())\n .addColumn('updated_at', 'text', (col) => col.notNull())\n .execute()\n\n // Create runs indexes\n await db.schema\n .createIndex('idx_durably_runs_job_idempotency')\n .ifNotExists()\n .on('durably_runs')\n .columns(['job_name', 'idempotency_key'])\n .unique()\n .execute()\n\n await db.schema\n .createIndex('idx_durably_runs_status_concurrency')\n .ifNotExists()\n .on('durably_runs')\n .columns(['status', 'concurrency_key'])\n .execute()\n\n await db.schema\n .createIndex('idx_durably_runs_status_created')\n .ifNotExists()\n .on('durably_runs')\n .columns(['status', 'created_at'])\n .execute()\n\n // Create steps table\n await db.schema\n .createTable('durably_steps')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('run_id', 'text', (col) => col.notNull())\n .addColumn('name', 'text', (col) => col.notNull())\n .addColumn('index', 'integer', (col) => col.notNull())\n .addColumn('status', 'text', (col) => col.notNull())\n .addColumn('output', 'text')\n .addColumn('error', 'text')\n .addColumn('started_at', 'text', (col) => col.notNull())\n .addColumn('completed_at', 'text')\n .execute()\n\n // Create steps index\n await db.schema\n .createIndex('idx_durably_steps_run_index')\n .ifNotExists()\n .on('durably_steps')\n .columns(['run_id', 'index'])\n .execute()\n\n // Create logs table\n await db.schema\n .createTable('durably_logs')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n 
.addColumn('run_id', 'text', (col) => col.notNull())\n .addColumn('step_name', 'text')\n .addColumn('level', 'text', (col) => col.notNull())\n .addColumn('message', 'text', (col) => col.notNull())\n .addColumn('data', 'text')\n .addColumn('created_at', 'text', (col) => col.notNull())\n .execute()\n\n // Create logs index\n await db.schema\n .createIndex('idx_durably_logs_run_created')\n .ifNotExists()\n .on('durably_logs')\n .columns(['run_id', 'created_at'])\n .execute()\n\n // Create schema_versions table\n await db.schema\n .createTable('durably_schema_versions')\n .ifNotExists()\n .addColumn('version', 'integer', (col) => col.primaryKey())\n .addColumn('applied_at', 'text', (col) => col.notNull())\n .execute()\n },\n },\n]\n\n/**\n * Get the current schema version from the database\n */\nasync function getCurrentVersion(db: Kysely<Database>): Promise<number> {\n try {\n const result = await db\n .selectFrom('durably_schema_versions')\n .select('version')\n .orderBy('version', 'desc')\n .limit(1)\n .executeTakeFirst()\n\n return result?.version ?? 
0\n } catch {\n // Table doesn't exist yet\n return 0\n }\n}\n\n/**\n * Run pending migrations\n */\nexport async function runMigrations(db: Kysely<Database>): Promise<void> {\n const currentVersion = await getCurrentVersion(db)\n\n for (const migration of migrations) {\n if (migration.version > currentVersion) {\n await db.transaction().execute(async (trx) => {\n await migration.up(trx)\n\n await trx\n .insertInto('durably_schema_versions')\n .values({\n version: migration.version,\n applied_at: new Date().toISOString(),\n })\n .execute()\n })\n }\n }\n}\n","import { type Kysely, sql } from 'kysely'\nimport { monotonicFactory } from 'ulidx'\nimport type { Database } from './schema'\n\nconst ulid = monotonicFactory()\n\n/**\n * Run data for creating a new run\n */\nexport interface CreateRunInput<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n jobName: string\n input: unknown\n idempotencyKey?: string\n concurrencyKey?: string\n labels?: TLabels\n}\n\n/**\n * Run data returned from storage\n */\nexport interface Run<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n id: string\n jobName: string\n input: unknown\n status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n idempotencyKey: string | null\n concurrencyKey: string | null\n currentStepIndex: number\n stepCount: number\n progress: { current: number; total?: number; message?: string } | null\n output: unknown | null\n error: string | null\n labels: TLabels\n heartbeatAt: string\n startedAt: string | null\n completedAt: string | null\n createdAt: string\n updatedAt: string\n}\n\n/**\n * Run update data\n */\nexport interface UpdateRunInput {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n currentStepIndex?: number\n progress?: { current: number; total?: number; message?: string } | null\n output?: unknown\n error?: string | null\n heartbeatAt?: string\n startedAt?: string\n completedAt?: string\n}\n\n/**\n * Run filter 
options\n */\nexport interface RunFilter<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n /** Filter by job name(s). Pass a string for one, or an array for multiple (OR). */\n jobName?: string | string[]\n /** Filter by labels (all specified labels must match) */\n labels?: { [K in keyof TLabels]?: TLabels[K] }\n /** Maximum number of runs to return */\n limit?: number\n /** Number of runs to skip (for pagination) */\n offset?: number\n}\n\n/**\n * Step data for creating a new step\n */\nexport interface CreateStepInput {\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed' | 'cancelled'\n output?: unknown\n error?: string\n startedAt: string // ISO8601 timestamp when step execution started\n}\n\n/**\n * Step data returned from storage\n */\nexport interface Step {\n id: string\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed' | 'cancelled'\n output: unknown | null\n error: string | null\n startedAt: string\n completedAt: string | null\n}\n\n/**\n * Log data for creating a new log\n */\nexport interface CreateLogInput {\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data?: unknown\n}\n\n/**\n * Log data returned from storage\n */\nexport interface Log {\n id: string\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data: unknown | null\n createdAt: string\n}\n\n/**\n * A client-safe subset of Run, excluding internal fields like\n * heartbeatAt, idempotencyKey, concurrencyKey, and updatedAt.\n */\nexport type ClientRun<\n TLabels extends Record<string, string> = Record<string, string>,\n> = Omit<\n Run<TLabels>,\n 'idempotencyKey' | 'concurrencyKey' | 'heartbeatAt' | 'updatedAt'\n>\n\n/**\n * Project a full Run to a ClientRun by stripping internal fields.\n */\nexport function toClientRun<\n TLabels extends 
Record<string, string> = Record<string, string>,\n>(run: Run<TLabels>): ClientRun<TLabels> {\n const {\n idempotencyKey,\n concurrencyKey,\n heartbeatAt,\n updatedAt,\n ...clientRun\n } = run\n return clientRun\n}\n\n/**\n * Storage interface for database operations\n */\nexport interface Storage {\n // Run operations\n createRun(input: CreateRunInput): Promise<Run>\n batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]>\n updateRun(runId: string, data: UpdateRunInput): Promise<void>\n deleteRun(runId: string): Promise<void>\n getRun<T extends Run = Run>(runId: string): Promise<T | null>\n getRuns<T extends Run = Run>(filter?: RunFilter): Promise<T[]>\n claimNextPendingRun(excludeConcurrencyKeys: string[]): Promise<Run | null>\n\n // Step operations\n createStep(input: CreateStepInput): Promise<Step>\n deleteSteps(runId: string): Promise<void>\n getSteps(runId: string): Promise<Step[]>\n getCompletedStep(runId: string, name: string): Promise<Step | null>\n\n // Log operations\n createLog(input: CreateLogInput): Promise<Log>\n getLogs(runId: string): Promise<Log[]>\n}\n\n/**\n * Convert database row to Run object\n */\n/**\n * Validate label keys: alphanumeric, dash, underscore, dot, slash only\n */\nconst LABEL_KEY_PATTERN = /^[a-zA-Z0-9\\-_./]+$/\n\nfunction validateLabels(labels: Record<string, string> | undefined): void {\n if (!labels) return\n for (const key of Object.keys(labels)) {\n if (!LABEL_KEY_PATTERN.test(key)) {\n throw new Error(\n `Invalid label key \"${key}\": must contain only alphanumeric characters, dashes, underscores, dots, and slashes`,\n )\n }\n }\n}\n\nfunction rowToRun(\n row: Database['durably_runs'] & { step_count?: number | bigint | null },\n): Run {\n return {\n id: row.id,\n jobName: row.job_name,\n input: JSON.parse(row.input),\n status: row.status,\n idempotencyKey: row.idempotency_key,\n concurrencyKey: row.concurrency_key,\n currentStepIndex: row.current_step_index,\n stepCount: Number(row.step_count ?? 
0),\n progress: row.progress ? JSON.parse(row.progress) : null,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n labels: JSON.parse(row.labels),\n heartbeatAt: row.heartbeat_at,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n createdAt: row.created_at,\n updatedAt: row.updated_at,\n }\n}\n\n/**\n * Convert database row to Step object\n */\nfunction rowToStep(row: Database['durably_steps']): Step {\n return {\n id: row.id,\n runId: row.run_id,\n name: row.name,\n index: row.index,\n status: row.status,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n }\n}\n\n/**\n * Convert database row to Log object\n */\nfunction rowToLog(row: Database['durably_logs']): Log {\n return {\n id: row.id,\n runId: row.run_id,\n stepName: row.step_name,\n level: row.level,\n message: row.message,\n data: row.data ? JSON.parse(row.data) : null,\n createdAt: row.created_at,\n }\n}\n\n/**\n * Create a Kysely-based Storage implementation\n */\nexport function createKyselyStorage(db: Kysely<Database>): Storage {\n return {\n async createRun(input: CreateRunInput): Promise<Run> {\n const now = new Date().toISOString()\n\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await db\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n return rowToRun(existing)\n }\n }\n\n validateLabels(input.labels)\n\n const id = ulid()\n const run: Database['durably_runs'] = {\n id,\n job_name: input.jobName,\n input: JSON.stringify(input.input),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n labels: JSON.stringify(input.labels ?? 
{}),\n heartbeat_at: now,\n started_at: null,\n completed_at: null,\n created_at: now,\n updated_at: now,\n }\n\n await db.insertInto('durably_runs').values(run).execute()\n\n return rowToRun(run)\n },\n\n async batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Use transaction to ensure atomicity of idempotency checks and inserts\n return await db.transaction().execute(async (trx) => {\n const now = new Date().toISOString()\n const runs: Database['durably_runs'][] = []\n\n // Validate all labels upfront\n for (const input of inputs) {\n validateLabels(input.labels)\n }\n\n // Process inputs - check idempotency keys and create run objects\n for (const input of inputs) {\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await trx\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n runs.push(existing)\n continue\n }\n }\n\n const id = ulid()\n runs.push({\n id,\n job_name: input.jobName,\n input: JSON.stringify(input.input),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n labels: JSON.stringify(input.labels ?? 
{}),\n heartbeat_at: now,\n started_at: null,\n completed_at: null,\n created_at: now,\n updated_at: now,\n })\n }\n\n // Insert all new runs in a single batch\n const newRuns = runs.filter((r) => r.created_at === now)\n if (newRuns.length > 0) {\n await trx.insertInto('durably_runs').values(newRuns).execute()\n }\n\n return runs.map(rowToRun)\n })\n },\n\n async updateRun(runId: string, data: UpdateRunInput): Promise<void> {\n const now = new Date().toISOString()\n const updates: Partial<Database['durably_runs']> = {\n updated_at: now,\n }\n\n if (data.status !== undefined) updates.status = data.status\n if (data.currentStepIndex !== undefined)\n updates.current_step_index = data.currentStepIndex\n if (data.progress !== undefined)\n updates.progress = data.progress ? JSON.stringify(data.progress) : null\n if (data.output !== undefined)\n updates.output = JSON.stringify(data.output)\n if (data.error !== undefined) updates.error = data.error\n if (data.heartbeatAt !== undefined)\n updates.heartbeat_at = data.heartbeatAt\n if (data.startedAt !== undefined) updates.started_at = data.startedAt\n if (data.completedAt !== undefined)\n updates.completed_at = data.completedAt\n\n await db\n .updateTable('durably_runs')\n .set(updates)\n .where('id', '=', runId)\n .execute()\n },\n\n async deleteRun(runId: string): Promise<void> {\n // Delete in order: logs -> steps -> run (due to foreign key constraints)\n await db.deleteFrom('durably_logs').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_steps').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_runs').where('id', '=', runId).execute()\n },\n\n async getRun<T extends Run = Run>(runId: string): Promise<T | null> {\n const row = await db\n .selectFrom('durably_runs')\n .leftJoin('durably_steps', 'durably_runs.id', 'durably_steps.run_id')\n .selectAll('durably_runs')\n .select((eb) =>\n eb.fn.count<number>('durably_steps.id').as('step_count'),\n )\n .where('durably_runs.id', '=', 
runId)\n .groupBy('durably_runs.id')\n .executeTakeFirst()\n\n return row ? (rowToRun(row) as T) : null\n },\n\n async getRuns<T extends Run = Run>(filter?: RunFilter): Promise<T[]> {\n let query = db\n .selectFrom('durably_runs')\n .leftJoin('durably_steps', 'durably_runs.id', 'durably_steps.run_id')\n .selectAll('durably_runs')\n .select((eb) =>\n eb.fn.count<number>('durably_steps.id').as('step_count'),\n )\n .groupBy('durably_runs.id')\n\n if (filter?.status) {\n query = query.where('durably_runs.status', '=', filter.status)\n }\n if (filter?.jobName) {\n if (Array.isArray(filter.jobName)) {\n if (filter.jobName.length > 0) {\n query = query.where('durably_runs.job_name', 'in', filter.jobName)\n }\n } else {\n query = query.where('durably_runs.job_name', '=', filter.jobName)\n }\n }\n if (filter?.labels) {\n const labels = filter.labels as Record<string, string>\n validateLabels(labels)\n for (const [key, value] of Object.entries(labels)) {\n if (value === undefined) continue\n query = query.where(\n sql`json_extract(durably_runs.labels, ${`$.\"${key}\"`})`,\n '=',\n value,\n )\n }\n }\n\n query = query.orderBy('durably_runs.created_at', 'desc')\n\n if (filter?.limit !== undefined) {\n query = query.limit(filter.limit)\n }\n if (filter?.offset !== undefined) {\n // SQLite requires LIMIT when using OFFSET\n if (filter.limit === undefined) {\n query = query.limit(-1) // -1 means unlimited in SQLite\n }\n query = query.offset(filter.offset)\n }\n\n const rows = await query.execute()\n return rows.map(rowToRun) as T[]\n },\n\n async claimNextPendingRun(\n excludeConcurrencyKeys: string[],\n ): Promise<Run | null> {\n const now = new Date().toISOString()\n\n let subquery = db\n .selectFrom('durably_runs')\n .select('id')\n .where('status', '=', 'pending')\n .orderBy('created_at', 'asc')\n .orderBy('id', 'asc')\n .limit(1)\n\n if (excludeConcurrencyKeys.length > 0) {\n subquery = subquery.where((eb) =>\n eb.or([\n eb('concurrency_key', 'is', null),\n 
eb('concurrency_key', 'not in', excludeConcurrencyKeys),\n ]),\n )\n }\n\n const row = await db\n .updateTable('durably_runs')\n .set({\n status: 'running',\n heartbeat_at: now,\n started_at: sql`COALESCE(started_at, ${now})`,\n updated_at: now,\n })\n .where('id', '=', (eb) =>\n eb.selectFrom(subquery.as('sub')).select('id'),\n )\n .returningAll()\n .executeTakeFirst()\n\n if (!row) return null\n return rowToRun({ ...row, step_count: 0 })\n },\n\n async createStep(input: CreateStepInput): Promise<Step> {\n const completedAt = new Date().toISOString()\n const id = ulid()\n\n const step: Database['durably_steps'] = {\n id,\n run_id: input.runId,\n name: input.name,\n index: input.index,\n status: input.status,\n output:\n input.output !== undefined ? JSON.stringify(input.output) : null,\n error: input.error ?? null,\n started_at: input.startedAt,\n completed_at: completedAt,\n }\n\n await db.insertInto('durably_steps').values(step).execute()\n\n return rowToStep(step)\n },\n\n async deleteSteps(runId: string): Promise<void> {\n await db.deleteFrom('durably_steps').where('run_id', '=', runId).execute()\n },\n\n async getSteps(runId: string): Promise<Step[]> {\n const rows = await db\n .selectFrom('durably_steps')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('index', 'asc')\n .execute()\n\n return rows.map(rowToStep)\n },\n\n async getCompletedStep(runId: string, name: string): Promise<Step | null> {\n const row = await db\n .selectFrom('durably_steps')\n .selectAll()\n .where('run_id', '=', runId)\n .where('name', '=', name)\n .where('status', '=', 'completed')\n .executeTakeFirst()\n\n return row ? rowToStep(row) : null\n },\n\n async createLog(input: CreateLogInput): Promise<Log> {\n const now = new Date().toISOString()\n const id = ulid()\n\n const log: Database['durably_logs'] = {\n id,\n run_id: input.runId,\n step_name: input.stepName,\n level: input.level,\n message: input.message,\n data: input.data !== undefined ? 
JSON.stringify(input.data) : null,\n created_at: now,\n }\n\n await db.insertInto('durably_logs').values(log).execute()\n\n return rowToLog(log)\n },\n\n async getLogs(runId: string): Promise<Log[]> {\n const rows = await db\n .selectFrom('durably_logs')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('created_at', 'asc')\n .execute()\n\n return rows.map(rowToLog)\n },\n }\n}\n","import { prettifyError } from 'zod'\nimport { createStepContext } from './context'\nimport { CancelledError, getErrorMessage } from './errors'\nimport type { EventEmitter } from './events'\nimport type { JobRegistry } from './job'\nimport type { Storage } from './storage'\n\n/**\n * Worker configuration\n */\nexport interface WorkerConfig {\n pollingInterval: number\n heartbeatInterval: number\n staleThreshold: number\n cleanupSteps: boolean\n}\n\n/**\n * Worker state\n */\nexport interface Worker {\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Check if worker is running\n */\n readonly isRunning: boolean\n}\n\n/**\n * Create a worker instance\n */\nexport function createWorker(\n config: WorkerConfig,\n storage: Storage,\n eventEmitter: EventEmitter,\n jobRegistry: JobRegistry,\n): Worker {\n let running = false\n let currentRunPromise: Promise<void> | null = null\n let pollingTimeout: ReturnType<typeof setTimeout> | null = null\n let stopResolver: (() => void) | null = null\n let heartbeatInterval: ReturnType<typeof setInterval> | null = null\n let currentRunId: string | null = null\n\n /**\n * Recover stale runs by resetting them to pending\n */\n async function recoverStaleRuns(): Promise<void> {\n const staleThreshold = new Date(\n Date.now() - config.staleThreshold,\n ).toISOString()\n const runningRuns = await storage.getRuns({ status: 'running' })\n\n for (const run of runningRuns) {\n if (run.heartbeatAt < staleThreshold) {\n // This run is stale - reset to 
pending\n await storage.updateRun(run.id, {\n status: 'pending',\n })\n }\n }\n }\n\n /**\n * Update heartbeat for current run\n */\n async function updateHeartbeat(): Promise<void> {\n if (currentRunId) {\n await storage.updateRun(currentRunId, {\n heartbeatAt: new Date().toISOString(),\n })\n }\n }\n\n /**\n * Handle successful run completion\n */\n async function handleRunSuccess(\n runId: string,\n jobName: string,\n output: unknown,\n startTime: number,\n ): Promise<void> {\n // Check if run was cancelled during execution - don't overwrite cancelled status\n const currentRun = await storage.getRun(runId)\n if (!currentRun || currentRun.status === 'cancelled') {\n return\n }\n\n await storage.updateRun(runId, {\n status: 'completed',\n output,\n completedAt: new Date().toISOString(),\n })\n\n eventEmitter.emit({\n type: 'run:complete',\n runId,\n jobName,\n output,\n duration: Date.now() - startTime,\n labels: currentRun.labels,\n })\n }\n\n /**\n * Handle failed run\n */\n async function handleRunFailure(\n runId: string,\n jobName: string,\n error: unknown,\n ): Promise<void> {\n // If the error is CancelledError, don't treat it as a failure\n // The run status is already 'cancelled'\n if (error instanceof CancelledError) {\n return\n }\n\n // Check if run was cancelled during execution - don't overwrite cancelled status\n const currentRun = await storage.getRun(runId)\n if (!currentRun || currentRun.status === 'cancelled') {\n return\n }\n\n const errorMessage = getErrorMessage(error)\n\n // Get the failed step name if available\n const steps = await storage.getSteps(runId)\n const failedStep = steps.find((s) => s.status === 'failed')\n\n await storage.updateRun(runId, {\n status: 'failed',\n error: errorMessage,\n completedAt: new Date().toISOString(),\n })\n\n eventEmitter.emit({\n type: 'run:fail',\n runId,\n jobName,\n error: errorMessage,\n failedStepName: failedStep?.name ?? 
'unknown',\n labels: currentRun.labels,\n })\n }\n\n /**\n * Execute a run with heartbeat management\n */\n async function executeRun(\n run: Awaited<ReturnType<typeof storage.getRun>> & { id: string },\n job: NonNullable<ReturnType<typeof jobRegistry.get>>,\n ): Promise<void> {\n // Track current run for heartbeat updates\n currentRunId = run.id\n\n // Start heartbeat interval\n // Errors are emitted as events but don't stop execution\n heartbeatInterval = setInterval(() => {\n updateHeartbeat().catch((error) => {\n eventEmitter.emit({\n type: 'worker:error',\n error: getErrorMessage(error),\n context: 'heartbeat',\n runId: run.id,\n })\n })\n }, config.heartbeatInterval)\n\n // Emit run:start event\n eventEmitter.emit({\n type: 'run:start',\n runId: run.id,\n jobName: run.jobName,\n input: run.input,\n labels: run.labels,\n })\n\n const startTime = Date.now()\n\n const { step, dispose } = createStepContext(\n run,\n run.jobName,\n storage,\n eventEmitter,\n )\n\n try {\n // Execute job with step context\n const output = await job.fn(step, run.input)\n\n // Validate output if schema exists\n if (job.outputSchema) {\n const parseResult = job.outputSchema.safeParse(output)\n if (!parseResult.success) {\n throw new Error(`Invalid output: ${prettifyError(parseResult.error)}`)\n }\n }\n\n await handleRunSuccess(run.id, run.jobName, output, startTime)\n } catch (error) {\n await handleRunFailure(run.id, run.jobName, error)\n } finally {\n if (config.cleanupSteps) {\n try {\n await storage.deleteSteps(run.id)\n } catch {\n // Best-effort cleanup — don't block worker teardown\n }\n }\n\n dispose()\n // Stop heartbeat interval\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n currentRunId = null\n }\n }\n\n async function processNextRun(): Promise<boolean> {\n // Get running runs to exclude their concurrency keys\n const runningRuns = await storage.getRuns({ status: 'running' })\n const excludeConcurrencyKeys = runningRuns\n 
.filter(\n (r): r is typeof r & { concurrencyKey: string } =>\n r.concurrencyKey !== null,\n )\n .map((r) => r.concurrencyKey)\n\n // Atomically claim next pending run (SELECT + UPDATE in one statement)\n const run = await storage.claimNextPendingRun(excludeConcurrencyKeys)\n if (!run) {\n return false\n }\n\n // Get the job definition\n const job = jobRegistry.get(run.jobName)\n if (!job) {\n // Unknown job - mark as failed\n await storage.updateRun(run.id, {\n status: 'failed',\n error: `Unknown job: ${run.jobName}`,\n })\n return true\n }\n\n await executeRun(run, job)\n\n return true\n }\n\n async function poll(): Promise<void> {\n if (!running) {\n return\n }\n\n const doWork = async () => {\n // Recover stale runs before processing\n await recoverStaleRuns()\n await processNextRun()\n }\n\n try {\n currentRunPromise = doWork()\n await currentRunPromise\n } finally {\n currentRunPromise = null\n }\n\n if (running) {\n pollingTimeout = setTimeout(() => poll(), config.pollingInterval)\n } else if (stopResolver) {\n stopResolver()\n stopResolver = null\n }\n }\n\n return {\n get isRunning(): boolean {\n return running\n },\n\n start(): void {\n if (running) {\n return\n }\n running = true\n poll()\n },\n\n async stop(): Promise<void> {\n if (!running) {\n return\n }\n\n running = false\n\n if (pollingTimeout) {\n clearTimeout(pollingTimeout)\n pollingTimeout = null\n }\n\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n\n if (currentRunPromise) {\n // Wait for current run to complete\n return new Promise<void>((resolve) => {\n stopResolver = resolve\n })\n }\n },\n }\n}\n","/**\n * Error thrown when a run is cancelled during execution.\n * The worker catches this error and treats it specially - it does not\n * mark the run as failed, as the run status is already 'cancelled'.\n */\nexport class CancelledError extends Error {\n constructor(runId: string) {\n super(`Run was cancelled: ${runId}`)\n this.name = 
'CancelledError'\n }\n}\n\n/**\n * Extract error message from unknown error\n */\nexport function getErrorMessage(error: unknown): string {\n return error instanceof Error ? error.message : String(error)\n}\n","import { CancelledError } from './errors'\nimport type { EventEmitter } from './events'\nimport type { StepContext } from './job'\nimport type { Run, Storage } from './storage'\n\n/**\n * Create a step context for executing a run\n */\nexport function createStepContext(\n run: Run,\n jobName: string,\n storage: Storage,\n eventEmitter: EventEmitter,\n): { step: StepContext; dispose: () => void } {\n let stepIndex = run.currentStepIndex\n let currentStepName: string | null = null\n\n const controller = new AbortController()\n\n const unsubscribe = eventEmitter.on('run:cancel', (event) => {\n if (event.runId === run.id) {\n controller.abort()\n }\n })\n\n const step: StepContext = {\n get runId(): string {\n return run.id\n },\n\n async run<T>(\n name: string,\n fn: (signal: AbortSignal) => T | Promise<T>,\n ): Promise<T> {\n // Fast path: check in-memory signal first (set by run:cancel event)\n if (controller.signal.aborted) {\n throw new CancelledError(run.id)\n }\n\n // Slow path: DB check for cases where event wasn't received\n // (e.g., run cancelled while worker was down, then resumed)\n const currentRun = await storage.getRun(run.id)\n if (currentRun?.status === 'cancelled') {\n controller.abort()\n throw new CancelledError(run.id)\n }\n\n // Check cancellation before replaying cached steps\n if (controller.signal.aborted) {\n throw new CancelledError(run.id)\n }\n\n // Check if step was already completed\n const existingStep = await storage.getCompletedStep(run.id, name)\n if (existingStep) {\n stepIndex++\n return existingStep.output as T\n }\n\n // Track current step for log attribution\n currentStepName = name\n\n // Record step start time\n const startedAt = new Date().toISOString()\n const startTime = Date.now()\n\n // Emit step:start event\n 
eventEmitter.emit({\n type: 'step:start',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n labels: run.labels,\n })\n\n try {\n // Execute the step with the abort signal\n const result = await fn(controller.signal)\n\n // Save step result\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: 'completed',\n output: result,\n startedAt,\n })\n\n // Update run's current step index\n stepIndex++\n await storage.updateRun(run.id, { currentStepIndex: stepIndex })\n\n // Emit step:complete event\n eventEmitter.emit({\n type: 'step:complete',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex: stepIndex - 1,\n output: result,\n duration: Date.now() - startTime,\n labels: run.labels,\n })\n\n return result\n } catch (error) {\n const isCancelled = controller.signal.aborted\n const errorMessage =\n error instanceof Error ? error.message : String(error)\n\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: isCancelled ? 'cancelled' : 'failed',\n error: errorMessage,\n startedAt,\n })\n\n eventEmitter.emit({\n ...(isCancelled\n ? 
{ type: 'step:cancel' as const }\n : { type: 'step:fail' as const, error: errorMessage }),\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n labels: run.labels,\n })\n\n if (isCancelled) {\n throw new CancelledError(run.id)\n }\n throw error\n } finally {\n // Clear current step after execution\n currentStepName = null\n }\n },\n\n progress(current: number, total?: number, message?: string): void {\n const progressData = { current, total, message }\n // Fire and forget - don't await\n storage.updateRun(run.id, { progress: progressData })\n // Emit progress event\n eventEmitter.emit({\n type: 'run:progress',\n runId: run.id,\n jobName,\n progress: progressData,\n labels: run.labels,\n })\n },\n\n log: {\n info(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n jobName,\n labels: run.labels,\n stepName: currentStepName,\n level: 'info',\n message,\n data,\n })\n },\n\n warn(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n jobName,\n labels: run.labels,\n stepName: currentStepName,\n level: 'warn',\n message,\n data,\n })\n },\n\n error(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n jobName,\n labels: run.labels,\n stepName: currentStepName,\n level: 'error',\n message,\n data,\n })\n },\n },\n }\n\n return { step, dispose: unsubscribe }\n}\n","import type { z } from 'zod'\nimport type { StepContext } from './job'\n\n/**\n * Job run function type\n */\nexport type JobRunFunction<TInput, TOutput> = (\n step: StepContext,\n input: TInput,\n) => Promise<TOutput>\n\n/**\n * Job definition - a standalone description of a job\n * This is the result of calling defineJob() and can be passed to durably.register()\n */\nexport interface JobDefinition<TName extends string, TInput, TOutput> {\n readonly name: TName\n readonly input: z.ZodType<TInput>\n readonly output: z.ZodType<TOutput> | undefined\n readonly 
run: JobRunFunction<TInput, TOutput>\n}\n\n/**\n * Extract input type from a JobDefinition\n * @example\n * ```ts\n * type Input = JobInput<typeof myJob> // { userId: string }\n * ```\n */\nexport type JobInput<T> =\n T extends JobDefinition<string, infer TInput, unknown> ? TInput : never\n\n/**\n * Extract output type from a JobDefinition\n * @example\n * ```ts\n * type Output = JobOutput<typeof myJob> // { count: number }\n * ```\n */\nexport type JobOutput<T> =\n T extends JobDefinition<string, unknown, infer TOutput> ? TOutput : never\n\n/**\n * Configuration for defining a job\n */\nexport interface DefineJobConfig<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined,\n> {\n name: TName\n input: TInputSchema\n output?: TOutputSchema\n run: JobRunFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >\n}\n\n/**\n * Define a job - creates a JobDefinition that can be registered with durably.register()\n *\n * @example\n * ```ts\n * import { defineJob } from '@coji/durably'\n * import { z } from 'zod'\n *\n * export const syncUsers = defineJob({\n * name: 'sync-users',\n * input: z.object({ orgId: z.string() }),\n * output: z.object({ syncedCount: z.number() }),\n * run: async (step, input) => {\n * const users = await step.run('fetch-users', () => fetchUsers(input.orgId))\n * return { syncedCount: users.length }\n * },\n * })\n * ```\n */\nexport function defineJob<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined = undefined,\n>(\n config: DefineJobConfig<TName, TInputSchema, TOutputSchema>,\n): JobDefinition<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n> {\n return {\n name: config.name,\n input: config.input,\n output: config.output,\n run: config.run,\n } as JobDefinition<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n >\n}\n","/**\n * HTTP response utilities for the Durably HTTP handler.\n * Extracted to eliminate duplication in server.ts handlers.\n */\n\nimport { getErrorMessage } from './errors'\n\nexport { getErrorMessage }\n\n/**\n * JSON response headers\n */\nconst JSON_HEADERS = {\n 'Content-Type': 'application/json',\n} as const\n\n/**\n * Create a JSON response\n */\nexport function jsonResponse(data: unknown, status = 200): Response {\n return new Response(JSON.stringify(data), {\n status,\n headers: JSON_HEADERS,\n })\n}\n\n/**\n * Create an error response with consistent format\n */\nexport function errorResponse(\n message: string,\n status: 400 | 404 | 500 = 500,\n): Response {\n return jsonResponse({ error: message }, status)\n}\n\n/**\n * Create a success response with { success: true }\n */\nexport function successResponse(): Response {\n return jsonResponse({ success: true })\n}\n\n/**\n * Get required query parameter or return error response\n */\nexport function getRequiredQueryParam(\n url: URL,\n paramName: string,\n): string | Response {\n const value = url.searchParams.get(paramName)\n if (!value) {\n return errorResponse(`${paramName} query parameter is required`, 400)\n }\n return value\n}\n","/**\n * SSE (Server-Sent Events) utilities for streaming events to clients.\n * Extracted to eliminate duplication between subscribe and runsSubscribe handlers.\n */\n\nimport type { Unsubscribe } from './events'\n\n/**\n * SSE response headers\n */\nconst SSE_HEADERS = {\n 'Content-Type': 'text/event-stream',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n} as const\n\n/**\n * Encode data as SSE format: `data: ${json}\\n\\n`\n */\nfunction formatSSE(data: unknown): string {\n return `data: ${JSON.stringify(data)}\\n\\n`\n}\n\n/**\n * Create a TextEncoder for SSE streams\n */\nfunction createSSEEncoder(): TextEncoder {\n return new TextEncoder()\n}\n\n/**\n * Encode and format data for SSE streaming\n */\nfunction 
encodeSSE(encoder: TextEncoder, data: unknown): Uint8Array {\n return encoder.encode(formatSSE(data))\n}\n\n/**\n * Create an SSE Response from a ReadableStream\n */\nexport function createSSEResponse(stream: ReadableStream): Response {\n return new Response(stream, {\n status: 200,\n headers: SSE_HEADERS,\n })\n}\n\n/**\n * Transform a ReadableStream of events into an SSE-formatted stream\n */\nexport function createSSEStreamFromReader<T>(\n reader: ReadableStreamDefaultReader<T>,\n): ReadableStream<Uint8Array> {\n const encoder = createSSEEncoder()\n\n return new ReadableStream({\n async start(controller) {\n try {\n while (true) {\n const { done, value } = await reader.read()\n if (done) {\n controller.close()\n break\n }\n\n controller.enqueue(encodeSSE(encoder, value))\n }\n } catch (error) {\n controller.error(error)\n }\n },\n cancel() {\n reader.releaseLock()\n },\n })\n}\n\n/**\n * Transform a ReadableStream of events into an SSE-formatted stream with\n * throttling for `run:progress` events.\n */\nexport function createThrottledSSEStreamFromReader<T>(\n reader: ReadableStreamDefaultReader<T>,\n throttleMs: number,\n): ReadableStream<Uint8Array> {\n if (throttleMs <= 0) {\n return createSSEStreamFromReader(reader)\n }\n\n const encoder = createSSEEncoder()\n let closed = false\n let throttle: {\n controller: SSEStreamController\n dispose: () => void\n } | null = null\n\n return new ReadableStream({\n async start(controller) {\n const innerCtrl: SSEStreamController = {\n enqueue: (data: unknown) =>\n controller.enqueue(encodeSSE(encoder, data)),\n close: () => {\n closed = true\n controller.close()\n },\n get closed() {\n return closed\n },\n }\n throttle = createThrottledSSEController(innerCtrl, throttleMs)\n\n try {\n while (true) {\n const { done, value } = await reader.read()\n if (done) {\n throttle.controller.close()\n break\n }\n throttle.controller.enqueue(value)\n }\n } catch (error) {\n throttle.dispose()\n reader.releaseLock()\n 
controller.error(error)\n }\n },\n cancel() {\n closed = true\n throttle?.dispose()\n reader.releaseLock()\n },\n })\n}\n\n/**\n * SSE stream controller with cleanup support\n */\nexport interface SSEStreamController {\n enqueue: (data: unknown) => void\n close: () => void\n readonly closed: boolean\n}\n\n/**\n * Create an SSE stream from event subscriptions.\n * Handles the common pattern of subscribing to multiple events and streaming them.\n *\n * @param setup - Function to set up event subscriptions, returns cleanup functions\n * @returns SSE Response\n */\nexport function createSSEStreamFromSubscriptions(\n setup: (controller: SSEStreamController) => Unsubscribe[],\n): ReadableStream<Uint8Array> {\n const encoder = createSSEEncoder()\n let closed = false\n let unsubscribes: Unsubscribe[] = []\n\n return new ReadableStream({\n start(controller) {\n const sseController: SSEStreamController = {\n enqueue: (data: unknown) => {\n if (closed) return\n controller.enqueue(encodeSSE(encoder, data))\n },\n close: () => {\n if (closed) return\n closed = true\n controller.close()\n },\n get closed() {\n return closed\n },\n }\n\n unsubscribes = setup(sseController)\n },\n cancel() {\n closed = true\n for (const unsubscribe of unsubscribes) {\n unsubscribe()\n }\n },\n })\n}\n\nconst TERMINAL_EVENT_TYPES = new Set([\n 'run:complete',\n 'run:fail',\n 'run:cancel',\n 'run:delete',\n])\n\n/**\n * Create an SSE stream controller that throttles `run:progress` events.\n *\n * - First progress event per run is delivered immediately\n * - Subsequent events within the throttle window are coalesced (latest wins)\n * - A trailing flush ensures the last progress is always delivered\n * - Non-progress events pass through immediately\n */\nexport function createThrottledSSEController(\n inner: SSEStreamController,\n throttleMs: number,\n): { controller: SSEStreamController; dispose: () => void } {\n if (throttleMs <= 0) {\n return { controller: inner, dispose: () => {} }\n }\n\n // 
Per-run throttle state\n const pending = new Map<\n string,\n { data: unknown; timer: ReturnType<typeof setTimeout> }\n >()\n\n // Track last send time per run for leading-edge delivery\n const lastSent = new Map<string, number>()\n\n const controller: SSEStreamController = {\n enqueue(data: unknown) {\n if (inner.closed) return\n\n const event =\n typeof data === 'object' && data !== null\n ? (data as { type?: string; runId?: string })\n : null\n\n // Flush and clean up throttle state for terminal run events\n if (event?.runId && TERMINAL_EVENT_TYPES.has(event.type ?? '')) {\n lastSent.delete(event.runId)\n const entry = pending.get(event.runId)\n if (entry) {\n clearTimeout(entry.timer)\n if (!inner.closed) inner.enqueue(entry.data)\n pending.delete(event.runId)\n }\n }\n\n if (event?.type !== 'run:progress' || !event?.runId) {\n inner.enqueue(data)\n return\n }\n\n const runId = event.runId\n const now = Date.now()\n const last = lastSent.get(runId) ?? 0\n\n // Leading edge: send immediately if enough time has passed\n if (now - last >= throttleMs) {\n lastSent.set(runId, now)\n // Clear any pending flush for this run\n const entry = pending.get(runId)\n if (entry) {\n clearTimeout(entry.timer)\n pending.delete(runId)\n }\n inner.enqueue(data)\n return\n }\n\n // Trailing edge: buffer latest and schedule flush\n const existing = pending.get(runId)\n if (existing) {\n clearTimeout(existing.timer)\n }\n\n const delay = Math.max(0, throttleMs - (now - last))\n const timer = setTimeout(() => {\n const current = pending.get(runId)\n if (!current || current.timer !== timer) return\n\n pending.delete(runId)\n if (!inner.closed) {\n lastSent.set(runId, Date.now())\n inner.enqueue(current.data)\n }\n }, delay)\n\n pending.set(runId, { data, timer })\n },\n close() {\n // Flush all pending progress events before closing\n for (const [, entry] of pending) {\n clearTimeout(entry.timer)\n if (!inner.closed) {\n inner.enqueue(entry.data)\n }\n }\n pending.clear()\n 
lastSent.clear()\n inner.close()\n },\n get closed() {\n return inner.closed\n },\n }\n\n const dispose = () => {\n for (const [, entry] of pending) {\n clearTimeout(entry.timer)\n }\n pending.clear()\n lastSent.clear()\n }\n\n return { controller, dispose }\n}\n","import type { Durably } from './durably'\nimport type { AnyEventInput } from './events'\nimport {\n errorResponse,\n getErrorMessage,\n getRequiredQueryParam,\n jsonResponse,\n successResponse,\n} from './http'\nimport {\n createSSEResponse,\n createSSEStreamFromSubscriptions,\n createThrottledSSEController,\n createThrottledSSEStreamFromReader,\n type SSEStreamController,\n} from './sse'\nimport type { Run, RunFilter } from './storage'\nimport { toClientRun } from './storage'\n\n/**\n * Run operation types for onRunAccess\n */\nexport type RunOperation =\n | 'read'\n | 'subscribe'\n | 'steps'\n | 'retrigger'\n | 'cancel'\n | 'delete'\n\n/**\n * Subscription filter — only fields that SSE subscriptions actually support.\n */\nexport type RunsSubscribeFilter<\n TLabels extends Record<string, string> = Record<string, string>,\n> = Pick<RunFilter<TLabels>, 'jobName' | 'labels'>\n\n/**\n * Request body for triggering a job\n */\nexport interface TriggerRequest<\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n jobName: string\n input: unknown\n idempotencyKey?: string\n concurrencyKey?: string\n labels?: TLabels\n}\n\n/**\n * Response for trigger endpoint\n */\nexport interface TriggerResponse {\n runId: string\n}\n\n/**\n * Auth middleware configuration.\n * When `auth` is set, `authenticate` is required.\n * TContext is inferred from authenticate's return type.\n * TLabels is inferred from the Durably instance.\n */\nexport interface AuthConfig<\n TContext,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n /** Authenticate every request. Return context or throw Response to reject. 
*/\n authenticate: (request: Request) => Promise<TContext> | TContext\n\n /** Guard before trigger. Called after body validation and job resolution. */\n onTrigger?: (\n ctx: TContext,\n trigger: TriggerRequest<TLabels>,\n ) => Promise<void> | void\n\n /** Guard before run-level operations. Run is pre-fetched. */\n onRunAccess?: (\n ctx: TContext,\n run: Run<TLabels>,\n info: { operation: RunOperation },\n ) => Promise<void> | void\n\n /** Scope runs list queries (GET /runs). */\n scopeRuns?: (\n ctx: TContext,\n filter: RunFilter<TLabels>,\n ) => RunFilter<TLabels> | Promise<RunFilter<TLabels>>\n\n /** Scope runs subscribe stream (GET /runs/subscribe). Falls back to scopeRuns if not set. */\n scopeRunsSubscribe?: (\n ctx: TContext,\n filter: RunsSubscribeFilter<TLabels>,\n ) => RunsSubscribeFilter<TLabels> | Promise<RunsSubscribeFilter<TLabels>>\n}\n\n/**\n * Handler interface for HTTP endpoints\n */\nexport interface DurablyHandler {\n /**\n * Handle all Durably HTTP requests with automatic routing + auth\n *\n * Routes:\n * - GET {basePath}/subscribe?runId=xxx - SSE stream\n * - GET {basePath}/runs - List runs\n * - GET {basePath}/runs/subscribe - SSE stream of run updates\n * - GET {basePath}/run?runId=xxx - Get single run\n * - GET {basePath}/steps?runId=xxx - Get steps\n * - POST {basePath}/trigger - Trigger a job\n * - POST {basePath}/retrigger?runId=xxx - Create a fresh run from a terminal run\n * - POST {basePath}/cancel?runId=xxx - Cancel a run\n * - DELETE {basePath}/run?runId=xxx - Delete a run\n */\n handle(request: Request, basePath: string): Promise<Response>\n}\n\n/**\n * Options for createDurablyHandler\n */\nexport interface CreateDurablyHandlerOptions<\n TContext = undefined,\n TLabels extends Record<string, string> = Record<string, string>,\n> {\n /**\n * Called before handling each request (after authentication).\n * Use this to initialize Durably (migrate, start worker, etc.)\n */\n onRequest?: () => Promise<void> | void\n\n /**\n * Throttle 
interval in milliseconds for SSE progress events.\n * @default 100\n */\n sseThrottleMs?: number\n\n /**\n * Auth middleware. When set, authenticate is required and auth applies to ALL endpoints.\n */\n auth?: AuthConfig<TContext, TLabels>\n}\n\n/**\n * Valid status values for runs\n */\nconst VALID_STATUSES = [\n 'pending',\n 'running',\n 'completed',\n 'failed',\n 'cancelled',\n] as const satisfies readonly RunFilter['status'][]\n\nconst VALID_STATUSES_SET: ReadonlySet<string> = new Set(VALID_STATUSES)\n\n/**\n * Parse label.* query params into a Record<string, string>\n */\nfunction parseLabelsFromParams(\n searchParams: URLSearchParams,\n): Record<string, string> | undefined {\n const labels: Record<string, string> = {}\n for (const [key, value] of searchParams.entries()) {\n if (key.startsWith('label.')) {\n labels[key.slice(6)] = value\n }\n }\n return Object.keys(labels).length > 0 ? labels : undefined\n}\n\n/**\n * Parse and validate RunFilter from query params.\n * Returns the filter or an error Response.\n */\nfunction parseRunFilter(url: URL): RunFilter | Response {\n const jobNames = url.searchParams.getAll('jobName')\n const statusParam = url.searchParams.get('status')\n const limitParam = url.searchParams.get('limit')\n const offsetParam = url.searchParams.get('offset')\n const labels = parseLabelsFromParams(url.searchParams)\n\n // Validate status\n if (statusParam && !VALID_STATUSES_SET.has(statusParam)) {\n return errorResponse(\n `Invalid status: ${statusParam}. 
Must be one of: ${VALID_STATUSES.join(', ')}`,\n 400,\n )\n }\n\n // Validate limit\n let limit: number | undefined\n if (limitParam) {\n limit = Number.parseInt(limitParam, 10)\n if (Number.isNaN(limit) || limit < 0) {\n return errorResponse('Invalid limit: must be a non-negative integer', 400)\n }\n }\n\n // Validate offset\n let offset: number | undefined\n if (offsetParam) {\n offset = Number.parseInt(offsetParam, 10)\n if (Number.isNaN(offset) || offset < 0) {\n return errorResponse(\n 'Invalid offset: must be a non-negative integer',\n 400,\n )\n }\n }\n\n return {\n jobName: jobNames.length > 0 ? jobNames : undefined,\n status: statusParam as RunFilter['status'],\n labels,\n limit,\n offset,\n }\n}\n\n/**\n * Parse RunsSubscribeFilter from query params.\n */\nfunction parseRunsSubscribeFilter(url: URL): RunsSubscribeFilter {\n const jobNames = url.searchParams.getAll('jobName')\n const labels = parseLabelsFromParams(url.searchParams)\n\n return {\n jobName: jobNames.length > 0 ? jobNames : undefined,\n labels,\n }\n}\n\n/**\n * Check if event labels match filter labels (all filter labels must match)\n */\nfunction matchesLabels(\n eventLabels: Record<string, string>,\n filterLabels: Record<string, string>,\n): boolean {\n for (const [key, value] of Object.entries(filterLabels)) {\n if (eventLabels[key] !== value) return false\n }\n return true\n}\n\n/**\n * Create HTTP handlers for Durably\n * Uses Web Standard Request/Response for framework-agnostic usage\n */\n// biome-ignore lint/suspicious/noExplicitAny: TLabels must be inferred from Durably instance\nexport function createDurablyHandler<\n TContext = undefined,\n TLabels extends Record<string, string> = Record<string, string>,\n>(\n durably: Durably<any, TLabels>,\n options?: CreateDurablyHandlerOptions<TContext, TLabels>,\n): DurablyHandler {\n const throttleMs = options?.sseThrottleMs ?? 
100\n const auth = options?.auth\n\n // Validate: auth requires authenticate\n if (auth && !auth.authenticate) {\n throw new Error(\n 'createDurablyHandler: auth.authenticate is required when auth is provided',\n )\n }\n\n // --- Shared helpers ---\n\n /** Wrap handler with try/catch that re-throws Response and catches everything else as 500 */\n async function withErrorHandling(\n fn: () => Promise<Response>,\n ): Promise<Response> {\n try {\n return await fn()\n } catch (error) {\n if (error instanceof Response) throw error\n return errorResponse(getErrorMessage(error), 500)\n }\n }\n\n /** Fetch run, check auth, return run or error Response */\n async function requireRunAccess(\n url: URL,\n ctx: TContext | undefined,\n operation: RunOperation,\n ): Promise<{ run: Run<TLabels>; runId: string } | Response> {\n const runId = getRequiredQueryParam(url, 'runId')\n if (runId instanceof Response) return runId\n\n const run = await durably.getRun(runId)\n if (!run) return errorResponse('Run not found', 404)\n\n if (auth?.onRunAccess && ctx !== undefined) {\n await auth.onRunAccess(ctx as TContext, run as Run<TLabels>, {\n operation,\n })\n }\n\n return { run: run as Run<TLabels>, runId }\n }\n\n // --- Private endpoint handlers (closure-scoped, not exposed on returned object) ---\n\n async function handleTrigger(\n request: Request,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const body = (await request.json()) as TriggerRequest<TLabels>\n\n if (!body.jobName) {\n return errorResponse('jobName is required', 400)\n }\n\n const job = durably.getJob(body.jobName)\n if (!job) {\n return errorResponse(`Job not found: ${body.jobName}`, 404)\n }\n\n // Auth hook: onTrigger (after validation)\n if (auth?.onTrigger && ctx !== undefined) {\n await auth.onTrigger(ctx as TContext, body)\n }\n\n const run = await job.trigger(\n (body.input ?? 
{}) as Record<string, unknown>,\n {\n idempotencyKey: body.idempotencyKey,\n concurrencyKey: body.concurrencyKey,\n labels: body.labels,\n },\n )\n\n const response: TriggerResponse = { runId: run.id }\n return jsonResponse(response)\n })\n }\n\n async function handleSubscribe(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n const result = await requireRunAccess(url, ctx, 'subscribe')\n if (result instanceof Response) return result\n\n const stream = durably.subscribe(result.runId)\n const sseStream = createThrottledSSEStreamFromReader(\n stream.getReader() as ReadableStreamDefaultReader<AnyEventInput>,\n throttleMs,\n )\n return createSSEResponse(sseStream)\n }\n\n async function handleRuns(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const filterOrError = parseRunFilter(url)\n if (filterOrError instanceof Response) return filterOrError\n\n let filter: RunFilter<TLabels> = filterOrError as RunFilter<TLabels>\n\n // Auth hook: scopeRuns\n if (auth?.scopeRuns && ctx !== undefined) {\n filter = await auth.scopeRuns(ctx as TContext, filter)\n }\n\n const runs = await durably.getRuns(filter)\n return jsonResponse(runs.map(toClientRun))\n })\n }\n\n async function handleRun(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'read')\n if (result instanceof Response) return result\n\n return jsonResponse(toClientRun(result.run))\n })\n }\n\n async function handleSteps(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'steps')\n if (result instanceof Response) return result\n\n const steps = await durably.storage.getSteps(result.runId)\n return jsonResponse(steps)\n })\n }\n\n async function handleRetrigger(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n 
return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'retrigger')\n if (result instanceof Response) return result\n\n const run = await durably.retrigger(result.runId)\n return jsonResponse({ success: true, runId: run.id })\n })\n }\n\n async function handleCancel(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'cancel')\n if (result instanceof Response) return result\n\n await durably.cancel(result.runId)\n return successResponse()\n })\n }\n\n async function handleDelete(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n return withErrorHandling(async () => {\n const result = await requireRunAccess(url, ctx, 'delete')\n if (result instanceof Response) return result\n\n await durably.deleteRun(result.runId)\n return successResponse()\n })\n }\n\n async function handleRunsSubscribe(\n url: URL,\n ctx: TContext | undefined,\n ): Promise<Response> {\n let filter: RunsSubscribeFilter<TLabels>\n\n if (ctx !== undefined && auth?.scopeRunsSubscribe) {\n const parsed = parseRunsSubscribeFilter(\n url,\n ) as RunsSubscribeFilter<TLabels>\n filter = await auth.scopeRunsSubscribe(ctx as TContext, parsed)\n } else if (ctx !== undefined && auth?.scopeRuns) {\n // Fallback: use scopeRuns with subscribe-compatible filter\n const parsed = parseRunsSubscribeFilter(\n url,\n ) as RunsSubscribeFilter<TLabels>\n const scoped = await auth.scopeRuns(\n ctx as TContext,\n {\n ...parsed,\n } as RunFilter<TLabels>,\n )\n filter = { jobName: scoped.jobName, labels: scoped.labels }\n } else {\n filter = parseRunsSubscribeFilter(url) as RunsSubscribeFilter<TLabels>\n }\n\n return createRunsSSEStream(filter)\n }\n\n function createRunsSSEStream(filter: RunsSubscribeFilter): Response {\n const jobNameFilter = Array.isArray(filter.jobName)\n ? filter.jobName\n : filter.jobName\n ? 
[filter.jobName]\n : []\n const labelsFilter = filter.labels\n\n const matchesFilter = (\n jobName: string,\n labels?: Record<string, string>,\n ) => {\n if (jobNameFilter.length > 0 && !jobNameFilter.includes(jobName))\n return false\n if (\n labelsFilter &&\n (!labels ||\n !matchesLabels(labels, labelsFilter as Record<string, string>))\n )\n return false\n return true\n }\n\n const sseStream = createSSEStreamFromSubscriptions(\n (innerCtrl: SSEStreamController) => {\n const { controller: ctrl, dispose } = createThrottledSSEController(\n innerCtrl,\n throttleMs,\n )\n\n const unsubscribes = [\n durably.on('run:trigger', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:trigger',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:start', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:start',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:complete', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:complete',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:fail', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:fail',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:cancel', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:cancel',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:delete', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'run:delete',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('run:progress', (event) => {\n if (matchesFilter(event.jobName, 
event.labels)) {\n ctrl.enqueue({\n type: 'run:progress',\n runId: event.runId,\n jobName: event.jobName,\n progress: event.progress,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:start', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:start',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:complete', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:complete',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:fail', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:fail',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n error: event.error,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('step:cancel', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'step:cancel',\n runId: event.runId,\n jobName: event.jobName,\n stepName: event.stepName,\n stepIndex: event.stepIndex,\n labels: event.labels,\n })\n }\n }),\n\n durably.on('log:write', (event) => {\n if (matchesFilter(event.jobName, event.labels)) {\n ctrl.enqueue({\n type: 'log:write',\n runId: event.runId,\n jobName: event.jobName,\n labels: event.labels,\n stepName: event.stepName,\n level: event.level,\n message: event.message,\n data: event.data,\n })\n }\n }),\n ]\n\n return [...unsubscribes, dispose]\n },\n )\n\n return createSSEResponse(sseStream)\n }\n\n // --- Public API: only handle() ---\n\n return {\n async handle(request: Request, basePath: string): Promise<Response> {\n try {\n // 1. 
Authenticate (fail fast before anything else)\n let ctx: TContext | undefined\n if (auth?.authenticate) {\n ctx = await auth.authenticate(request)\n }\n\n // 2. Run onRequest hook (lazy init: migrations, worker start)\n if (options?.onRequest) {\n await options.onRequest()\n }\n\n // 3. Route by path + method\n const url = new URL(request.url)\n const path = url.pathname.replace(basePath, '')\n const method = request.method\n\n // GET routes\n if (method === 'GET') {\n if (path === '/subscribe') return await handleSubscribe(url, ctx)\n if (path === '/runs') return await handleRuns(url, ctx)\n if (path === '/run') return await handleRun(url, ctx)\n if (path === '/steps') return await handleSteps(url, ctx)\n if (path === '/runs/subscribe')\n return await handleRunsSubscribe(url, ctx)\n }\n\n // POST routes\n if (method === 'POST') {\n if (path === '/trigger') return await handleTrigger(request, ctx)\n if (path === '/retrigger') return await handleRetrigger(url, ctx)\n if (path === '/cancel') return await handleCancel(url, ctx)\n }\n\n // DELETE routes\n if (method === 'DELETE') {\n if (path === '/run') return await handleDelete(url, ctx)\n }\n\n return new Response('Not Found', { status: 404 })\n } catch (error) {\n // Auth hooks throw Response to reject — return as-is\n if (error instanceof Response) return error\n return errorResponse(getErrorMessage(error), 500)\n }\n },\n 
}\n}\n"],"mappings":";;;;;AACA,SAAS,cAAc;;;AC8QhB,SAAS,qBAAmC;AACjD,QAAM,YAAY,oBAAI,IAA8C;AACpE,MAAI,WAAW;AACf,MAAI,eAAoC;AAExC,SAAO;AAAA,IACL,GAAwB,MAAS,UAAyC;AACxE,UAAI,CAAC,UAAU,IAAI,IAAI,GAAG;AACxB,kBAAU,IAAI,MAAM,oBAAI,IAAI,CAAC;AAAA,MAC/B;AAEA,YAAM,gBAAgB,UAAU,IAAI,IAAI;AACxC,qBAAe,IAAI,QAA+C;AAElE,aAAO,MAAM;AACX,uBAAe,OAAO,QAA+C;AAAA,MACvE;AAAA,IACF;AAAA,IAEA,QAAQ,SAA6B;AACnC,qBAAe;AAAA,IACjB;AAAA,IAEA,KAAK,OAA4B;AAC/B;AACA,YAAM,YAAY;AAAA,QAChB,GAAG;AAAA,QACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC;AAAA,MACF;AAEA,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAC9C,UAAI,CAAC,eAAe;AAClB;AAAA,MACF;AAEA,iBAAW,YAAY,eAAe;AACpC,YAAI;AACF,mBAAS,SAAS;AAAA,QACpB,SAAS,OAAO;AACd,cAAI,cAAc;AAChB;AAAA,cACE,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,cACxD;AAAA,YACF;AAAA,UACF;AAAA,QAEF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;AClUA,SAAiB,qBAAqB;AAMtC,IAAM,OAAO,MAAM;AAAC;AAKpB,SAAS,wBACP,QACA,OACA,SACG;AACH,QAAM,SAAS,OAAO,UAAU,KAAK;AACrC,MAAI,CAAC,OAAO,SAAS;AACnB,UAAM,SAAS,UAAU,GAAG,OAAO,OAAO;AAC1C,UAAM,IAAI,MAAM,GAAG,MAAM,kBAAkB,cAAc,OAAO,KAAK,CAAC,EAAE;AAAA,EAC1E;AACA,SAAO,OAAO;AAChB;AAgLO,SAAS,oBAAiC;AAC/C,QAAM,OAAO,oBAAI,IAA6C;AAE9D,SAAO;AAAA,IACL,IAAqB,KAA2C;AAC9D,WAAK,IAAI,IAAI,MAAM,GAAsC;AAAA,IAC3D;AAAA,IAEA,IAAI,MAA2D;AAC7D,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,IAEA,IAAI,MAAuB;AACzB,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,EACF;AACF;AAKO,SAAS,gBAMd,QACA,SACA,cACA,UACA,cAC4C;AAE5C,QAAM,cAAc,SAAS,IAAI,OAAO,IAAI;AAC5C,MAAI,aAAa;AAEf,QAAI,YAAY,WAAW,QAAQ;AACjC,aAAO,YAAY;AAAA,IACrB;AAEA,UAAM,IAAI;AAAA,MACR,QAAQ,OAAO,IAAI;AAAA,IACrB;AAAA,EACF;AAEA,QAAM,cAAc,OAAO;AAC3B,QAAM,eAAe,OAAO;AAE5B,QAAM,SAAqD;AAAA,IACzD,MAAM,OAAO;AAAA,IAEb,MAAM,QACJ,OACA,SACqC;AAErC,YAAM,iBAAiB,wBAAwB,aAAa,KAAK;AAGjE,UAAI,gBAAgB,SAAS,QAAQ;AACnC,gCAAwB,cAAc,QAAQ,QAAQ,QAAQ;AAAA,MAChE;AAGA,YAAM,MAAM,MAAM,QAAQ,UAAU;AAAA,QAClC,SAAS,OAAO;AAAA,QAChB,OAAO;AAAA,QACP,gBAAgB,SAAS;AAAA,QACzB,gBAAgB,SAAS;AAAA,QACzB,QAAQ,SAAS;AAAA,MACnB,CAAC;AAGD,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX,SAAS,OAAO;AAAA,QAChB,OAAO;AAAA,QACP,
QAAQ,IAAI;AAAA,MACd,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,eACJ,OACA,SACwC;AAExC,YAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,OAAO;AAG7C,aAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAI;AACJ,YAAI,WAAW;AAEf,cAAM,eAA+B,CAAC;AAEtC,cAAM,UAAU,MAAM;AACpB,cAAI,SAAU;AACd,qBAAW;AACX,qBAAW,SAAS,aAAc,OAAM;AACxC,cAAI,WAAW;AACb,yBAAa,SAAS;AAAA,UACxB;AAAA,QACF;AAEA,qBAAa;AAAA,UACX,aAAa,GAAG,gBAAgB,CAAC,UAAU;AACzC,gBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAQ;AACR,sBAAQ;AAAA,gBACN,IAAI,IAAI;AAAA,gBACR,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAEA,qBAAa;AAAA,UACX,aAAa,GAAG,YAAY,CAAC,UAAU;AACrC,gBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAQ;AACR,qBAAO,IAAI,MAAM,MAAM,KAAK,CAAC;AAAA,YAC/B;AAAA,UACF,CAAC;AAAA,QACH;AAEA,YAAI,SAAS,YAAY;AACvB,gBAAM,aAAa,QAAQ;AAC3B,uBAAa;AAAA,YACX,aAAa,GAAG,gBAAgB,CAAC,UAAU;AACzC,kBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,qBAAK,QAAQ,QAAQ,WAAW,MAAM,QAAQ,CAAC,EAAE,MAAM,IAAI;AAAA,cAC7D;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF;AAEA,YAAI,SAAS,OAAO;AAClB,gBAAM,QAAQ,QAAQ;AACtB,uBAAa;AAAA,YACX,aAAa,GAAG,aAAa,CAAC,UAAU;AACtC,kBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAM,EAAE,OAAO,SAAS,MAAM,SAAS,IAAI;AAC3C,qBAAK,QAAQ;AAAA,kBACX,MAAM,EAAE,OAAO,SAAS,MAAM,SAAS,CAAC;AAAA,gBAC1C,EAAE,MAAM,IAAI;AAAA,cACd;AAAA,YACF,CAAC;AAAA,UACH;AAAA,QACF;AAIA,gBACG,OAAO,IAAI,EAAE,EACb,KAAK,CAAC,eAAe;AACpB,cAAI,YAAY,CAAC,WAAY;AAC7B,cAAI,WAAW,WAAW,aAAa;AACrC,oBAAQ;AACR,oBAAQ;AAAA,cACN,IAAI,IAAI;AAAA,cACR,QAAQ,WAAW;AAAA,YACrB,CAAC;AAAA,UACH,WAAW,WAAW,WAAW,UAAU;AACzC,oBAAQ;AACR,mBAAO,IAAI,MAAM,WAAW,SAAS,YAAY,CAAC;AAAA,UACpD;AAAA,QACF,CAAC,EACA,MAAM,CAAC,UAAU;AAChB,cAAI,SAAU;AACd,kBAAQ;AACR,iBAAO,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC,CAAC;AAAA,QAClE,CAAC;AAGH,YAAI,SAAS,YAAY,QAAW;AAClC,sBAAY,WAAW,MAAM;AAC3B,gBAAI,CAAC,UAAU;AACb,sBAAQ;AACR;AAAA,gBACE,IAAI,MAAM,gCAAgC,QAAQ,OAAO,IAAI;AAAA,cAC/D;AAAA,YACF;AAAA,UACF,GAAG,QAAQ,OAAO;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,aACJ,QACuC;AACvC,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,YAAM,aAAa,OAAO,IAAI,CAAC,SAAS;AACtC,YAAI,QAAQ,OAAO,
SAAS,YAAY,WAAW,MAAM;AACvD,iBAAO;AAAA,QACT;AACA,eAAO,EAAE,OAAO,MAAgB,SAAS,OAAU;AAAA,MACrD,CAAC;AAGD,YAAM,YAGA,CAAC;AACP,eAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,cAAM,iBAAiB;AAAA,UACrB;AAAA,UACA,WAAW,CAAC,EAAE;AAAA,UACd,YAAY,CAAC;AAAA,QACf;AACA,YAAI,gBAAgB,WAAW,CAAC,EAAE,SAAS,QAAQ;AACjD;AAAA,YACE;AAAA,YACA,WAAW,CAAC,EAAE,SAAS;AAAA,YACvB,mBAAmB,CAAC;AAAA,UACtB;AAAA,QACF;AACA,kBAAU,KAAK;AAAA,UACb,OAAO;AAAA,UACP,SAAS,WAAW,CAAC,EAAE;AAAA,QACzB,CAAC;AAAA,MACH;AAGA,YAAM,OAAO,MAAM,QAAQ;AAAA,QACzB,UAAU,IAAI,CAAC,OAAO;AAAA,UACpB,SAAS,OAAO;AAAA,UAChB,OAAO,EAAE;AAAA,UACT,gBAAgB,EAAE,SAAS;AAAA,UAC3B,gBAAgB,EAAE,SAAS;AAAA,UAC3B,QAAQ,EAAE,SAAS;AAAA,QACrB,EAAE;AAAA,MACJ;AAGA,eAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,KAAK,CAAC,EAAE;AAAA,UACf,SAAS,OAAO;AAAA,UAChB,OAAO,UAAU,CAAC,EAAE;AAAA,UACpB,QAAQ,KAAK,CAAC,EAAE;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,OAAO,IAAwD;AACnE,YAAM,MAAM,MAAM,QAAQ,OAAO,EAAE;AACnC,UAAI,CAAC,OAAO,IAAI,YAAY,OAAO,MAAM;AACvC,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QACJ,QACuC;AACvC,YAAM,OAAO,MAAM,QAAQ,QAAQ;AAAA,QACjC,GAAG;AAAA,QACH,SAAS,OAAO;AAAA,MAClB,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AAGA,WAAS,IAAI;AAAA,IACX,MAAM,OAAO;AAAA,IACb;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI,OAAO;AAAA,IACX;AAAA,IACA;AAAA,EACF,CAAC;AAED,SAAO;AACT;;;ACvdA,IAAM,aAA0B;AAAA,EAC9B;AAAA,IACE,SAAS;AAAA,IACT,IAAI,OAAO,OAAO;AAEhB,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,YAAY,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,SAAS,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,mBAAmB,MAAM,EACnC,UAAU,mBAAmB,MAAM,EACnC,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,EAAE,UAAU,IAAI,CAAC,EAClE;AAAA,QAAU;AAAA,QAAsB;AAAA,QAAW,CAAC,QAC3C,IAAI,QAAQ,EAAE,UAAU,CAAC;AAAA,MAC3B,EACC,UAAU,YAAY,MAAM,EAC5B,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,gBAAgB,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACxD,UAAU,cAAc,MAAM,EAC9B,UAAU,gBAAgB,MAAM,EAChC,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAC
tD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,kCAAkC,EAC9C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,YAAY,iBAAiB,CAAC,EACvC,OAAO,EACP,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,qCAAqC,EACjD,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,iBAAiB,CAAC,EACrC,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,iCAAiC,EAC7C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,eAAe,EAC3B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,QAAQ,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAChD,UAAU,SAAS,WAAW,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,UAAU,gBAAgB,MAAM,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,6BAA6B,EACzC,YAAY,EACZ,GAAG,eAAe,EAClB,QAAQ,CAAC,UAAU,OAAO,CAAC,EAC3B,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,aAAa,MAAM,EAC7B,UAAU,SAAS,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACjD,UAAU,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACnD,UAAU,QAAQ,MAAM,EACxB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,8BAA8B,EAC1C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,yBAAyB,EACrC,YAAY,EACZ,UAAU,WAAW,WAAW,CAAC,QAAQ,IAAI,WAAW,CAAC,EACzD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAAA,IACb;AAAA,EACF;AACF;AAKA,eAAe,kBAAkB,IAAuC;AACtE,MAAI;AACF,UAAM,SAAS,MAAM,GAClB,WAAW,yBAAyB,EACpC,OAAO,SAAS,EAChB,QAAQ,WAAW,MAAM,EACzB,MAAM,CAAC,EACP,iBAAiB;AAEpB,WAAO,QAAQ,WAAW;AAAA,EAC5B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,cAAc,IAAqC;AACvE,QAAM,iBAAiB,MAAM,kBAAkB,EAAE;AAEjD,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,UAAU,gBAAgB;AACtC,YAAM,GAAG,YAAY,EAAE,QAAQ,OAAO,QAAQ;AAC5C,cAAM,UAAU,GAAG,GAAG;AAEtB,cAAM,IACH,WAAW,yBAAyB,EACpC,OAAO;AAAA,UACN,SAAS,UAAU;AAAA,UACnB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,QACrC,CAAC,EACA,QAAQ;AAAA,MACb,CAAC;AAAA,IACH;AAAA,EACF;AACF;;;AC/JA,SAAsB
,WAAW;AACjC,SAAS,wBAAwB;AAGjC,IAAM,OAAO,iBAAiB;AAyIvB,SAAS,YAEd,KAAuC;AACvC,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG;AAAA,EACL,IAAI;AACJ,SAAO;AACT;AAgCA,IAAM,oBAAoB;AAE1B,SAAS,eAAe,QAAkD;AACxE,MAAI,CAAC,OAAQ;AACb,aAAW,OAAO,OAAO,KAAK,MAAM,GAAG;AACrC,QAAI,CAAC,kBAAkB,KAAK,GAAG,GAAG;AAChC,YAAM,IAAI;AAAA,QACR,sBAAsB,GAAG;AAAA,MAC3B;AAAA,IACF;AAAA,EACF;AACF;AAEA,SAAS,SACP,KACK;AACL,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,SAAS,IAAI;AAAA,IACb,OAAO,KAAK,MAAM,IAAI,KAAK;AAAA,IAC3B,QAAQ,IAAI;AAAA,IACZ,gBAAgB,IAAI;AAAA,IACpB,gBAAgB,IAAI;AAAA,IACpB,kBAAkB,IAAI;AAAA,IACtB,WAAW,OAAO,IAAI,cAAc,CAAC;AAAA,IACrC,UAAU,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ,IAAI;AAAA,IACpD,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,QAAQ,KAAK,MAAM,IAAI,MAAM;AAAA,IAC7B,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,EACjB;AACF;AAKA,SAAS,UAAU,KAAsC;AACvD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAM,IAAI;AAAA,IACV,OAAO,IAAI;AAAA,IACX,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,EACnB;AACF;AAKA,SAAS,SAAS,KAAoC;AACpD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,UAAU,IAAI;AAAA,IACd,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,MAAM,IAAI,OAAO,KAAK,MAAM,IAAI,IAAI,IAAI;AAAA,IACxC,WAAW,IAAI;AAAA,EACjB;AACF;AAKO,SAAS,oBAAoB,IAA+B;AACjE,SAAO;AAAA,IACL,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAGnC,UAAI,MAAM,gBAAgB;AACxB,cAAM,WAAW,MAAM,GACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,YAAI,UAAU;AACZ,iBAAO,SAAS,QAAQ;AAAA,QAC1B;AAAA,MACF;AAEA,qBAAe,MAAM,MAAM;AAE3B,YAAM,KAAK,KAAK;AAChB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,UAAU,MAAM;AAAA,QAChB,OAAO,KAAK,UAAU,MAAM,KAAK;AAAA,QACjC,QAAQ;AAAA,QACR,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,oBAAoB;AAAA,QACpB,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,QAAQ,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,QACzC,cAAc;AAAA,QACd,YAA
Y;AAAA,QACZ,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,gBAAgB,QAA0C;AAC9D,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,aAAO,MAAM,GAAG,YAAY,EAAE,QAAQ,OAAO,QAAQ;AACnD,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,OAAmC,CAAC;AAG1C,mBAAW,SAAS,QAAQ;AAC1B,yBAAe,MAAM,MAAM;AAAA,QAC7B;AAGA,mBAAW,SAAS,QAAQ;AAE1B,cAAI,MAAM,gBAAgB;AACxB,kBAAM,WAAW,MAAM,IACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,gBAAI,UAAU;AACZ,mBAAK,KAAK,QAAQ;AAClB;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK,KAAK;AAChB,eAAK,KAAK;AAAA,YACR;AAAA,YACA,UAAU,MAAM;AAAA,YAChB,OAAO,KAAK,UAAU,MAAM,KAAK;AAAA,YACjC,QAAQ;AAAA,YACR,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,oBAAoB;AAAA,YACpB,UAAU;AAAA,YACV,QAAQ;AAAA,YACR,OAAO;AAAA,YACP,QAAQ,KAAK,UAAU,MAAM,UAAU,CAAC,CAAC;AAAA,YACzC,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,YAAY;AAAA,UACd,CAAC;AAAA,QACH;AAGA,cAAM,UAAU,KAAK,OAAO,CAAC,MAAM,EAAE,eAAe,GAAG;AACvD,YAAI,QAAQ,SAAS,GAAG;AACtB,gBAAM,IAAI,WAAW,cAAc,EAAE,OAAO,OAAO,EAAE,QAAQ;AAAA,QAC/D;AAEA,eAAO,KAAK,IAAI,QAAQ;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAAe,MAAqC;AAClE,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,UAA6C;AAAA,QACjD,YAAY;AAAA,MACd;AAEA,UAAI,KAAK,WAAW,OAAW,SAAQ,SAAS,KAAK;AACrD,UAAI,KAAK,qBAAqB;AAC5B,gBAAQ,qBAAqB,KAAK;AACpC,UAAI,KAAK,aAAa;AACpB,gBAAQ,WAAW,KAAK,WAAW,KAAK,UAAU,KAAK,QAAQ,IAAI;AACrE,UAAI,KAAK,WAAW;AAClB,gBAAQ,SAAS,KAAK,UAAU,KAAK,MAAM;AAC7C,UAAI,KAAK,UAAU,OAAW,SAAQ,QAAQ,KAAK;AACnD,UAAI,KAAK,gBAAgB;AACvB,gBAAQ,eAAe,KAAK;AAC9B,UAAI,KAAK,cAAc,OAAW,SAAQ,aAAa,KAAK;AAC5D,UAAI,KAAK,gBAAgB;AACvB,gBAAQ,eAAe,KAAK;AAE9B,YAAM,GACH,YAAY,cAAc,EAC1B,IAAI,OAAO,EACX,MAAM,MAAM,KAAK,KAAK,EACtB,QAAQ;AAAA,IACb;AAAA,IAEA,MAAM,UAAU,OAA8B;AAE5C,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACxE,YAAM,GAAG,WAAW,eAAe,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACzE,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,MAAM,KAAK,KAAK,EAAE,QAAQ;AAAA,IACtE;AA
AA,IAEA,MAAM,OAA4B,OAAkC;AAClE,YAAM,MAAM,MAAM,GACf,WAAW,cAAc,EACzB,SAAS,iBAAiB,mBAAmB,sBAAsB,EACnE,UAAU,cAAc,EACxB;AAAA,QAAO,CAAC,OACP,GAAG,GAAG,MAAc,kBAAkB,EAAE,GAAG,YAAY;AAAA,MACzD,EACC,MAAM,mBAAmB,KAAK,KAAK,EACnC,QAAQ,iBAAiB,EACzB,iBAAiB;AAEpB,aAAO,MAAO,SAAS,GAAG,IAAU;AAAA,IACtC;AAAA,IAEA,MAAM,QAA6B,QAAkC;AACnE,UAAI,QAAQ,GACT,WAAW,cAAc,EACzB,SAAS,iBAAiB,mBAAmB,sBAAsB,EACnE,UAAU,cAAc,EACxB;AAAA,QAAO,CAAC,OACP,GAAG,GAAG,MAAc,kBAAkB,EAAE,GAAG,YAAY;AAAA,MACzD,EACC,QAAQ,iBAAiB;AAE5B,UAAI,QAAQ,QAAQ;AAClB,gBAAQ,MAAM,MAAM,uBAAuB,KAAK,OAAO,MAAM;AAAA,MAC/D;AACA,UAAI,QAAQ,SAAS;AACnB,YAAI,MAAM,QAAQ,OAAO,OAAO,GAAG;AACjC,cAAI,OAAO,QAAQ,SAAS,GAAG;AAC7B,oBAAQ,MAAM,MAAM,yBAAyB,MAAM,OAAO,OAAO;AAAA,UACnE;AAAA,QACF,OAAO;AACL,kBAAQ,MAAM,MAAM,yBAAyB,KAAK,OAAO,OAAO;AAAA,QAClE;AAAA,MACF;AACA,UAAI,QAAQ,QAAQ;AAClB,cAAM,SAAS,OAAO;AACtB,uBAAe,MAAM;AACrB,mBAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACjD,cAAI,UAAU,OAAW;AACzB,kBAAQ,MAAM;AAAA,YACZ,wCAAwC,MAAM,GAAG,GAAG;AAAA,YACpD;AAAA,YACA;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,cAAQ,MAAM,QAAQ,2BAA2B,MAAM;AAEvD,UAAI,QAAQ,UAAU,QAAW;AAC/B,gBAAQ,MAAM,MAAM,OAAO,KAAK;AAAA,MAClC;AACA,UAAI,QAAQ,WAAW,QAAW;AAEhC,YAAI,OAAO,UAAU,QAAW;AAC9B,kBAAQ,MAAM,MAAM,EAAE;AAAA,QACxB;AACA,gBAAQ,MAAM,OAAO,OAAO,MAAM;AAAA,MACpC;AAEA,YAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,IAEA,MAAM,oBACJ,wBACqB;AACrB,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,UAAI,WAAW,GACZ,WAAW,cAAc,EACzB,OAAO,IAAI,EACX,MAAM,UAAU,KAAK,SAAS,EAC9B,QAAQ,cAAc,KAAK,EAC3B,QAAQ,MAAM,KAAK,EACnB,MAAM,CAAC;AAEV,UAAI,uBAAuB,SAAS,GAAG;AACrC,mBAAW,SAAS;AAAA,UAAM,CAAC,OACzB,GAAG,GAAG;AAAA,YACJ,GAAG,mBAAmB,MAAM,IAAI;AAAA,YAChC,GAAG,mBAAmB,UAAU,sBAAsB;AAAA,UACxD,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,MAAM,MAAM,GACf,YAAY,cAAc,EAC1B,IAAI;AAAA,QACH,QAAQ;AAAA,QACR,cAAc;AAAA,QACd,YAAY,2BAA2B,GAAG;AAAA,QAC1C,YAAY;AAAA,MACd,CAAC,EACA;AAAA,QAAM;AAAA,QAAM;AAAA,QAAK,CAAC,OACjB,GAAG,WAAW,SAAS,GAAG,KAAK,CAAC,EAAE,OAAO,IAAI;AAAA,MAC/C,EACC,aAAa,EACb,iBAAiB;AAEpB,UAAI,CAAC,IAAK,QAAO;AACjB,aAAO,SAAS,EAAE,GAAG,KAAK,YAAY,
EAAE,CAAC;AAAA,IAC3C;AAAA,IAEA,MAAM,WAAW,OAAuC;AACtD,YAAM,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC3C,YAAM,KAAK,KAAK;AAEhB,YAAM,OAAkC;AAAA,QACtC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,MAAM,MAAM;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM;AAAA,QACd,QACE,MAAM,WAAW,SAAY,KAAK,UAAU,MAAM,MAAM,IAAI;AAAA,QAC9D,OAAO,MAAM,SAAS;AAAA,QACtB,YAAY,MAAM;AAAA,QAClB,cAAc;AAAA,MAChB;AAEA,YAAM,GAAG,WAAW,eAAe,EAAE,OAAO,IAAI,EAAE,QAAQ;AAE1D,aAAO,UAAU,IAAI;AAAA,IACvB;AAAA,IAEA,MAAM,YAAY,OAA8B;AAC9C,YAAM,GAAG,WAAW,eAAe,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AAAA,IAC3E;AAAA,IAEA,MAAM,SAAS,OAAgC;AAC7C,YAAM,OAAO,MAAM,GAChB,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,SAAS,KAAK,EACtB,QAAQ;AAEX,aAAO,KAAK,IAAI,SAAS;AAAA,IAC3B;AAAA,IAEA,MAAM,iBAAiB,OAAe,MAAoC;AACxE,YAAM,MAAM,MAAM,GACf,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,MAAM,QAAQ,KAAK,IAAI,EACvB,MAAM,UAAU,KAAK,WAAW,EAChC,iBAAiB;AAEpB,aAAO,MAAM,UAAU,GAAG,IAAI;AAAA,IAChC;AAAA,IAEA,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,KAAK,KAAK;AAEhB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,MAAM,MAAM,SAAS,SAAY,KAAK,UAAU,MAAM,IAAI,IAAI;AAAA,QAC9D,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,QAAQ,OAA+B;AAC3C,YAAM,OAAO,MAAM,GAChB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,cAAc,KAAK,EAC3B,QAAQ;AAEX,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,EACF;AACF;;;ACzkBA,SAAS,iBAAAA,sBAAqB;;;ACKvB,IAAM,iBAAN,cAA6B,MAAM;AAAA,EACxC,YAAY,OAAe;AACzB,UAAM,sBAAsB,KAAK,EAAE;AACnC,SAAK,OAAO;AAAA,EACd;AACF;AAKO,SAAS,gBAAgB,OAAwB;AACtD,SAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC9D;;;ACTO,SAAS,kBACd,KACA,SACA,SACA,cAC4C;AAC5C,MAAI,YAAY,IAAI;AACpB,MAAI,kBAAiC;AAErC,QAAM,aAAa,IAAI,gBAAgB;AAEvC,QAAM,cAAc,aAAa,GAAG,cAAc,CAAC,UAAU;AAC3D,QAAI,MAAM,UAAU,IAAI,IAAI;AAC1B,iBAAW,MAAM;AAAA,IACnB;AAAA,EACF,CAAC;AAED,QAAM,OAAoB;AAAA,IACxB,IAAI,QAAgB;AAClB,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,MAAM,IACJ,MACA,IACY;AAEZ,UAAI,WAAW,OAAO,SAAS;AAC7B,cAAM,
IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAIA,YAAM,aAAa,MAAM,QAAQ,OAAO,IAAI,EAAE;AAC9C,UAAI,YAAY,WAAW,aAAa;AACtC,mBAAW,MAAM;AACjB,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAGA,UAAI,WAAW,OAAO,SAAS;AAC7B,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAGA,YAAM,eAAe,MAAM,QAAQ,iBAAiB,IAAI,IAAI,IAAI;AAChE,UAAI,cAAc;AAChB;AACA,eAAO,aAAa;AAAA,MACtB;AAGA,wBAAkB;AAGlB,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY,KAAK,IAAI;AAG3B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX;AAAA,QACA,UAAU;AAAA,QACV;AAAA,QACA,QAAQ,IAAI;AAAA,MACd,CAAC;AAED,UAAI;AAEF,cAAM,SAAS,MAAM,GAAG,WAAW,MAAM;AAGzC,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR;AAAA,QACF,CAAC;AAGD;AACA,cAAM,QAAQ,UAAU,IAAI,IAAI,EAAE,kBAAkB,UAAU,CAAC;AAG/D,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV,WAAW,YAAY;AAAA,UACvB,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,UACvB,QAAQ,IAAI;AAAA,QACd,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AACd,cAAM,cAAc,WAAW,OAAO;AACtC,cAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAEvD,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ,cAAc,cAAc;AAAA,UACpC,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAED,qBAAa,KAAK;AAAA,UAChB,GAAI,cACA,EAAE,MAAM,cAAuB,IAC/B,EAAE,MAAM,aAAsB,OAAO,aAAa;AAAA,UACtD,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA,QAAQ,IAAI;AAAA,QACd,CAAC;AAED,YAAI,aAAa;AACf,gBAAM,IAAI,eAAe,IAAI,EAAE;AAAA,QACjC;AACA,cAAM;AAAA,MACR,UAAE;AAEA,0BAAkB;AAAA,MACpB;AAAA,IACF;AAAA,IAEA,SAAS,SAAiB,OAAgB,SAAwB;AAChE,YAAM,eAAe,EAAE,SAAS,OAAO,QAAQ;AAE/C,cAAQ,UAAU,IAAI,IAAI,EAAE,UAAU,aAAa,CAAC;AAEpD,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX;AAAA,QACA,UAAU;AAAA,QACV,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,KAAK;AAAA,MACH,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,QAAQ,IAAI;AAAA,UACZ,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,QAAQ,IAAI;AAAA,
UACZ,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,SAAiB,MAAsB;AAC3C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,QAAQ,IAAI;AAAA,UACZ,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,MAAM,SAAS,YAAY;AACtC;;;AF/JO,SAAS,aACd,QACA,SACA,cACA,aACQ;AACR,MAAI,UAAU;AACd,MAAI,oBAA0C;AAC9C,MAAI,iBAAuD;AAC3D,MAAI,eAAoC;AACxC,MAAI,oBAA2D;AAC/D,MAAI,eAA8B;AAKlC,iBAAe,mBAAkC;AAC/C,UAAM,iBAAiB,IAAI;AAAA,MACzB,KAAK,IAAI,IAAI,OAAO;AAAA,IACtB,EAAE,YAAY;AACd,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAE/D,eAAW,OAAO,aAAa;AAC7B,UAAI,IAAI,cAAc,gBAAgB;AAEpC,cAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,UAC9B,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAKA,iBAAe,kBAAiC;AAC9C,QAAI,cAAc;AAChB,YAAM,QAAQ,UAAU,cAAc;AAAA,QACpC,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,EACF;AAKA,iBAAe,iBACb,OACA,SACA,QACA,WACe;AAEf,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,CAAC,cAAc,WAAW,WAAW,aAAa;AACpD;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR;AAAA,MACA,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACtC,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,KAAK,IAAI,IAAI;AAAA,MACvB,QAAQ,WAAW;AAAA,IACrB,CAAC;AAAA,EACH;AAKA,iBAAe,iBACb,OACA,SACA,OACe;AAGf,QAAI,iBAAiB,gBAAgB;AACnC;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,CAAC,cAAc,WAAW,WAAW,aAAa;AACpD;AAAA,IACF;AAEA,UAAM,eAAe,gBAAgB,KAAK;AAG1C,UAAM,QAAQ,MAAM,QAAQ,SAAS,KAAK;AAC1C,UAAM,aAAa,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,QAAQ;AAE1D,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACtC,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP,gBAAgB,YAAY,QAAQ;AAAA,MACpC,QAAQ,WAAW;AAAA,IACrB,CAAC;AAAA,EACH;AAKA,iBAAe,WACb,KACA,KACe;AAEf,mBAAe,IAAI;AAInB,wBAAoB,YAAY,MAAM;AACpC,sBAAgB,EAAE,MAAM,CAAC,UAAU;AACjC,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,gBAAgB,KAAK;AAAA,UAC5B,SAAS;AAAA,UACT,OAAO,IAAI;AAAA,QACb,CAAC;AAAA,MACH,CAAC;AAAA,IACH,GAAG,OAA
O,iBAAiB;AAG3B,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN,OAAO,IAAI;AAAA,MACX,SAAS,IAAI;AAAA,MACb,OAAO,IAAI;AAAA,MACX,QAAQ,IAAI;AAAA,IACd,CAAC;AAED,UAAM,YAAY,KAAK,IAAI;AAE3B,UAAM,EAAE,MAAM,QAAQ,IAAI;AAAA,MACxB;AAAA,MACA,IAAI;AAAA,MACJ;AAAA,MACA;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,SAAS,MAAM,IAAI,GAAG,MAAM,IAAI,KAAK;AAG3C,UAAI,IAAI,cAAc;AACpB,cAAM,cAAc,IAAI,aAAa,UAAU,MAAM;AACrD,YAAI,CAAC,YAAY,SAAS;AACxB,gBAAM,IAAI,MAAM,mBAAmBC,eAAc,YAAY,KAAK,CAAC,EAAE;AAAA,QACvE;AAAA,MACF;AAEA,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,QAAQ,SAAS;AAAA,IAC/D,SAAS,OAAO;AACd,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,KAAK;AAAA,IACnD,UAAE;AACA,UAAI,OAAO,cAAc;AACvB,YAAI;AACF,gBAAM,QAAQ,YAAY,IAAI,EAAE;AAAA,QAClC,QAAQ;AAAA,QAER;AAAA,MACF;AAEA,cAAQ;AAER,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AACA,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,iBAAe,iBAAmC;AAEhD,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAC/D,UAAM,yBAAyB,YAC5B;AAAA,MACC,CAAC,MACC,EAAE,mBAAmB;AAAA,IACzB,EACC,IAAI,CAAC,MAAM,EAAE,cAAc;AAG9B,UAAM,MAAM,MAAM,QAAQ,oBAAoB,sBAAsB;AACpE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AAGA,UAAM,MAAM,YAAY,IAAI,IAAI,OAAO;AACvC,QAAI,CAAC,KAAK;AAER,YAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,QAC9B,QAAQ;AAAA,QACR,OAAO,gBAAgB,IAAI,OAAO;AAAA,MACpC,CAAC;AACD,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,KAAK,GAAG;AAEzB,WAAO;AAAA,EACT;AAEA,iBAAe,OAAsB;AACnC,QAAI,CAAC,SAAS;AACZ;AAAA,IACF;AAEA,UAAM,SAAS,YAAY;AAEzB,YAAM,iBAAiB;AACvB,YAAM,eAAe;AAAA,IACvB;AAEA,QAAI;AACF,0BAAoB,OAAO;AAC3B,YAAM;AAAA,IACR,UAAE;AACA,0BAAoB;AAAA,IACtB;AAEA,QAAI,SAAS;AACX,uBAAiB,WAAW,MAAM,KAAK,GAAG,OAAO,eAAe;AAAA,IAClE,WAAW,cAAc;AACvB,mBAAa;AACb,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,YAAqB;AACvB,aAAO;AAAA,IACT;AAAA,IAEA,QAAc;AACZ,UAAI,SAAS;AACX;AAAA,MACF;AACA,gBAAU;AACV,WAAK;AAAA,IACP;AAAA,IAEA,MAAM,OAAsB;AAC1B,UAAI,CAAC,SAAS;AACZ;AAAA,MACF;AAEA,gBAAU;AAEV,UAAI,gBAAgB;AAClB,qBAAa,cAAc;AAC3B,yBAAiB;AAAA,MACnB;AAEA,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AAEA,UAAI,mBAAmB;AAErB,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,yBAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;ALnQA
,IAAM,WAAW;AAAA,EACf,iBAAiB;AAAA,EACjB,mBAAmB;AAAA,EACnB,gBAAgB;AAAA,EAChB,cAAc;AAChB;AA2MA,SAAS,sBAMP,OAAqB,MAAsC;AAC3D,QAAM,EAAE,IAAI,SAAS,cAAc,aAAa,OAAO,IAAI;AAE3D,iBAAe,cAAc,OAAsC;AACjE,UAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,IAC3C;AACA,WAAO;AAAA,EACT;AAEA,QAAM,UAAmC;AAAA,IACvC;AAAA,IACA;AAAA,IACA;AAAA,IACA,IAAI,aAAa;AAAA,IACjB,MAAM,aAAa;AAAA,IACnB,SAAS,aAAa;AAAA,IACtB,OAAO,OAAO;AAAA,IACd,MAAM,OAAO;AAAA;AAAA,IAGb,SACE,SACiE;AACjE,YAAM,aAAa,CAAC;AAEpB,iBAAW,OAAO,OAAO,KAAK,OAAO,GAAyB;AAC5D,cAAM,SAAS,QAAQ,GAAG;AAC1B,cAAM,SAAS;AAAA,UACb;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA,MAAM;AAAA,QACR;AACA,mBAAW,GAAG,IAAI;AAAA,MAIpB;AAGA,YAAM,aAAa,EAAE,GAAG,MAAM,GAAG,WAAW;AAE5C,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IAEA,QAAQ,QAAQ;AAAA,IAChB,SAAS,QAAQ;AAAA,IAEjB,IAAI,QAA6B;AAC/B,aAAO,QAAQ,OAAO;AAAA,IACxB;AAAA,IAEA,OACE,MACyE;AACzE,YAAM,gBAAgB,YAAY,IAAI,IAAI;AAC1C,UAAI,CAAC,eAAe;AAClB,eAAO;AAAA,MACT;AACA,aAAO,cAAc;AAAA,IAMvB;AAAA,IAEA,UAAU,OAA6C;AAErD,UAAI,SAAS;AACb,UAAI,UAA+B;AAGnC,YAAM,cAAc,oBAAI,IAAe,CAAC,gBAAgB,YAAY,CAAC;AAErE,YAAM,mBAAgC;AAAA,QACpC;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAEA,aAAO,IAAI,eAA6B;AAAA,QACtC,OAAO,CAAC,eAAe;AACrB,gBAAM,eAAe,iBAAiB;AAAA,YAAI,CAAC,SACzC,aAAa,GAAG,MAAM,CAAC,UAAU;AAC/B,kBAAI,UAAU,MAAM,UAAU,MAAO;AACrC,yBAAW,QAAQ,KAAK;AACxB,kBAAI,YAAY,IAAI,IAAI,GAAG;AACzB,yBAAS;AACT,0BAAU;AACV,2BAAW,MAAM;AAAA,cACnB;AAAA,YACF,CAAC;AAAA,UACH;AAEA,oBAAU,MAAM;AACd,uBAAW,SAAS,aAAc,OAAM;AAAA,UAC1C;AAAA,QACF;AAAA,QACA,QAAQ,MAAM;AAEZ,cAAI,CAAC,QAAQ;AACX,qBAAS;AACT,sBAAU;AAAA,UACZ;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAAsC;AACpD,YAAM,MAAM,MAAM,cAAc,KAAK;AACrC,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,MAC1D;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,iCAAiC,KAAK,EAAE;AAAA,MAC1D;AACA,UAAI,CAAC,YAAY,IAAI,IAAI,OAAO,GAAG;AACjC,cAAM,IAAI,MAAM,gBAAgB,IAAI,OAAO,EAAE;AAAA,MAC/C;AAEA,YAAM,UAAU,MAAM,QAAQ,UAAU;
AAAA,QACtC,SAAS,IAAI;AAAA,QACb,OAAO,IAAI;AAAA,QACX,gBAAgB,IAAI,kBAAkB;AAAA,QACtC,QAAQ,IAAI;AAAA,MACd,CAAC;AAED,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,QAAQ;AAAA,QACf,SAAS,IAAI;AAAA,QACb,OAAO,IAAI;AAAA,QACX,QAAQ,IAAI;AAAA,MACd,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,OAAO,OAA8B;AACzC,YAAM,MAAM,MAAM,cAAc,KAAK;AACrC,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,MACzD;AACA,UAAI,IAAI,WAAW,UAAU;AAC3B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,wCAAwC,KAAK,EAAE;AAAA,MACjE;AACA,YAAM,aAAa,IAAI,WAAW;AAClC,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,QACR,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC,CAAC;AAGD,UAAI,cAAc,MAAM,cAAc;AACpC,cAAM,QAAQ,YAAY,KAAK;AAAA,MACjC;AAGA,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN;AAAA,QACA,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAA8B;AAC5C,YAAM,MAAM,MAAM,cAAc,KAAK;AACrC,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AACA,YAAM,QAAQ,UAAU,KAAK;AAG7B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN;AAAA,QACA,SAAS,IAAI;AAAA,QACb,QAAQ,IAAI;AAAA,MACd,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAyB;AAC7B,UAAI,MAAM,UAAU;AAClB;AAAA,MACF;AAEA,UAAI,MAAM,WAAW;AACnB,eAAO,MAAM;AAAA,MACf;AAEA,YAAM,YAAY,cAAc,EAAE,EAC/B,KAAK,MAAM;AACV,cAAM,WAAW;AAAA,MACnB,CAAC,EACA,QAAQ,MAAM;AACb,cAAM,YAAY;AAAA,MACpB,CAAC;AAEH,aAAO,MAAM;AAAA,IACf;AAAA,IAEA,MAAM,OAAsB;AAC1B,YAAM,KAAK,QAAQ;AACnB,WAAK,MAAM;AAAA,IACb;AAAA,EACF;AAEA,SAAO;AACT;AAuBO,SAAS,cAQd,SAG0C;AAC1C,QAAM,SAAS;AAAA,IACb,iBAAiB,QAAQ,mBAAmB,SAAS;AAAA,IACrD,mBAAmB,QAAQ,qBAAqB,SAAS;AAAA,IACzD,gBAAgB,QAAQ,kBAAkB,SAAS;AAAA,IACnD,cAAc,QAAQ,gBAAgB,SAAS;AAAA,EACjD;AAEA,QAAM,KAAK,IAAI,OAAiB,EAAE,SAAS,QAAQ,QAAQ,CAAC;AAC5D,QAAM,UAAU,oBAAoB,EAAE;AACtC,QAAM,eAAe,mBAAmB;AACxC,QAAM,cAAc,kBAAkB;AACtC,QAAM,SAAS,aAAa,QAAQ,SAAS,cAAc,WAAW;AAEtE,QAAM,QAAsB;AAAA,IAC1B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAc,QAAQ;AAAA,IACtB,cAAc,OAAO;AAAA,IACrB,WAAW;AAAA,IACX,UAAU;AAAA,EAC
Z;AAEA,QAAM,WAAW;AAAA,IACf;AAAA,IACA,CAAC;AAAA,EACH;AAEA,MAAI,QAAQ,MAAM;AAChB,WAAO,SAAS,SAAS,QAAQ,IAAI;AAAA,EACvC;AAEA,SAAO;AACT;;;AQvfO,SAAS,UAKd,QAKA;AACA,SAAO;AAAA,IACL,MAAM,OAAO;AAAA,IACb,OAAO,OAAO;AAAA,IACd,QAAQ,OAAO;AAAA,IACf,KAAK,OAAO;AAAA,EACd;AAKF;;;ACvFA,IAAM,eAAe;AAAA,EACnB,gBAAgB;AAClB;AAKO,SAAS,aAAa,MAAe,SAAS,KAAe;AAClE,SAAO,IAAI,SAAS,KAAK,UAAU,IAAI,GAAG;AAAA,IACxC;AAAA,IACA,SAAS;AAAA,EACX,CAAC;AACH;AAKO,SAAS,cACd,SACA,SAA0B,KAChB;AACV,SAAO,aAAa,EAAE,OAAO,QAAQ,GAAG,MAAM;AAChD;AAKO,SAAS,kBAA4B;AAC1C,SAAO,aAAa,EAAE,SAAS,KAAK,CAAC;AACvC;AAKO,SAAS,sBACd,KACA,WACmB;AACnB,QAAM,QAAQ,IAAI,aAAa,IAAI,SAAS;AAC5C,MAAI,CAAC,OAAO;AACV,WAAO,cAAc,GAAG,SAAS,gCAAgC,GAAG;AAAA,EACtE;AACA,SAAO;AACT;;;AC7CA,IAAM,cAAc;AAAA,EAClB,gBAAgB;AAAA,EAChB,iBAAiB;AAAA,EACjB,YAAY;AACd;AAKA,SAAS,UAAU,MAAuB;AACxC,SAAO,SAAS,KAAK,UAAU,IAAI,CAAC;AAAA;AAAA;AACtC;AAKA,SAAS,mBAAgC;AACvC,SAAO,IAAI,YAAY;AACzB;AAKA,SAAS,UAAU,SAAsB,MAA2B;AAClE,SAAO,QAAQ,OAAO,UAAU,IAAI,CAAC;AACvC;AAKO,SAAS,kBAAkB,QAAkC;AAClE,SAAO,IAAI,SAAS,QAAQ;AAAA,IAC1B,QAAQ;AAAA,IACR,SAAS;AAAA,EACX,CAAC;AACH;AAKO,SAAS,0BACd,QAC4B;AAC5B,QAAM,UAAU,iBAAiB;AAEjC,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,MAAM,YAAY;AACtB,UAAI;AACF,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR,uBAAW,MAAM;AACjB;AAAA,UACF;AAEA,qBAAW,QAAQ,UAAU,SAAS,KAAK,CAAC;AAAA,QAC9C;AAAA,MACF,SAAS,OAAO;AACd,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,IACA,SAAS;AACP,aAAO,YAAY;AAAA,IACrB;AAAA,EACF,CAAC;AACH;AAMO,SAAS,mCACd,QACA,YAC4B;AAC5B,MAAI,cAAc,GAAG;AACnB,WAAO,0BAA0B,MAAM;AAAA,EACzC;AAEA,QAAM,UAAU,iBAAiB;AACjC,MAAI,SAAS;AACb,MAAI,WAGO;AAEX,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,MAAM,YAAY;AACtB,YAAM,YAAiC;AAAA,QACrC,SAAS,CAAC,SACR,WAAW,QAAQ,UAAU,SAAS,IAAI,CAAC;AAAA,QAC7C,OAAO,MAAM;AACX,mBAAS;AACT,qBAAW,MAAM;AAAA,QACnB;AAAA,QACA,IAAI,SAAS;AACX,iBAAO;AAAA,QACT;AAAA,MACF;AACA,iBAAW,6BAA6B,WAAW,UAAU;AAE7D,UAAI;AACF,eAAO,MAAM;AACX,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,cAAI,MAAM;AACR,qBAAS,WAAW,MAAM;AAC1B;AAAA,UACF;AACA,mBAAS,WAAW,QAAQ,KAAK;AAAA,QACnC;AAA
A,MACF,SAAS,OAAO;AACd,iBAAS,QAAQ;AACjB,eAAO,YAAY;AACnB,mBAAW,MAAM,KAAK;AAAA,MACxB;AAAA,IACF;AAAA,IACA,SAAS;AACP,eAAS;AACT,gBAAU,QAAQ;AAClB,aAAO,YAAY;AAAA,IACrB;AAAA,EACF,CAAC;AACH;AAkBO,SAAS,iCACd,OAC4B;AAC5B,QAAM,UAAU,iBAAiB;AACjC,MAAI,SAAS;AACb,MAAI,eAA8B,CAAC;AAEnC,SAAO,IAAI,eAAe;AAAA,IACxB,MAAM,YAAY;AAChB,YAAM,gBAAqC;AAAA,QACzC,SAAS,CAAC,SAAkB;AAC1B,cAAI,OAAQ;AACZ,qBAAW,QAAQ,UAAU,SAAS,IAAI,CAAC;AAAA,QAC7C;AAAA,QACA,OAAO,MAAM;AACX,cAAI,OAAQ;AACZ,mBAAS;AACT,qBAAW,MAAM;AAAA,QACnB;AAAA,QACA,IAAI,SAAS;AACX,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,qBAAe,MAAM,aAAa;AAAA,IACpC;AAAA,IACA,SAAS;AACP,eAAS;AACT,iBAAW,eAAe,cAAc;AACtC,oBAAY;AAAA,MACd;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,IAAM,uBAAuB,oBAAI,IAAI;AAAA,EACnC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAUM,SAAS,6BACd,OACA,YAC0D;AAC1D,MAAI,cAAc,GAAG;AACnB,WAAO,EAAE,YAAY,OAAO,SAAS,MAAM;AAAA,IAAC,EAAE;AAAA,EAChD;AAGA,QAAM,UAAU,oBAAI,IAGlB;AAGF,QAAM,WAAW,oBAAI,IAAoB;AAEzC,QAAM,aAAkC;AAAA,IACtC,QAAQ,MAAe;AACrB,UAAI,MAAM,OAAQ;AAElB,YAAM,QACJ,OAAO,SAAS,YAAY,SAAS,OAChC,OACD;AAGN,UAAI,OAAO,SAAS,qBAAqB,IAAI,MAAM,QAAQ,EAAE,GAAG;AAC9D,iBAAS,OAAO,MAAM,KAAK;AAC3B,cAAM,QAAQ,QAAQ,IAAI,MAAM,KAAK;AACrC,YAAI,OAAO;AACT,uBAAa,MAAM,KAAK;AACxB,cAAI,CAAC,MAAM,OAAQ,OAAM,QAAQ,MAAM,IAAI;AAC3C,kBAAQ,OAAO,MAAM,KAAK;AAAA,QAC5B;AAAA,MACF;AAEA,UAAI,OAAO,SAAS,kBAAkB,CAAC,OAAO,OAAO;AACnD,cAAM,QAAQ,IAAI;AAClB;AAAA,MACF;AAEA,YAAM,QAAQ,MAAM;AACpB,YAAM,MAAM,KAAK,IAAI;AACrB,YAAM,OAAO,SAAS,IAAI,KAAK,KAAK;AAGpC,UAAI,MAAM,QAAQ,YAAY;AAC5B,iBAAS,IAAI,OAAO,GAAG;AAEvB,cAAM,QAAQ,QAAQ,IAAI,KAAK;AAC/B,YAAI,OAAO;AACT,uBAAa,MAAM,KAAK;AACxB,kBAAQ,OAAO,KAAK;AAAA,QACtB;AACA,cAAM,QAAQ,IAAI;AAClB;AAAA,MACF;AAGA,YAAM,WAAW,QAAQ,IAAI,KAAK;AAClC,UAAI,UAAU;AACZ,qBAAa,SAAS,KAAK;AAAA,MAC7B;AAEA,YAAM,QAAQ,KAAK,IAAI,GAAG,cAAc,MAAM,KAAK;AACnD,YAAM,QAAQ,WAAW,MAAM;AAC7B,cAAM,UAAU,QAAQ,IAAI,KAAK;AACjC,YAAI,CAAC,WAAW,QAAQ,UAAU,MAAO;AAEzC,gBAAQ,OAAO,KAAK;AACpB,YAAI,CAAC,MAAM,QAAQ;AACjB,mBAAS,IAAI,OAAO,KAAK,IAAI,CAAC;AAC9B,gBAAM,QAAQ,QAAQ,IAAI;AAAA,QAC5B;AAAA,MACF,GAAG,KAAK;AAER,cAAQ,IAAI,OAAO,EAAE,MAA
M,MAAM,CAAC;AAAA,IACpC;AAAA,IACA,QAAQ;AAEN,iBAAW,CAAC,EAAE,KAAK,KAAK,SAAS;AAC/B,qBAAa,MAAM,KAAK;AACxB,YAAI,CAAC,MAAM,QAAQ;AACjB,gBAAM,QAAQ,MAAM,IAAI;AAAA,QAC1B;AAAA,MACF;AACA,cAAQ,MAAM;AACd,eAAS,MAAM;AACf,YAAM,MAAM;AAAA,IACd;AAAA,IACA,IAAI,SAAS;AACX,aAAO,MAAM;AAAA,IACf;AAAA,EACF;AAEA,QAAM,UAAU,MAAM;AACpB,eAAW,CAAC,EAAE,KAAK,KAAK,SAAS;AAC/B,mBAAa,MAAM,KAAK;AAAA,IAC1B;AACA,YAAQ,MAAM;AACd,aAAS,MAAM;AAAA,EACjB;AAEA,SAAO,EAAE,YAAY,QAAQ;AAC/B;;;AChKA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,qBAA0C,IAAI,IAAI,cAAc;AAKtE,SAAS,sBACP,cACoC;AACpC,QAAM,SAAiC,CAAC;AACxC,aAAW,CAAC,KAAK,KAAK,KAAK,aAAa,QAAQ,GAAG;AACjD,QAAI,IAAI,WAAW,QAAQ,GAAG;AAC5B,aAAO,IAAI,MAAM,CAAC,CAAC,IAAI;AAAA,IACzB;AAAA,EACF;AACA,SAAO,OAAO,KAAK,MAAM,EAAE,SAAS,IAAI,SAAS;AACnD;AAMA,SAAS,eAAe,KAAgC;AACtD,QAAM,WAAW,IAAI,aAAa,OAAO,SAAS;AAClD,QAAM,cAAc,IAAI,aAAa,IAAI,QAAQ;AACjD,QAAM,aAAa,IAAI,aAAa,IAAI,OAAO;AAC/C,QAAM,cAAc,IAAI,aAAa,IAAI,QAAQ;AACjD,QAAM,SAAS,sBAAsB,IAAI,YAAY;AAGrD,MAAI,eAAe,CAAC,mBAAmB,IAAI,WAAW,GAAG;AACvD,WAAO;AAAA,MACL,mBAAmB,WAAW,qBAAqB,eAAe,KAAK,IAAI,CAAC;AAAA,MAC5E;AAAA,IACF;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,YAAY;AACd,YAAQ,OAAO,SAAS,YAAY,EAAE;AACtC,QAAI,OAAO,MAAM,KAAK,KAAK,QAAQ,GAAG;AACpC,aAAO,cAAc,iDAAiD,GAAG;AAAA,IAC3E;AAAA,EACF;AAGA,MAAI;AACJ,MAAI,aAAa;AACf,aAAS,OAAO,SAAS,aAAa,EAAE;AACxC,QAAI,OAAO,MAAM,MAAM,KAAK,SAAS,GAAG;AACtC,aAAO;AAAA,QACL;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,SAAS,SAAS,IAAI,WAAW;AAAA,IAC1C,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAKA,SAAS,yBAAyB,KAA+B;AAC/D,QAAM,WAAW,IAAI,aAAa,OAAO,SAAS;AAClD,QAAM,SAAS,sBAAsB,IAAI,YAAY;AAErD,SAAO;AAAA,IACL,SAAS,SAAS,SAAS,IAAI,WAAW;AAAA,IAC1C;AAAA,EACF;AACF;AAKA,SAAS,cACP,aACA,cACS;AACT,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,YAAY,GAAG;AACvD,QAAI,YAAY,GAAG,MAAM,MAAO,QAAO;AAAA,EACzC;AACA,SAAO;AACT;AAOO,SAAS,qBAId,SACA,SACgB;AAChB,QAAM,aAAa,SAAS,iBAAiB;AAC7C,QAAM,OAAO,SAAS;AAGtB,MAAI,QAAQ,CAAC,KAAK,cAAc;AAC9B,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAKA,iBAAe,kBACb,IACmB;AACn
B,QAAI;AACF,aAAO,MAAM,GAAG;AAAA,IAClB,SAAS,OAAO;AACd,UAAI,iBAAiB,SAAU,OAAM;AACrC,aAAO,cAAc,gBAAgB,KAAK,GAAG,GAAG;AAAA,IAClD;AAAA,EACF;AAGA,iBAAe,iBACb,KACA,KACA,WAC0D;AAC1D,UAAM,QAAQ,sBAAsB,KAAK,OAAO;AAChD,QAAI,iBAAiB,SAAU,QAAO;AAEtC,UAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,QAAI,CAAC,IAAK,QAAO,cAAc,iBAAiB,GAAG;AAEnD,QAAI,MAAM,eAAe,QAAQ,QAAW;AAC1C,YAAM,KAAK,YAAY,KAAiB,KAAqB;AAAA,QAC3D;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,EAAE,KAA0B,MAAM;AAAA,EAC3C;AAIA,iBAAe,cACb,SACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,OAAQ,MAAM,QAAQ,KAAK;AAEjC,UAAI,CAAC,KAAK,SAAS;AACjB,eAAO,cAAc,uBAAuB,GAAG;AAAA,MACjD;AAEA,YAAM,MAAM,QAAQ,OAAO,KAAK,OAAO;AACvC,UAAI,CAAC,KAAK;AACR,eAAO,cAAc,kBAAkB,KAAK,OAAO,IAAI,GAAG;AAAA,MAC5D;AAGA,UAAI,MAAM,aAAa,QAAQ,QAAW;AACxC,cAAM,KAAK,UAAU,KAAiB,IAAI;AAAA,MAC5C;AAEA,YAAM,MAAM,MAAM,IAAI;AAAA,QACnB,KAAK,SAAS,CAAC;AAAA,QAChB;AAAA,UACE,gBAAgB,KAAK;AAAA,UACrB,gBAAgB,KAAK;AAAA,UACrB,QAAQ,KAAK;AAAA,QACf;AAAA,MACF;AAEA,YAAM,WAA4B,EAAE,OAAO,IAAI,GAAG;AAClD,aAAO,aAAa,QAAQ;AAAA,IAC9B,CAAC;AAAA,EACH;AAEA,iBAAe,gBACb,KACA,KACmB;AACnB,UAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,WAAW;AAC3D,QAAI,kBAAkB,SAAU,QAAO;AAEvC,UAAM,SAAS,QAAQ,UAAU,OAAO,KAAK;AAC7C,UAAM,YAAY;AAAA,MAChB,OAAO,UAAU;AAAA,MACjB;AAAA,IACF;AACA,WAAO,kBAAkB,SAAS;AAAA,EACpC;AAEA,iBAAe,WACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,gBAAgB,eAAe,GAAG;AACxC,UAAI,yBAAyB,SAAU,QAAO;AAE9C,UAAI,SAA6B;AAGjC,UAAI,MAAM,aAAa,QAAQ,QAAW;AACxC,iBAAS,MAAM,KAAK,UAAU,KAAiB,MAAM;AAAA,MACvD;AAEA,YAAM,OAAO,MAAM,QAAQ,QAAQ,MAAM;AACzC,aAAO,aAAa,KAAK,IAAI,WAAW,CAAC;AAAA,IAC3C,CAAC;AAAA,EACH;AAEA,iBAAe,UACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,MAAM;AACtD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,aAAO,aAAa,YAAY,OAAO,GAAG,CAAC;AAAA,IAC7C,CAAC;AAAA,EACH;AAEA,iBAAe,YACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,OAAO;AACvD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,QAAQ,MAAM,QAAQ,QAAQ,SAAS,OAAO,KAAK;AACzD,aAAO,aAAa,KAAK;AAAA,IAC3B,CAAC;AAAA,EACH;AAEA,iBAAe,gBACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iB
AAiB,KAAK,KAAK,WAAW;AAC3D,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,MAAM,MAAM,QAAQ,UAAU,OAAO,KAAK;AAChD,aAAO,aAAa,EAAE,SAAS,MAAM,OAAO,IAAI,GAAG,CAAC;AAAA,IACtD,CAAC;AAAA,EACH;AAEA,iBAAe,aACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,QAAQ;AACxD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,QAAQ,OAAO,OAAO,KAAK;AACjC,aAAO,gBAAgB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,iBAAe,aACb,KACA,KACmB;AACnB,WAAO,kBAAkB,YAAY;AACnC,YAAM,SAAS,MAAM,iBAAiB,KAAK,KAAK,QAAQ;AACxD,UAAI,kBAAkB,SAAU,QAAO;AAEvC,YAAM,QAAQ,UAAU,OAAO,KAAK;AACpC,aAAO,gBAAgB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,iBAAe,oBACb,KACA,KACmB;AACnB,QAAI;AAEJ,QAAI,QAAQ,UAAa,MAAM,oBAAoB;AACjD,YAAM,SAAS;AAAA,QACb;AAAA,MACF;AACA,eAAS,MAAM,KAAK,mBAAmB,KAAiB,MAAM;AAAA,IAChE,WAAW,QAAQ,UAAa,MAAM,WAAW;AAE/C,YAAM,SAAS;AAAA,QACb;AAAA,MACF;AACA,YAAM,SAAS,MAAM,KAAK;AAAA,QACxB;AAAA,QACA;AAAA,UACE,GAAG;AAAA,QACL;AAAA,MACF;AACA,eAAS,EAAE,SAAS,OAAO,SAAS,QAAQ,OAAO,OAAO;AAAA,IAC5D,OAAO;AACL,eAAS,yBAAyB,GAAG;AAAA,IACvC;AAEA,WAAO,oBAAoB,MAAM;AAAA,EACnC;AAEA,WAAS,oBAAoB,QAAuC;AAClE,UAAM,gBAAgB,MAAM,QAAQ,OAAO,OAAO,IAC9C,OAAO,UACP,OAAO,UACL,CAAC,OAAO,OAAO,IACf,CAAC;AACP,UAAM,eAAe,OAAO;AAE5B,UAAM,gBAAgB,CACpB,SACA,WACG;AACH,UAAI,cAAc,SAAS,KAAK,CAAC,cAAc,SAAS,OAAO;AAC7D,eAAO;AACT,UACE,iBACC,CAAC,UACA,CAAC,cAAc,QAAQ,YAAsC;AAE/D,eAAO;AACT,aAAO;AAAA,IACT;AAEA,UAAM,YAAY;AAAA,MAChB,CAAC,cAAmC;AAClC,cAAM,EAAE,YAAY,MAAM,QAAQ,IAAI;AAAA,UACpC;AAAA,UACA;AAAA,QACF;AAEA,cAAM,eAAe;AAAA,UACnB,QAAQ,GAAG,eAAe,CAAC,UAAU;AACnC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,gBAAgB,CAAC,UAAU;AACpC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ
,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,YAAY,CAAC,UAAU;AAChC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,cAAc,CAAC,UAAU;AAClC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,cAAc,CAAC,UAAU;AAClC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,gBAAgB,CAAC,UAAU;AACpC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,cAAc,CAAC,UAAU;AAClC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,iBAAiB,CAAC,UAAU;AACrC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,OAAO,MAAM;AAAA,gBACb,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAAG,eAAe,CAAC,UAAU;AACnC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,UAAU,MAAM;AAAA,gBAChB,WAAW,MAAM;AAAA,gBACjB,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,UAED,QAAQ,GAA
G,aAAa,CAAC,UAAU;AACjC,gBAAI,cAAc,MAAM,SAAS,MAAM,MAAM,GAAG;AAC9C,mBAAK,QAAQ;AAAA,gBACX,MAAM;AAAA,gBACN,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,QAAQ,MAAM;AAAA,gBACd,UAAU,MAAM;AAAA,gBAChB,OAAO,MAAM;AAAA,gBACb,SAAS,MAAM;AAAA,gBACf,MAAM,MAAM;AAAA,cACd,CAAC;AAAA,YACH;AAAA,UACF,CAAC;AAAA,QACH;AAEA,eAAO,CAAC,GAAG,cAAc,OAAO;AAAA,MAClC;AAAA,IACF;AAEA,WAAO,kBAAkB,SAAS;AAAA,EACpC;AAIA,SAAO;AAAA,IACL,MAAM,OAAO,SAAkB,UAAqC;AAClE,UAAI;AAEF,YAAI;AACJ,YAAI,MAAM,cAAc;AACtB,gBAAM,MAAM,KAAK,aAAa,OAAO;AAAA,QACvC;AAGA,YAAI,SAAS,WAAW;AACtB,gBAAM,QAAQ,UAAU;AAAA,QAC1B;AAGA,cAAM,MAAM,IAAI,IAAI,QAAQ,GAAG;AAC/B,cAAM,OAAO,IAAI,SAAS,QAAQ,UAAU,EAAE;AAC9C,cAAM,SAAS,QAAQ;AAGvB,YAAI,WAAW,OAAO;AACpB,cAAI,SAAS,aAAc,QAAO,MAAM,gBAAgB,KAAK,GAAG;AAChE,cAAI,SAAS,QAAS,QAAO,MAAM,WAAW,KAAK,GAAG;AACtD,cAAI,SAAS,OAAQ,QAAO,MAAM,UAAU,KAAK,GAAG;AACpD,cAAI,SAAS,SAAU,QAAO,MAAM,YAAY,KAAK,GAAG;AACxD,cAAI,SAAS;AACX,mBAAO,MAAM,oBAAoB,KAAK,GAAG;AAAA,QAC7C;AAGA,YAAI,WAAW,QAAQ;AACrB,cAAI,SAAS,WAAY,QAAO,MAAM,cAAc,SAAS,GAAG;AAChE,cAAI,SAAS,aAAc,QAAO,MAAM,gBAAgB,KAAK,GAAG;AAChE,cAAI,SAAS,UAAW,QAAO,MAAM,aAAa,KAAK,GAAG;AAAA,QAC5D;AAGA,YAAI,WAAW,UAAU;AACvB,cAAI,SAAS,OAAQ,QAAO,MAAM,aAAa,KAAK,GAAG;AAAA,QACzD;AAEA,eAAO,IAAI,SAAS,aAAa,EAAE,QAAQ,IAAI,CAAC;AAAA,MAClD,SAAS,OAAO;AAEd,YAAI,iBAAiB,SAAU,QAAO;AACtC,eAAO,cAAc,gBAAgB,KAAK,GAAG,GAAG;AAAA,MAClD;AAAA,IACF;AAAA,EACF;AACF;","names":["prettifyError","prettifyError"]}
@@ -1,3 +1,3 @@
1
- export { N as withLogPersistence } from '../index-fppJjkF-.js';
1
+ export { M as withLogPersistence } from '../index-hM7-oiyj.js';
2
2
  import 'kysely';
3
3
  import 'zod';
package/docs/llms.md CHANGED
@@ -35,6 +35,7 @@ const durably = createDurably({
35
35
  pollingInterval: 1000, // Job polling interval (ms)
36
36
  heartbeatInterval: 5000, // Heartbeat update interval (ms)
37
37
  staleThreshold: 30000, // When to consider a job abandoned (ms)
38
+ cleanupSteps: true, // Delete step output data on terminal state (default: true)
38
39
  // Optional: type-safe labels with Zod schema
39
40
  // labels: z.object({ organizationId: z.string(), env: z.string() }),
40
41
  jobs: {
@@ -206,10 +207,12 @@ type MyRun = Run & {
206
207
  const typedRuns = await durably.getRuns<MyRun>({ jobName: 'my-job' })
207
208
  ```
208
209
 
209
- ### Retry Failed Runs
210
+ ### Retrigger Failed Runs
210
211
 
211
212
  ```ts
212
- await durably.retry(runId)
213
+ // Creates a fresh run (new ID) with the same input/options
214
+ const newRun = await durably.retrigger(runId)
215
+ console.log(newRun.id) // new run ID
213
216
  ```
214
217
 
215
218
  ### Cancel Runs
@@ -236,7 +239,6 @@ durably.on('run:complete', (e) => console.log('Done:', e.output))
236
239
  durably.on('run:fail', (e) => console.error('Failed:', e.error))
237
240
  durably.on('run:cancel', (e) => console.log('Cancelled:', e.runId))
238
241
  durably.on('run:delete', (e) => console.log('Deleted:', e.runId))
239
- durably.on('run:retry', (e) => console.log('Retried:', e.runId))
240
242
  durably.on('run:progress', (e) =>
241
243
  console.log('Progress:', e.progress.current, '/', e.progress.total),
242
244
  )
@@ -336,7 +338,7 @@ const handler = createDurablyHandler(durably, {
336
338
  }
337
339
  },
338
340
 
339
- // Guard before run-level operations (read, subscribe, steps, retry, cancel, delete)
341
+ // Guard before run-level operations (read, subscribe, steps, retrigger, cancel, delete)
340
342
  onRunAccess: async (ctx, run, { operation }) => {
341
343
  if (run.labels.organizationId !== ctx.orgId) {
342
344
  throw new Response('Forbidden', { status: 403 })
@@ -418,7 +420,7 @@ type RunOperation =
418
420
  | 'read'
419
421
  | 'subscribe'
420
422
  | 'steps'
421
- | 'retry'
423
+ | 'retrigger'
422
424
  | 'cancel'
423
425
  | 'delete'
424
426
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@coji/durably",
3
- "version": "0.11.0",
3
+ "version": "0.12.0",
4
4
  "description": "Step-oriented resumable batch execution for Node.js and browsers using SQLite",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",