@sonamu-kit/tasks 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client.d.ts +0 -1
- package/dist/client.d.ts.map +1 -1
- package/dist/client.js +0 -1
- package/dist/client.js.map +1 -1
- package/dist/core/duration.d.ts +1 -1
- package/dist/core/duration.js +1 -1
- package/dist/core/duration.js.map +1 -1
- package/dist/core/workflow.d.ts +2 -1
- package/dist/core/workflow.d.ts.map +1 -1
- package/dist/core/workflow.js +2 -1
- package/dist/core/workflow.js.map +1 -1
- package/dist/database/backend.d.ts.map +1 -1
- package/dist/database/backend.js +114 -0
- package/dist/database/backend.js.map +1 -1
- package/dist/database/pubsub.d.ts.map +1 -1
- package/dist/database/pubsub.js +9 -3
- package/dist/database/pubsub.js.map +1 -1
- package/package.json +7 -5
- package/src/client.ts +0 -1
- package/src/core/duration.ts +1 -1
- package/src/core/workflow.ts +2 -1
- package/src/database/backend.ts +90 -0
- package/src/database/pubsub.ts +9 -3
package/dist/client.d.ts
CHANGED
@@ -99,7 +99,6 @@ export declare class OpenWorkflow {
  * Declare a workflow without providing its implementation (which is provided
  * separately via `implementWorkflow`). Returns a lightweight WorkflowSpec
  * that can be used to schedule workflow runs.
- * @param config - Workflow config
  * @param spec - Workflow spec
  * @returns Workflow spec
  * @example
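The JSDoc above (repeated in the compiled client.js hunk below) drops a stale `@param config` tag; `declareWorkflow` takes a single spec argument. For context, a minimal sketch of the declare/implement/run split that comment describes, assembled from the `@example` blocks embedded in this file's docs — the root import path and the handler body are assumptions, not part of this diff:

```ts
import { z } from "zod";
// Assumed: these symbols are re-exported from the package entry point.
import { OpenWorkflow, declareWorkflow } from "@sonamu-kit/tasks";

// Declare only the spec; no implementation is attached yet.
export const emailWorkflow = declareWorkflow({
  name: "send-email",
  schema: z.object({ to: z.string().email() }),
});

declare const ow: OpenWorkflow; // constructed elsewhere with a Backend

// Bind the implementation separately, then schedule a run.
ow.implementWorkflow(emailWorkflow, async ({ input }) => {
  // hypothetical handler body
  return { delivered: true, to: input.to };
});

const handle = await ow.runWorkflow(emailWorkflow, { to: "user@example.com" });
const result = await handle.result();
```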
package/dist/client.d.ts.map
CHANGED
@@ -1 +1 @@
(minified source map regenerated; mappings omitted)
package/dist/client.js
CHANGED
@@ -129,7 +129,6 @@ const DEFAULT_RESULT_TIMEOUT_MS = 5 * 60 * 1000; // 5m
  * Declare a workflow without providing its implementation (which is provided
  * separately via `implementWorkflow`). Returns a lightweight WorkflowSpec
  * that can be used to schedule workflow runs.
- * @param config - Workflow config
  * @param spec - Workflow spec
  * @returns Workflow spec
  * @example
package/dist/client.js.map
CHANGED
@@ -1 +1 @@
(minified source map regenerated; mappings omitted)
package/dist/core/duration.d.ts
CHANGED
@@ -11,7 +11,7 @@ type Unit = Years | Months | Weeks | Days | Hours | Minutes | Seconds | Millisec
 type UnitAnyCase = Capitalize<Unit> | Uppercase<Unit> | Lowercase<Unit>;
 export type DurationString = `${number}` | `${number}${UnitAnyCase}` | `${number} ${UnitAnyCase}`;
 /**
- * Parse a duration string into milliseconds.
+ * Parse a duration string into milliseconds. Examples:
  * - short units: "1ms", "5s", "30m", "2h", "7d", "3w", "1y"
  * - long units: "1 millisecond", "5 seconds", "30 minutes", "2 hours", "7 days", "3 weeks", "1 year"
  * @param str - Duration string
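The expanded comment above enumerates the accepted unit spellings. A small illustration of what those forms evaluate to, based on the parser implementation carried elsewhere in this diff — the root import path is an assumption, and `parseDuration` returns the package's `Result` wrapper rather than a bare number:

```ts
// Assumed: parseDuration is reachable from the package entry point.
import { parseDuration } from "@sonamu-kit/tasks";

parseDuration("30m");           // ok(1_800_000)  -- same as "30 minutes"
parseDuration("2h");            // ok(7_200_000)  -- same as "2 hours"
parseDuration("1ms");           // ok(1)
parseDuration("250");           // ok(250)        -- a bare number defaults to milliseconds
parseDuration("soon" as never); // err(Error: Invalid duration format: "soon") -- cast only to satisfy DurationString
```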
package/dist/core/duration.js
CHANGED
@@ -1,6 +1,6 @@
 import { err, ok } from "./result.js";
 /**
- * Parse a duration string into milliseconds.
+ * Parse a duration string into milliseconds. Examples:
  * - short units: "1ms", "5s", "30m", "2h", "7d", "3w", "1y"
  * - long units: "1 millisecond", "5 seconds", "30 minutes", "2 hours", "7 days", "3 weeks", "1 year"
  * @param str - Duration string
package/dist/core/duration.js.map
CHANGED
@@ -1 +1 @@
(minified source map regenerated; mappings omitted)
package/dist/core/workflow.d.ts
CHANGED
@@ -72,7 +72,8 @@ export declare function validateInput<RunInput, Input>(schema: StandardSchemaV1<
 /**
  * Check if a workflow run status represents a terminal state.
  * @param status - The workflow run status
- * @returns True if the status is terminal (completed, failed, or canceled)
+ * @returns True if the status is terminal (succeeded, completed, failed, or canceled).
+ * Note: 'succeeded' is deprecated in favor of 'completed'.
  */
 export declare function isTerminalStatus(status: string): status is "succeeded" | "completed" | "failed" | "canceled";
 //# sourceMappingURL=workflow.d.ts.map
package/dist/core/workflow.d.ts.map
CHANGED
@@ -1 +1 @@
(minified source map regenerated; mappings omitted)
package/dist/core/workflow.js
CHANGED
@@ -38,7 +38,8 @@
 /**
  * Check if a workflow run status represents a terminal state.
  * @param status - The workflow run status
- * @returns True if the status is terminal (completed, failed, or canceled)
+ * @returns True if the status is terminal (succeeded, completed, failed, or canceled).
+ * Note: 'succeeded' is deprecated in favor of 'completed'.
  */ export function isTerminalStatus(status) {
     return status === "succeeded" || status === "completed" || status === "failed" || status === "canceled";
 }
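The reworded `@returns` note matters for code that polls run status: the deprecated `'succeeded'` value still narrows as terminal alongside `'completed'`. A brief sketch of the guard in use — the root import path is an assumption:

```ts
// Assumed: isTerminalStatus is reachable from the package entry point.
import { isTerminalStatus } from "@sonamu-kit/tasks";

function describeStatus(status: string): string {
  if (isTerminalStatus(status)) {
    // Narrowed to "succeeded" | "completed" | "failed" | "canceled";
    // the deprecated "succeeded" value is still treated as terminal.
    return `finished: ${status}`;
  }
  // Remaining lifecycle states: "pending", "running", "sleeping".
  return `in progress: ${status}`;
}

describeStatus("succeeded"); // "finished: succeeded"
describeStatus("sleeping");  // "in progress: sleeping"
```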
package/dist/core/workflow.js.map
CHANGED
@@ -1 +1 @@
(minified source map regenerated; mappings omitted)
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"backend.d.ts","sourceRoot":"","sources":["../../src/database/backend.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"backend.d.ts","sourceRoot":"","sources":["../../src/database/backend.ts"],"names":[],"mappings":"AAEA,OAAa,EAAE,KAAK,IAAI,EAAE,MAAM,MAAM,CAAC;AACvC,OAAO,EACL,KAAK,OAAO,EACZ,KAAK,uBAAuB,EAC5B,KAAK,sBAAsB,EAC3B,KAAK,yBAAyB,EAC9B,KAAK,yBAAyB,EAC9B,KAAK,uBAAuB,EAC5B,KAAK,uBAAuB,EAE5B,KAAK,4BAA4B,EACjC,KAAK,qBAAqB,EAC1B,KAAK,qBAAqB,EAC1B,KAAK,oBAAoB,EACzB,KAAK,oBAAoB,EACzB,KAAK,sBAAsB,EAC3B,KAAK,sBAAsB,EAC3B,KAAK,iBAAiB,EACtB,KAAK,sBAAsB,EAC5B,MAAM,YAAY,CAAC;AAEpB,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAChD,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAEpD,OAAO,EAAE,KAAK,YAAY,EAAkB,MAAM,UAAU,CAAC;AAE7D,eAAO,MAAM,sBAAsB,EAAG,WAAoB,CAAC;AAG3D,UAAU,sBAAsB;IAC9B,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,aAAa,CAAC,EAAE,OAAO,CAAC;IAGxB,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB;AAKD;;GAEG;AACH,qBAAa,eAAgB,YAAW,OAAO;IAC7C,OAAO,CAAC,MAAM,CAAc;IAC5B,OAAO,CAAC,WAAW,CAAS;IAC5B,OAAO,CAAC,SAAS,CAAU;IAC3B,OAAO,CAAC,MAAM,CAA+B;IAC7C,OAAO,CAAC,WAAW,CAAkB;IACrC,OAAO,CAAC,aAAa,CAAU;IAE/B,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,KAAK,IAAI,GAYf;gBAEW,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,sBAAsB;IAqC3D,UAAU;IAYV,SAAS,CAAC,QAAQ,EAAE,YAAY;IAgBhC,OAAO,CAAC,OAAO,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAgBxC,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAUrB,iBAAiB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CAAC,WAAW,CAAC;IAwCxE,cAAc,CAAC,MAAM,EAAE,oBAAoB,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC;IAuCzE,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAiC/F,OAAO,CAAC,0BAA0B;IAkB5B,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC;IAgE7E,sBAAsB,CAAC,MAAM,EAAE,4BAA4B,GAAG,OAAO,CAAC,WAAW,CAAC;IA+BlF,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,WAAW,CAAC;IAmCtE,mBAAmB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,WAAW,CAAC;IAqC5E,eAAe,CAAC,MAAM,EAAE,qBAAqB,GAAG,OAAO,CAAC,WAAW,CAAC;IAuDpE,iBAAiB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CAAC,WAAW,CAAC;IAuDxE,iBAAiB,CAAC,MAAM,EAAE,uBAAuB,GAAG,OAAO,CAAC,WAAW,CAAC;IAqCxE,cAAc,CAAC,MAAM,EAAE,oBAAoB,GAAG,OAAO,CAAC,WAAW,GAAG,IAAI,CAAC;IAiBzE,gBAAgB,CAAC,MAAM,EAAE,sBAAsB,GAAG,OAAO,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAC;IAmC/F,OAAO,CAAC,0BAA0B;IAmBlC,OAAO,CAAC,wBAAwB;IA0C1B,mBAAmB,CAAC,MAAM,EAAE,yBAAyB,GAAG,OAAO,CAAC,WAAW,CAAC;IAwC5E,eAAe,CAAC,MAAM,EAAE,qBAAqB,GAAG,OAAO,CAAC,WAAW,CAAC;CAwC3E;AAED;;;;;GAKG;AACH,UAAU,MAAM;IACd,SAAS,EAAE,IAAI,CAAC;IAChB,EAAE,EAAE,MAAM,CAAC;CACZ;AASD,wBAAgB,YAAY,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAOnD"}
package/dist/database/backend.js
CHANGED
@@ -1,3 +1,4 @@
+import { getLogger } from "@logtape/logtape";
 import { camelize } from "inflection";
 import knex from "knex";
 import { DEFAULT_NAMESPACE_ID } from "../backend.js";
@@ -6,6 +7,17 @@ import { DEFAULT_SCHEMA, migrate } from "./base.js";
 import { PostgresPubSub } from "./pubsub.js";
 export const DEFAULT_LISTEN_CHANNEL = "new_tasks";
 const DEFAULT_PAGINATION_PAGE_SIZE = 100;
+const logger = getLogger([
+    "sonamu",
+    "internal",
+    "tasks"
+]);
+const queryLogger = getLogger([
+    "sonamu",
+    "internal",
+    "tasks",
+    "query"
+]);
 /**
  * Manages a connection to a Postgres database for workflow operations.
  */ export class BackendPostgres {
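Note: the hunk above introduces LogTape loggers under the hierarchical categories ["sonamu", "internal", "tasks"] and ["sonamu", "internal", "tasks", "query"]. LogTape is designed so that library logs stay silent until the host application configures sinks, so an application embedding @sonamu-kit/tasks could surface these logs roughly as follows (a minimal sketch, not part of this package; the "lowestLevel" option is named "level" in older LogTape releases):

    import { configure, getConsoleSink } from "@logtape/logtape";

    // Route the package's internal categories to the console.
    // Child categories such as ["sonamu", "internal", "tasks", "query"]
    // inherit this configuration from the parent category.
    await configure({
      sinks: { console: getConsoleSink() },
      loggers: [
        { category: ["sonamu", "internal", "tasks"], lowestLevel: "debug", sinks: ["console"] },
      ],
    });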
@@ -19,6 +31,12 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
     get knex() {
         if (!this._knex) {
             this._knex = knex(this.config);
+            this._knex.on("query", (query)=>{
+                queryLogger.debug("SQL: {query}, Values: {bindings}", {
+                    query: query.sql,
+                    bindings: query.bindings
+                });
+            });
         }
         return this._knex;
     }
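For context, knex client instances are event emitters and fire a "query" event for every statement they execute; the payload carries the SQL text in query.sql and its bound parameters in query.bindings, which is what the listener added above forwards to queryLogger. A standalone sketch of the same hook (the connection settings are placeholders):

    import knex from "knex";

    const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

    // Same shape as the listener added in the BackendPostgres knex getter:
    // log each statement together with its bound parameters.
    db.on("query", (query) => {
      console.debug(`SQL: ${query.sql}`, query.bindings);
    });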
@@ -94,6 +112,10 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Creating workflow run: {workflowName}:{version}", {
+            workflowName: params.workflowName,
+            version: params.version
+        });
         const qb = this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").insert({
             namespace_id: this.namespaceId,
             id: crypto.randomUUID(),
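The messages added throughout this file use LogTape's structured templates: placeholders such as {workflowName} and {version} are resolved by key from the properties object passed as the second argument, so the values also remain available to sinks as structured data rather than being pre-formatted by the caller. A minimal sketch with illustrative values:

    import { getLogger } from "@logtape/logtape";

    const log = getLogger(["sonamu", "internal", "tasks"]);

    // "{workflowName}" and "{version}" are filled from the matching keys below.
    log.info("Creating workflow run: {workflowName}:{version}", {
      workflowName: "sendWelcomeEmail",
      version: "1",
    });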
@@ -112,6 +134,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         }).returning("*");
         const workflowRun = await qb;
         if (!workflowRun[0]) {
+            logger.error("Failed to create workflow run: {params}", {
+                params
+            });
             throw new Error("Failed to create workflow run");
         }
         return workflowRun[0];
@@ -120,6 +145,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Getting workflow run: {workflowRunId}", {
+            workflowRunId: params.workflowRunId
+        });
         const workflowRun = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).select("namespace_id", "id", "workflow_name", "version", "status", "idempotency_key", "config", "context", "input", "output", "error", "attempts", "parent_step_attempt_namespace_id", "parent_step_attempt_id", "worker_id", "available_at", "deadline_at", "started_at", "finished_at", "created_at", "updated_at").first();
         return workflowRun ?? null;
     }
@@ -127,6 +155,10 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Listing workflow runs: {after}, {before}", {
+            after: params.after,
+            before: params.before
+        });
         const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
         const { after, before } = params;
         let cursor = null;
@@ -155,6 +187,10 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Claiming workflow run: {workerId}, {leaseDurationMs}", {
+            workerId: params.workerId,
+            leaseDurationMs: params.leaseDurationMs
+        });
         const claimed = await this.knex.with("expired", (qb)=>qb.withSchema(DEFAULT_SCHEMA).table("workflow_runs").update({
             status: "failed",
             error: JSON.stringify({
@@ -188,11 +224,19 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Extending workflow run lease: {workflowRunId}, {workerId}, {leaseDurationMs}", {
+            workflowRunId: params.workflowRunId,
+            workerId: params.workerId,
+            leaseDurationMs: params.leaseDurationMs
+        });
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
             available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),
             updated_at: this.knex.fn.now()
         }).returning("*");
         if (!updated) {
+            logger.error("Failed to extend lease for workflow run: {params}", {
+                params
+            });
             throw new Error("Failed to extend lease for workflow run");
         }
         return updated;
@@ -201,6 +245,11 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Sleeping workflow run: {workflowRunId}, {workerId}, {availableAt}", {
+            workflowRunId: params.workflowRunId,
+            workerId: params.workerId,
+            availableAt: params.availableAt
+        });
         // 'succeeded' status is deprecated
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).whereNotIn("status", [
             "succeeded",
@@ -214,6 +263,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
             updated_at: this.knex.fn.now()
         }).returning("*");
         if (!updated) {
+            logger.error("Failed to sleep workflow run: {params}", {
+                params
+            });
             throw new Error("Failed to sleep workflow run");
         }
         return updated;
@@ -222,6 +274,11 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Completing workflow run: {workflowRunId}, {workerId}, {output}", {
+            workflowRunId: params.workflowRunId,
+            workerId: params.workerId,
+            output: params.output
+        });
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
             status: "completed",
             output: JSON.stringify(params.output),
@@ -232,6 +289,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
             updated_at: this.knex.fn.now()
         }).returning("*");
         if (!updated) {
+            logger.error("Failed to complete workflow run: {params}", {
+                params
+            });
             throw new Error("Failed to complete workflow run");
         }
         return updated;
@@ -250,6 +310,11 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         // 'available_at' timestamp for the next retry
         const retryIntervalExpr = `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, "attempts" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;
         const deadlineExceededCondition = `"deadline_at" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= "deadline_at"`;
+        logger.info("Failing workflow run: {workflowRunId}, {workerId}, {error}", {
+            workflowRunId: params.workflowRunId,
+            workerId: params.workerId,
+            error: params.error
+        });
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", workflowRunId).where("status", "running").where("worker_id", params.workerId).update({
             status: this.knex.raw(`CASE WHEN ${deadlineExceededCondition} THEN 'failed' ELSE 'pending' END`),
             available_at: this.knex.raw(`CASE WHEN ${deadlineExceededCondition} THEN NULL ELSE NOW() + (${retryIntervalExpr}) END`),
@@ -260,6 +325,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
             updated_at: this.knex.fn.now()
         }).returning("*");
         if (!updated) {
+            logger.error("Failed to mark workflow run failed: {params}", {
+                params
+            });
             throw new Error("Failed to mark workflow run failed");
         }
         return updated;
@@ -268,6 +336,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Canceling workflow run: {workflowRunId}", {
+            workflowRunId: params.workflowRunId
+        });
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("workflow_runs").where("namespace_id", this.namespaceId).where("id", params.workflowRunId).whereIn("status", [
             "pending",
             "running",
@@ -298,8 +369,15 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
                 "completed",
                 "failed"
             ].includes(existing.status)) {
+                logger.error("Cannot cancel workflow run: {params} with status {status}", {
+                    params,
+                    status: existing.status
+                });
                 throw new Error(`Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`);
             }
+            logger.error("Failed to cancel workflow run: {params}", {
+                params
+            });
             throw new Error("Failed to cancel workflow run");
         }
         return updated;
@@ -308,6 +386,11 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Creating step attempt: {workflowRunId}, {stepName}, {kind}", {
+            workflowRunId: params.workflowRunId,
+            stepName: params.stepName,
+            kind: params.kind
+        });
         const [stepAttempt] = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts").insert({
             namespace_id: this.namespaceId,
             id: crypto.randomUUID(),
@@ -322,6 +405,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
             updated_at: this.knex.fn.now()
         }).returning("*");
         if (!stepAttempt) {
+            logger.error("Failed to create step attempt: {params}", {
+                params
+            });
             throw new Error("Failed to create step attempt");
         }
         return stepAttempt;
@@ -330,6 +416,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Getting step attempt: {stepAttemptId}", {
+            stepAttemptId: params.stepAttemptId
+        });
         const stepAttempt = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts").where("namespace_id", this.namespaceId).where("id", params.stepAttemptId).first();
         return stepAttempt ?? null;
     }
@@ -337,6 +426,11 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Listing step attempts: {workflowRunId}, {after}, {before}", {
+            workflowRunId: params.workflowRunId,
+            after: params.after,
+            before: params.before
+        });
         const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
         const { after, before } = params;
         let cursor = null;
@@ -393,10 +487,16 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
             }
         };
     }
+    // NOTE: This does not seem to work in the actual service; the query and related logic need to be checked.
     async completeStepAttempt(params) {
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Marking step attempt as completed: {workflowRunId}, {stepAttemptId}, {workerId}", {
+            workflowRunId: params.workflowRunId,
+            stepAttemptId: params.stepAttemptId,
+            workerId: params.workerId
+        });
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts as sa").update({
             status: "completed",
             output: JSON.stringify(params.output),
@@ -405,6 +505,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
            updated_at: this.knex.fn.now()
         }).updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`).where("sa.namespace_id", this.namespaceId).where("sa.workflow_run_id", params.workflowRunId).where("sa.id", params.stepAttemptId).where("sa.status", "running").where("wr.namespace_id", this.knex.ref("sa.namespace_id")).where("wr.id", this.knex.ref("sa.workflow_run_id")).where("wr.status", "running").where("wr.worker_id", params.workerId).returning("sa.*");
         if (!updated) {
+            logger.error("Failed to mark step attempt completed: {params}", {
+                params
+            });
             throw new Error("Failed to mark step attempt completed");
         }
         return updated;
@@ -413,6 +516,14 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
         if (!this.initialized) {
             throw new Error("Backend not initialized");
         }
+        logger.info("Marking step attempt as failed: {workflowRunId}, {stepAttemptId}, {workerId}", {
+            workflowRunId: params.workflowRunId,
+            stepAttemptId: params.stepAttemptId,
+            workerId: params.workerId
+        });
+        logger.info("Error: {error.message}", {
+            error: params.error.message
+        });
         const [updated] = await this.knex.withSchema(DEFAULT_SCHEMA).table("step_attempts as sa").update({
             status: "failed",
             output: null,
@@ -421,6 +532,9 @@ const DEFAULT_PAGINATION_PAGE_SIZE = 100;
             updated_at: this.knex.fn.now()
         }).updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`).where("sa.namespace_id", this.namespaceId).where("sa.workflow_run_id", params.workflowRunId).where("sa.id", params.stepAttemptId).where("sa.status", "running").where("wr.namespace_id", this.knex.ref("sa.namespace_id")).where("wr.id", this.knex.ref("sa.workflow_run_id")).where("wr.status", "running").where("wr.worker_id", params.workerId).returning("sa.*");
         if (!updated) {
+            logger.error("Failed to mark step attempt failed: {params}", {
+                params
+            });
             throw new Error("Failed to mark step attempt failed");
         }
         return updated;
package/dist/database/backend.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/database/backend.ts"],"sourcesContent":["import { camelize } from \"inflection\";\nimport knex, { type Knex } from \"knex\";\nimport {\n type Backend,\n type CancelWorkflowRunParams,\n type ClaimWorkflowRunParams,\n type CompleteStepAttemptParams,\n type CompleteWorkflowRunParams,\n type CreateStepAttemptParams,\n type CreateWorkflowRunParams,\n DEFAULT_NAMESPACE_ID,\n type ExtendWorkflowRunLeaseParams,\n type FailStepAttemptParams,\n type FailWorkflowRunParams,\n type GetStepAttemptParams,\n type GetWorkflowRunParams,\n type ListStepAttemptsParams,\n type ListWorkflowRunsParams,\n type PaginatedResponse,\n type SleepWorkflowRunParams,\n} from \"../backend\";\nimport { DEFAULT_RETRY_POLICY } from \"../core/retry\";\nimport type { StepAttempt } from \"../core/step\";\nimport type { WorkflowRun } from \"../core/workflow\";\nimport { DEFAULT_SCHEMA, migrate } from \"./base\";\nimport { type OnSubscribed, PostgresPubSub } from \"./pubsub\";\n\nexport const DEFAULT_LISTEN_CHANNEL = \"new_tasks\" as const;\nconst DEFAULT_PAGINATION_PAGE_SIZE = 100 as const;\n\ninterface BackendPostgresOptions {\n namespaceId?: string;\n runMigrations?: boolean;\n\n // default: true\n usePubSub?: boolean;\n}\n\n/**\n * Manages a connection to a Postgres database for workflow operations.\n */\nexport class BackendPostgres implements Backend {\n private config: Knex.Config;\n private namespaceId: string;\n private usePubSub: boolean;\n private pubsub: PostgresPubSub | null = null;\n private initialized: boolean = false;\n private runMigrations: boolean;\n\n private _knex: Knex | null = null;\n private get knex(): Knex {\n if (!this._knex) {\n this._knex = knex(this.config);\n }\n\n return this._knex;\n }\n\n constructor(config: Knex.Config, options?: BackendPostgresOptions) {\n this.config = {\n ...config,\n postProcessResponse: (result, _queryContext) => {\n if (result === null || result === undefined) {\n return result;\n }\n\n if (config?.postProcessResponse) {\n result = config.postProcessResponse(result, _queryContext);\n }\n\n const camelizeRow = (row: Record<string, unknown>) =>\n Object.fromEntries(\n Object.entries(row).map(([key, value]) => [camelize(key, true), value]),\n );\n\n if (Array.isArray(result)) {\n return result.map(camelizeRow);\n }\n\n return camelizeRow(result);\n },\n };\n\n const { namespaceId, usePubSub, runMigrations } = {\n namespaceId: DEFAULT_NAMESPACE_ID,\n usePubSub: true,\n runMigrations: true,\n ...options,\n };\n\n this.namespaceId = namespaceId;\n this.usePubSub = usePubSub;\n this.runMigrations = runMigrations;\n }\n\n async initialize() {\n if (this.initialized) {\n return;\n }\n\n if (this.runMigrations) {\n await migrate(this.config, DEFAULT_SCHEMA);\n }\n\n this.initialized = true;\n }\n\n async subscribe(callback: OnSubscribed) {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n if (!this.usePubSub) {\n return;\n }\n\n if (!this.pubsub) {\n this.pubsub = await PostgresPubSub.create(this.knex);\n }\n\n this.pubsub.listenEvent(DEFAULT_LISTEN_CHANNEL, callback);\n }\n\n async publish(payload?: string): Promise<void> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n if (!this.usePubSub) {\n return;\n }\n\n await this.knex.raw(\n payload\n ? 
`NOTIFY ${DEFAULT_LISTEN_CHANNEL}, '${payload}'`\n : `NOTIFY ${DEFAULT_LISTEN_CHANNEL}`,\n );\n }\n\n async stop(): Promise<void> {\n if (!this.initialized) {\n return;\n }\n\n await this.pubsub?.destroy();\n this.pubsub = null;\n await this.knex.destroy();\n }\n\n async createWorkflowRun(params: CreateWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const qb = this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .insert({\n namespace_id: this.namespaceId,\n id: crypto.randomUUID(),\n workflow_name: params.workflowName,\n version: params.version,\n status: \"pending\",\n idempotency_key: params.idempotencyKey,\n config: params.config,\n context: params.context,\n input: params.input,\n attempts: 0,\n available_at: params.availableAt ?? this.knex.fn.now(),\n deadline_at: params.deadlineAt,\n created_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n const workflowRun = await qb;\n if (!workflowRun[0]) {\n throw new Error(\"Failed to create workflow run\");\n }\n\n return workflowRun[0];\n }\n\n async getWorkflowRun(params: GetWorkflowRunParams): Promise<WorkflowRun | null> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const workflowRun = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .select(\n \"namespace_id\",\n \"id\",\n \"workflow_name\",\n \"version\",\n \"status\",\n \"idempotency_key\",\n \"config\",\n \"context\",\n \"input\",\n \"output\",\n \"error\",\n \"attempts\",\n \"parent_step_attempt_namespace_id\",\n \"parent_step_attempt_id\",\n \"worker_id\",\n \"available_at\",\n \"deadline_at\",\n \"started_at\",\n \"finished_at\",\n \"created_at\",\n \"updated_at\",\n )\n .first();\n\n return workflowRun ?? null;\n }\n\n async listWorkflowRuns(params: ListWorkflowRunsParams): Promise<PaginatedResponse<WorkflowRun>> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;\n const { after, before } = params;\n\n let cursor: Cursor | null = null;\n if (after) {\n cursor = decodeCursor(after);\n } else if (before) {\n cursor = decodeCursor(before);\n }\n\n const qb = this.buildListWorkflowRunsWhere(params, cursor);\n const rows = await qb\n .orderBy(\"created_at\", before ? \"desc\" : \"asc\")\n .orderBy(\"id\", before ? \"desc\" : \"asc\")\n .limit(limit + 1);\n\n return this.processPaginationResults(\n rows,\n limit,\n typeof after === \"string\",\n typeof before === \"string\",\n );\n }\n\n private buildListWorkflowRunsWhere(params: ListWorkflowRunsParams, cursor: Cursor | null) {\n const { after } = params;\n const qb = this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId);\n\n if (cursor) {\n const operator = after ? 
\">\" : \"<\";\n return qb.whereRaw(`(\"created_at\", \"id\") ${operator} (?, ?)`, [\n cursor.createdAt.toISOString(),\n cursor.id,\n ]);\n }\n\n return qb;\n }\n\n async claimWorkflowRun(params: ClaimWorkflowRunParams): Promise<WorkflowRun | null> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const claimed = await this.knex\n .with(\"expired\", (qb) =>\n qb\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .update({\n status: \"failed\",\n error: JSON.stringify({ message: \"Workflow run deadline exceeded\" }),\n worker_id: null,\n available_at: null,\n finished_at: this.knex.raw(\"NOW()\"),\n updated_at: this.knex.raw(\"NOW()\"),\n })\n .where(\"namespace_id\", this.namespaceId)\n .whereIn(\"status\", [\"pending\", \"running\", \"sleeping\"])\n .whereNotNull(\"deadline_at\")\n .where(\"deadline_at\", \"<=\", this.knex.raw(\"NOW()\"))\n .returning(\"id\"),\n )\n .with(\"candidate\", (qb) =>\n qb\n .withSchema(DEFAULT_SCHEMA)\n .select(\"id\")\n .from(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .whereIn(\"status\", [\"pending\", \"running\", \"sleeping\"])\n .where(\"available_at\", \"<=\", this.knex.raw(\"NOW()\"))\n .where((qb2) => {\n qb2.whereNull(\"deadline_at\").orWhere(\"deadline_at\", \">\", this.knex.raw(\"NOW()\"));\n })\n .orderByRaw(\"CASE WHEN status = 'pending' THEN 0 ELSE 1 END\")\n .orderBy(\"available_at\", \"asc\")\n .orderBy(\"created_at\", \"asc\")\n .limit(1)\n .forUpdate()\n .skipLocked(),\n )\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs as wr\")\n .where(\"wr.namespace_id\", this.namespaceId)\n .where(\"wr.id\", this.knex.ref(\"candidate.id\"))\n .update({\n status: \"running\",\n attempts: this.knex.raw(\"wr.attempts + 1\"),\n worker_id: params.workerId,\n available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),\n started_at: this.knex.raw(\"COALESCE(wr.started_at, NOW())\"),\n updated_at: this.knex.raw(\"NOW()\"),\n })\n .updateFrom(\"candidate\")\n .returning(\"wr.*\");\n\n return claimed[0] ?? 
null;\n }\n\n async extendWorkflowRunLease(params: ExtendWorkflowRunLeaseParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .where(\"status\", \"running\")\n .where(\"worker_id\", params.workerId)\n .update({\n available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n throw new Error(\"Failed to extend lease for workflow run\");\n }\n\n return updated;\n }\n\n async sleepWorkflowRun(params: SleepWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n // 'succeeded' status is deprecated\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .whereNotIn(\"status\", [\"succeeded\", \"completed\", \"failed\", \"canceled\"])\n .where(\"worker_id\", params.workerId)\n .update({\n status: \"sleeping\",\n available_at: params.availableAt,\n worker_id: null,\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n throw new Error(\"Failed to sleep workflow run\");\n }\n\n return updated;\n }\n\n async completeWorkflowRun(params: CompleteWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .where(\"status\", \"running\")\n .where(\"worker_id\", params.workerId)\n .update({\n status: \"completed\",\n output: JSON.stringify(params.output),\n error: null,\n worker_id: params.workerId,\n available_at: null,\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n throw new Error(\"Failed to complete workflow run\");\n }\n\n return updated;\n }\n\n async failWorkflowRun(params: FailWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const { workflowRunId, error } = params;\n const { initialIntervalMs, backoffCoefficient, maximumIntervalMs } = DEFAULT_RETRY_POLICY;\n\n // this beefy query updates a workflow's status, available_at, and\n // finished_at based on the workflow's deadline and retry policy\n //\n // if the next retry would exceed the deadline, the run is marked as\n // 'failed' and finalized, otherwise, the run is rescheduled with an updated\n // 'available_at' timestamp for the next retry\n const retryIntervalExpr = `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, \"attempts\" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;\n const deadlineExceededCondition = `\"deadline_at\" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= \"deadline_at\"`;\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", workflowRunId)\n .where(\"status\", \"running\")\n .where(\"worker_id\", params.workerId)\n .update({\n status: this.knex.raw(\n `CASE WHEN ${deadlineExceededCondition} THEN 'failed' ELSE 'pending' END`,\n ),\n available_at: 
this.knex.raw(\n `CASE WHEN ${deadlineExceededCondition} THEN NULL ELSE NOW() + (${retryIntervalExpr}) END`,\n ),\n finished_at: this.knex.raw(\n `CASE WHEN ${deadlineExceededCondition} THEN NOW() ELSE NULL END`,\n ),\n error: JSON.stringify(error),\n worker_id: null,\n started_at: null,\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n throw new Error(\"Failed to mark workflow run failed\");\n }\n\n return updated;\n }\n\n async cancelWorkflowRun(params: CancelWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .whereIn(\"status\", [\"pending\", \"running\", \"sleeping\"])\n .update({\n status: \"canceled\",\n worker_id: null,\n available_at: null,\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n // workflow may already be in a terminal state\n const existing = await this.getWorkflowRun({\n workflowRunId: params.workflowRunId,\n });\n if (!existing) {\n throw new Error(`Workflow run ${params.workflowRunId} does not exist`);\n }\n\n // if already canceled, just return it\n if (existing.status === \"canceled\") {\n return existing;\n }\n\n // throw error for completed/failed workflows\n // 'succeeded' status is deprecated\n if ([\"succeeded\", \"completed\", \"failed\"].includes(existing.status)) {\n throw new Error(\n `Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`,\n );\n }\n\n throw new Error(\"Failed to cancel workflow run\");\n }\n\n return updated;\n }\n\n async createStepAttempt(params: CreateStepAttemptParams): Promise<StepAttempt> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const [stepAttempt] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts\")\n .insert({\n namespace_id: this.namespaceId,\n id: crypto.randomUUID(),\n workflow_run_id: params.workflowRunId,\n step_name: params.stepName,\n kind: params.kind,\n status: \"running\",\n config: JSON.stringify(params.config),\n context: JSON.stringify(params.context),\n started_at: this.knex.fn.now(),\n created_at: this.knex.raw(\"date_trunc('milliseconds', NOW())\"),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!stepAttempt) {\n throw new Error(\"Failed to create step attempt\");\n }\n\n return stepAttempt;\n }\n\n async getStepAttempt(params: GetStepAttemptParams): Promise<StepAttempt | null> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const stepAttempt = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.stepAttemptId)\n .first();\n\n return stepAttempt ?? null;\n }\n\n async listStepAttempts(params: ListStepAttemptsParams): Promise<PaginatedResponse<StepAttempt>> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;\n const { after, before } = params;\n\n let cursor: Cursor | null = null;\n if (after) {\n cursor = decodeCursor(after);\n } else if (before) {\n cursor = decodeCursor(before);\n }\n\n const qb = this.buildListStepAttemptsWhere(params, cursor);\n const rows = await qb\n .orderBy(\"created_at\", before ? 
\"desc\" : \"asc\")\n .orderBy(\"id\", before ? \"desc\" : \"asc\")\n .limit(limit + 1);\n\n return this.processPaginationResults(\n rows,\n limit,\n typeof after === \"string\",\n typeof before === \"string\",\n );\n }\n\n private buildListStepAttemptsWhere(params: ListStepAttemptsParams, cursor: Cursor | null) {\n const { after } = params;\n const qb = this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"workflow_run_id\", params.workflowRunId);\n\n if (cursor) {\n const operator = after ? \">\" : \"<\";\n return qb.whereRaw(`(\"created_at\", \"id\") ${operator} (?, ?)`, [\n cursor.createdAt.toISOString(),\n cursor.id,\n ]);\n }\n\n return qb;\n }\n\n private processPaginationResults<T extends Cursor>(\n rows: T[],\n limit: number,\n hasAfter: boolean,\n hasBefore: boolean,\n ): PaginatedResponse<T> {\n const data = rows;\n let hasNext = false;\n let hasPrev = false;\n\n if (hasBefore) {\n data.reverse();\n if (data.length > limit) {\n hasPrev = true;\n data.shift();\n }\n hasNext = true;\n } else {\n if (data.length > limit) {\n hasNext = true;\n data.pop();\n }\n if (hasAfter) {\n hasPrev = true;\n }\n }\n\n const lastItem = data.at(-1);\n const nextCursor = hasNext && lastItem ? encodeCursor(lastItem) : null;\n const firstItem = data[0];\n const prevCursor = hasPrev && firstItem ? encodeCursor(firstItem) : null;\n\n return {\n data,\n pagination: {\n next: nextCursor,\n prev: prevCursor,\n },\n };\n }\n\n async completeStepAttempt(params: CompleteStepAttemptParams): Promise<StepAttempt> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts as sa\")\n .update({\n status: \"completed\",\n output: JSON.stringify(params.output),\n error: null,\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`)\n .where(\"sa.namespace_id\", this.namespaceId)\n .where(\"sa.workflow_run_id\", params.workflowRunId)\n .where(\"sa.id\", params.stepAttemptId)\n .where(\"sa.status\", \"running\")\n .where(\"wr.namespace_id\", this.knex.ref(\"sa.namespace_id\"))\n .where(\"wr.id\", this.knex.ref(\"sa.workflow_run_id\"))\n .where(\"wr.status\", \"running\")\n .where(\"wr.worker_id\", params.workerId)\n .returning(\"sa.*\");\n\n if (!updated) {\n throw new Error(\"Failed to mark step attempt completed\");\n }\n\n return updated;\n }\n\n async failStepAttempt(params: FailStepAttemptParams): Promise<StepAttempt> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts as sa\")\n .update({\n status: \"failed\",\n output: null,\n error: JSON.stringify(params.error),\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`)\n .where(\"sa.namespace_id\", this.namespaceId)\n .where(\"sa.workflow_run_id\", params.workflowRunId)\n .where(\"sa.id\", params.stepAttemptId)\n .where(\"sa.status\", \"running\")\n .where(\"wr.namespace_id\", this.knex.ref(\"sa.namespace_id\"))\n .where(\"wr.id\", this.knex.ref(\"sa.workflow_run_id\"))\n .where(\"wr.status\", \"running\")\n .where(\"wr.worker_id\", params.workerId)\n .returning(\"sa.*\");\n\n if (!updated) {\n throw new Error(\"Failed to mark step attempt failed\");\n }\n\n return updated;\n }\n}\n\n/**\n * Cursor used for 
pagination. Requires created_at and id fields. Because JS\n * Date does not natively support microsecond precision dates, created_at should\n * be stored with millisecond precision in paginated tables to avoid issues with\n * cursor comparisons.\n */\ninterface Cursor {\n createdAt: Date;\n id: string;\n}\n\nfunction encodeCursor(item: Cursor): string {\n const encoded = Buffer.from(\n JSON.stringify({ createdAt: item.createdAt.toISOString(), id: item.id }),\n ).toString(\"base64\");\n return encoded;\n}\n\nexport function decodeCursor(cursor: string): Cursor {\n const decoded = Buffer.from(cursor, \"base64\").toString(\"utf8\");\n const parsed = JSON.parse(decoded) as { createdAt: string; id: string };\n return {\n createdAt: new Date(parsed.createdAt),\n id: parsed.id,\n };\n}\n"],"names":["camelize","knex","DEFAULT_NAMESPACE_ID","DEFAULT_RETRY_POLICY","DEFAULT_SCHEMA","migrate","PostgresPubSub","DEFAULT_LISTEN_CHANNEL","DEFAULT_PAGINATION_PAGE_SIZE","BackendPostgres","config","namespaceId","usePubSub","pubsub","initialized","runMigrations","_knex","options","postProcessResponse","result","_queryContext","undefined","camelizeRow","row","Object","fromEntries","entries","map","key","value","Array","isArray","initialize","subscribe","callback","Error","create","listenEvent","publish","payload","raw","stop","destroy","createWorkflowRun","params","qb","withSchema","table","insert","namespace_id","id","crypto","randomUUID","workflow_name","workflowName","version","status","idempotency_key","idempotencyKey","context","input","attempts","available_at","availableAt","fn","now","deadline_at","deadlineAt","created_at","updated_at","returning","workflowRun","getWorkflowRun","where","workflowRunId","select","first","listWorkflowRuns","limit","after","before","cursor","decodeCursor","buildListWorkflowRunsWhere","rows","orderBy","processPaginationResults","operator","whereRaw","createdAt","toISOString","claimWorkflowRun","claimed","with","update","error","JSON","stringify","message","worker_id","finished_at","whereIn","whereNotNull","from","qb2","whereNull","orWhere","orderByRaw","forUpdate","skipLocked","ref","workerId","leaseDurationMs","started_at","updateFrom","extendWorkflowRunLease","updated","sleepWorkflowRun","whereNotIn","completeWorkflowRun","output","failWorkflowRun","initialIntervalMs","backoffCoefficient","maximumIntervalMs","retryIntervalExpr","deadlineExceededCondition","cancelWorkflowRun","existing","includes","createStepAttempt","stepAttempt","workflow_run_id","step_name","stepName","kind","getStepAttempt","stepAttemptId","listStepAttempts","buildListStepAttemptsWhere","hasAfter","hasBefore","data","hasNext","hasPrev","reverse","length","shift","pop","lastItem","at","nextCursor","encodeCursor","firstItem","prevCursor","pagination","next","prev","completeStepAttempt","failStepAttempt","item","encoded","Buffer","toString","decoded","parsed","parse","Date"],"mappings":"AAAA,SAASA,QAAQ,QAAQ,aAAa;AACtC,OAAOC,UAAyB,OAAO;AACvC,SAQEC,oBAAoB,QAUf,gBAAa;AACpB,SAASC,oBAAoB,QAAQ,mBAAgB;AAGrD,SAASC,cAAc,EAAEC,OAAO,QAAQ,YAAS;AACjD,SAA4BC,cAAc,QAAQ,cAAW;AAE7D,OAAO,MAAMC,yBAAyB,YAAqB;AAC3D,MAAMC,+BAA+B;AAUrC;;CAEC,GACD,OAAO,MAAMC;IACHC,OAAoB;IACpBC,YAAoB;IACpBC,UAAmB;IACnBC,SAAgC,KAAK;IACrCC,cAAuB,MAAM;IAC7BC,cAAuB;IAEvBC,QAAqB,KAAK;IAClC,IAAYf,OAAa;QACvB,IAAI,CAAC,IAAI,CAACe,KAAK,EAAE;YACf,IAAI,CAACA,KAAK,GAAGf,KAAK,IAAI,CAACS,MAAM;QAC/B;QAEA,OAAO,IAAI,CAACM,KAAK;IACnB;IAEA,YAAYN,MAAmB,EAAEO,OAAgC,CAAE;QACjE,IAAI,CAACP,MAAM,GAAG;YACZ,GAAGA,MAAM;YACTQ,qBAAqB,CAACC,QAAQC;gBAC5B,IAAID,WAAW,QAAQA,WAAWE,WA
AW;oBAC3C,OAAOF;gBACT;gBAEA,IAAIT,QAAQQ,qBAAqB;oBAC/BC,SAAST,OAAOQ,mBAAmB,CAACC,QAAQC;gBAC9C;gBAEA,MAAME,cAAc,CAACC,MACnBC,OAAOC,WAAW,CAChBD,OAAOE,OAAO,CAACH,KAAKI,GAAG,CAAC,CAAC,CAACC,KAAKC,MAAM,GAAK;4BAAC7B,SAAS4B,KAAK;4BAAOC;yBAAM;gBAG1E,IAAIC,MAAMC,OAAO,CAACZ,SAAS;oBACzB,OAAOA,OAAOQ,GAAG,CAACL;gBACpB;gBAEA,OAAOA,YAAYH;YACrB;QACF;QAEA,MAAM,EAAER,WAAW,EAAEC,SAAS,EAAEG,aAAa,EAAE,GAAG;YAChDJ,aAAaT;YACbU,WAAW;YACXG,eAAe;YACf,GAAGE,OAAO;QACZ;QAEA,IAAI,CAACN,WAAW,GAAGA;QACnB,IAAI,CAACC,SAAS,GAAGA;QACjB,IAAI,CAACG,aAAa,GAAGA;IACvB;IAEA,MAAMiB,aAAa;QACjB,IAAI,IAAI,CAAClB,WAAW,EAAE;YACpB;QACF;QAEA,IAAI,IAAI,CAACC,aAAa,EAAE;YACtB,MAAMV,QAAQ,IAAI,CAACK,MAAM,EAAEN;QAC7B;QAEA,IAAI,CAACU,WAAW,GAAG;IACrB;IAEA,MAAMmB,UAAUC,QAAsB,EAAE;QACtC,IAAI,CAAC,IAAI,CAACpB,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,IAAI,CAAC,IAAI,CAACvB,SAAS,EAAE;YACnB;QACF;QAEA,IAAI,CAAC,IAAI,CAACC,MAAM,EAAE;YAChB,IAAI,CAACA,MAAM,GAAG,MAAMP,eAAe8B,MAAM,CAAC,IAAI,CAACnC,IAAI;QACrD;QAEA,IAAI,CAACY,MAAM,CAACwB,WAAW,CAAC9B,wBAAwB2B;IAClD;IAEA,MAAMI,QAAQC,OAAgB,EAAiB;QAC7C,IAAI,CAAC,IAAI,CAACzB,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,IAAI,CAAC,IAAI,CAACvB,SAAS,EAAE;YACnB;QACF;QAEA,MAAM,IAAI,CAACX,IAAI,CAACuC,GAAG,CACjBD,UACI,CAAC,OAAO,EAAEhC,uBAAuB,GAAG,EAAEgC,QAAQ,CAAC,CAAC,GAChD,CAAC,OAAO,EAAEhC,wBAAwB;IAE1C;IAEA,MAAMkC,OAAsB;QAC1B,IAAI,CAAC,IAAI,CAAC3B,WAAW,EAAE;YACrB;QACF;QAEA,MAAM,IAAI,CAACD,MAAM,EAAE6B;QACnB,IAAI,CAAC7B,MAAM,GAAG;QACd,MAAM,IAAI,CAACZ,IAAI,CAACyC,OAAO;IACzB;IAEA,MAAMC,kBAAkBC,MAA+B,EAAwB;QAC7E,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAMU,KAAK,IAAI,CAAC5C,IAAI,CACjB6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACNC,MAAM,CAAC;YACNC,cAAc,IAAI,CAACtC,WAAW;YAC9BuC,IAAIC,OAAOC,UAAU;YACrBC,eAAeT,OAAOU,YAAY;YAClCC,SAASX,OAAOW,OAAO;YACvBC,QAAQ;YACRC,iBAAiBb,OAAOc,cAAc;YACtChD,QAAQkC,OAAOlC,MAAM;YACrBiD,SAASf,OAAOe,OAAO;YACvBC,OAAOhB,OAAOgB,KAAK;YACnBC,UAAU;YACVC,cAAclB,OAAOmB,WAAW,IAAI,IAAI,CAAC9D,IAAI,CAAC+D,EAAE,CAACC,GAAG;YACpDC,aAAatB,OAAOuB,UAAU;YAC9BC,YAAY,IAAI,CAACnE,IAAI,CAAC+D,EAAE,CAACC,GAAG;YAC5BI,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,MAAMC,cAAc,MAAM1B;QAC1B,IAAI,CAAC0B,WAAW,CAAC,EAAE,EAAE;YACnB,MAAM,IAAIpC,MAAM;QAClB;QAEA,OAAOoC,WAAW,CAAC,EAAE;IACvB;IAEA,MAAMC,eAAe5B,MAA4B,EAA+B;QAC9E,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAMoC,cAAc,MAAM,IAAI,CAACtE,IAAI,CAChC6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAM7B,OAAO8B,aAAa,EAChCC,MAAM,CACL,gBACA,MACA,iBACA,WACA,UACA,mBACA,UACA,WACA,SACA,UACA,SACA,YACA,oCACA,0BACA,aACA,gBACA,eACA,cACA,eACA,cACA,cAEDC,KAAK;QAER,OAAOL,eAAe;IACxB;IAEA,MAAMM,iBAAiBjC,MAA8B,EAA2C;QAC9F,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM2C,QAAQlC,OAAOkC,KAAK,IAAItE;QAC9B,MAAM,EAAEuE,KAAK,EAAEC,MAAM,EAAE,GAAGpC;QAE1B,IAAIqC,SAAwB;QAC5B,IAAIF,OAAO;YACTE,SAASC,aAAaH;QACxB,OAAO,IAAIC,QAAQ;YACjBC,SAASC,aAAaF;QACxB;QAEA,MAAMnC,KAAK,IAAI,CAACsC,0BAA0B,CAACvC,QAAQqC;QACnD,MAAMG,OAAO,MAAMvC,GAChBwC,OAAO,CAAC,cAAcL,SAAS,SAAS,OACxCK,OAAO,CAAC,MAAML,SAAS,SAAS,OAChCF,KAAK,CAACA,QAAQ;QAEjB,OAAO,IAAI,CAACQ,wBAAwB,CAClCF,MACAN,OACA,OAAOC,UAAU,UACjB,OAAOC,WAAW;IAEtB;IAEQG,2BAA2BvC,MAA8B,EAAEqC,MAAqB,EAAE;QACxF,MAAM,EAAEF,KAAK,EAAE,GAAGnC;QAClB,MAAMC,KAAK,IAAI,CAAC5C,IAAI,CACjB6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW;QAEzC,IAAIsE,QAAQ;YACV,MAAMM,WAAWR,QAAQ,MAAM;YAC/B,OAAOlC,GAAG2C,QAAQ,CAAC,CAAC,qBAAqB,EAAED,SAAS,OAAO,CAAC,EAAE;gBAC5DN,OAAOQ,SAAS,CAACC,WAAW;gBAC5BT,OAAO/B,EAAE;aACV;QACH;QAEA,OAAOL;IACT;IAEA,MAAM8C,iBAAiB/
C,MAA8B,EAA+B;QAClF,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAMyD,UAAU,MAAM,IAAI,CAAC3F,IAAI,CAC5B4F,IAAI,CAAC,WAAW,CAAChD,KAChBA,GACGC,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN+C,MAAM,CAAC;gBACNtC,QAAQ;gBACRuC,OAAOC,KAAKC,SAAS,CAAC;oBAAEC,SAAS;gBAAiC;gBAClEC,WAAW;gBACXrC,cAAc;gBACdsC,aAAa,IAAI,CAACnG,IAAI,CAACuC,GAAG,CAAC;gBAC3B6B,YAAY,IAAI,CAACpE,IAAI,CAACuC,GAAG,CAAC;YAC5B,GACCiC,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC0F,OAAO,CAAC,UAAU;gBAAC;gBAAW;gBAAW;aAAW,EACpDC,YAAY,CAAC,eACb7B,KAAK,CAAC,eAAe,MAAM,IAAI,CAACxE,IAAI,CAACuC,GAAG,CAAC,UACzC8B,SAAS,CAAC,OAEduB,IAAI,CAAC,aAAa,CAAChD,KAClBA,GACGC,UAAU,CAAC1C,gBACXuE,MAAM,CAAC,MACP4B,IAAI,CAAC,iBACL9B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC0F,OAAO,CAAC,UAAU;gBAAC;gBAAW;gBAAW;aAAW,EACpD5B,KAAK,CAAC,gBAAgB,MAAM,IAAI,CAACxE,IAAI,CAACuC,GAAG,CAAC,UAC1CiC,KAAK,CAAC,CAAC+B;gBACNA,IAAIC,SAAS,CAAC,eAAeC,OAAO,CAAC,eAAe,KAAK,IAAI,CAACzG,IAAI,CAACuC,GAAG,CAAC;YACzE,GACCmE,UAAU,CAAC,kDACXtB,OAAO,CAAC,gBAAgB,OACxBA,OAAO,CAAC,cAAc,OACtBP,KAAK,CAAC,GACN8B,SAAS,GACTC,UAAU,IAEd/D,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,uBACN0B,KAAK,CAAC,mBAAmB,IAAI,CAAC9D,WAAW,EACzC8D,KAAK,CAAC,SAAS,IAAI,CAACxE,IAAI,CAAC6G,GAAG,CAAC,iBAC7BhB,MAAM,CAAC;YACNtC,QAAQ;YACRK,UAAU,IAAI,CAAC5D,IAAI,CAACuC,GAAG,CAAC;YACxB2D,WAAWvD,OAAOmE,QAAQ;YAC1BjD,cAAc,IAAI,CAAC7D,IAAI,CAACuC,GAAG,CAAC,CAAC,QAAQ,EAAEI,OAAOoE,eAAe,CAAC,2BAA2B,CAAC;YAC1FC,YAAY,IAAI,CAAChH,IAAI,CAACuC,GAAG,CAAC;YAC1B6B,YAAY,IAAI,CAACpE,IAAI,CAACuC,GAAG,CAAC;QAC5B,GACC0E,UAAU,CAAC,aACX5C,SAAS,CAAC;QAEb,OAAOsB,OAAO,CAAC,EAAE,IAAI;IACvB;IAEA,MAAMuB,uBAAuBvE,MAAoC,EAAwB;QACvF,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,CAACiF,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAM7B,OAAO8B,aAAa,EAChCD,KAAK,CAAC,UAAU,WAChBA,KAAK,CAAC,aAAa7B,OAAOmE,QAAQ,EAClCjB,MAAM,CAAC;YACNhC,cAAc,IAAI,CAAC7D,IAAI,CAACuC,GAAG,CAAC,CAAC,QAAQ,EAAEI,OAAOoE,eAAe,CAAC,2BAA2B,CAAC;YAC1F3C,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,MAAM,IAAIjF,MAAM;QAClB;QAEA,OAAOiF;IACT;IAEA,MAAMC,iBAAiBzE,MAA8B,EAAwB;QAC3E,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,mCAAmC;QACnC,MAAM,CAACiF,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAM7B,OAAO8B,aAAa,EAChC4C,UAAU,CAAC,UAAU;YAAC;YAAa;YAAa;YAAU;SAAW,EACrE7C,KAAK,CAAC,aAAa7B,OAAOmE,QAAQ,EAClCjB,MAAM,CAAC;YACNtC,QAAQ;YACRM,cAAclB,OAAOmB,WAAW;YAChCoC,WAAW;YACX9B,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,MAAM,IAAIjF,MAAM;QAClB;QAEA,OAAOiF;IACT;IAEA,MAAMG,oBAAoB3E,MAAiC,EAAwB;QACjF,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,CAACiF,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAM7B,OAAO8B,aAAa,EAChCD,KAAK,CAAC,UAAU,WAChBA,KAAK,CAAC,aAAa7B,OAAOmE,QAAQ,EAClCjB,MAAM,CAAC;YACNtC,QAAQ;YACRgE,QAAQxB,KAAKC,SAAS,CAACrD,OAAO4E,MAAM;YACpCzB,OAAO;YACPI,WAAWvD,OAAOmE,QAAQ;YAC1BjD,cAAc;YACdsC,aAAa,IAAI,CAACnG,IAAI,CAAC+D,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,MAAM,IAAIjF,MAAM;QAClB;QAEA,OAAOiF;IACT;IAEA,MAAMK,gBAAgB7E,MAA6B,EAAwB;QACzE,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,EAAEuC,aAAa,EAAEqB,KAAK,EAAE,GAAGnD;QACjC,MAAM,EAAE8E,iBAAiB,EAAEC,kBAAkB,EAAEC,iBAAiB,EAAE,GAAGzH;QAErE,kEAAkE;QA
ClE,gEAAgE;QAChE,EAAE;QACF,oEAAoE;QACpE,4EAA4E;QAC5E,8CAA8C;QAC9C,MAAM0H,oBAAoB,CAAC,MAAM,EAAEH,kBAAkB,SAAS,EAAEC,mBAAmB,mBAAmB,EAAEC,kBAAkB,4BAA4B,CAAC;QACvJ,MAAME,4BAA4B,CAAC,uCAAuC,EAAED,kBAAkB,kBAAkB,CAAC;QAEjH,MAAM,CAACT,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAMC,eACZD,KAAK,CAAC,UAAU,WAChBA,KAAK,CAAC,aAAa7B,OAAOmE,QAAQ,EAClCjB,MAAM,CAAC;YACNtC,QAAQ,IAAI,CAACvD,IAAI,CAACuC,GAAG,CACnB,CAAC,UAAU,EAAEsF,0BAA0B,iCAAiC,CAAC;YAE3EhE,cAAc,IAAI,CAAC7D,IAAI,CAACuC,GAAG,CACzB,CAAC,UAAU,EAAEsF,0BAA0B,yBAAyB,EAAED,kBAAkB,KAAK,CAAC;YAE5FzB,aAAa,IAAI,CAACnG,IAAI,CAACuC,GAAG,CACxB,CAAC,UAAU,EAAEsF,0BAA0B,yBAAyB,CAAC;YAEnE/B,OAAOC,KAAKC,SAAS,CAACF;YACtBI,WAAW;YACXc,YAAY;YACZ5C,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,MAAM,IAAIjF,MAAM;QAClB;QAEA,OAAOiF;IACT;IAEA,MAAMW,kBAAkBnF,MAA+B,EAAwB;QAC7E,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,CAACiF,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAM7B,OAAO8B,aAAa,EAChC2B,OAAO,CAAC,UAAU;YAAC;YAAW;YAAW;SAAW,EACpDP,MAAM,CAAC;YACNtC,QAAQ;YACR2C,WAAW;YACXrC,cAAc;YACdsC,aAAa,IAAI,CAACnG,IAAI,CAAC+D,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,8CAA8C;YAC9C,MAAMY,WAAW,MAAM,IAAI,CAACxD,cAAc,CAAC;gBACzCE,eAAe9B,OAAO8B,aAAa;YACrC;YACA,IAAI,CAACsD,UAAU;gBACb,MAAM,IAAI7F,MAAM,CAAC,aAAa,EAAES,OAAO8B,aAAa,CAAC,eAAe,CAAC;YACvE;YAEA,sCAAsC;YACtC,IAAIsD,SAASxE,MAAM,KAAK,YAAY;gBAClC,OAAOwE;YACT;YAEA,6CAA6C;YAC7C,mCAAmC;YACnC,IAAI;gBAAC;gBAAa;gBAAa;aAAS,CAACC,QAAQ,CAACD,SAASxE,MAAM,GAAG;gBAClE,MAAM,IAAIrB,MACR,CAAC,2BAA2B,EAAES,OAAO8B,aAAa,CAAC,aAAa,EAAEsD,SAASxE,MAAM,EAAE;YAEvF;YAEA,MAAM,IAAIrB,MAAM;QAClB;QAEA,OAAOiF;IACT;IAEA,MAAMc,kBAAkBtF,MAA+B,EAAwB;QAC7E,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,CAACgG,YAAY,GAAG,MAAM,IAAI,CAAClI,IAAI,CAClC6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACNC,MAAM,CAAC;YACNC,cAAc,IAAI,CAACtC,WAAW;YAC9BuC,IAAIC,OAAOC,UAAU;YACrBgF,iBAAiBxF,OAAO8B,aAAa;YACrC2D,WAAWzF,OAAO0F,QAAQ;YAC1BC,MAAM3F,OAAO2F,IAAI;YACjB/E,QAAQ;YACR9C,QAAQsF,KAAKC,SAAS,CAACrD,OAAOlC,MAAM;YACpCiD,SAASqC,KAAKC,SAAS,CAACrD,OAAOe,OAAO;YACtCsD,YAAY,IAAI,CAAChH,IAAI,CAAC+D,EAAE,CAACC,GAAG;YAC5BG,YAAY,IAAI,CAACnE,IAAI,CAACuC,GAAG,CAAC;YAC1B6B,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC6D,aAAa;YAChB,MAAM,IAAIhG,MAAM;QAClB;QAEA,OAAOgG;IACT;IAEA,MAAMK,eAAe5F,MAA4B,EAA+B;QAC9E,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAMgG,cAAc,MAAM,IAAI,CAAClI,IAAI,CAChC6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,MAAM7B,OAAO6F,aAAa,EAChC7D,KAAK;QAER,OAAOuD,eAAe;IACxB;IAEA,MAAMO,iBAAiB9F,MAA8B,EAA2C;QAC9F,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM2C,QAAQlC,OAAOkC,KAAK,IAAItE;QAC9B,MAAM,EAAEuE,KAAK,EAAEC,MAAM,EAAE,GAAGpC;QAE1B,IAAIqC,SAAwB;QAC5B,IAAIF,OAAO;YACTE,SAASC,aAAaH;QACxB,OAAO,IAAIC,QAAQ;YACjBC,SAASC,aAAaF;QACxB;QAEA,MAAMnC,KAAK,IAAI,CAAC8F,0BAA0B,CAAC/F,QAAQqC;QACnD,MAAMG,OAAO,MAAMvC,GAChBwC,OAAO,CAAC,cAAcL,SAAS,SAAS,OACxCK,OAAO,CAAC,MAAML,SAAS,SAAS,OAChCF,KAAK,CAACA,QAAQ;QAEjB,OAAO,IAAI,CAACQ,wBAAwB,CAClCF,MACAN,OACA,OAAOC,UAAU,UACjB,OAAOC,WAAW;IAEtB;IAEQ2D,2BAA2B/F,MAA8B,EAAEqC,MAAqB,EAAE;QACxF,MAAM,EAAEF,KAAK,EAAE,GAAGnC;QAClB,MAAMC,KAAK,IAAI,CAAC5C,IAAI,CACjB6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,iBACN0B,KAAK,CAAC,g
BAAgB,IAAI,CAAC9D,WAAW,EACtC8D,KAAK,CAAC,mBAAmB7B,OAAO8B,aAAa;QAEhD,IAAIO,QAAQ;YACV,MAAMM,WAAWR,QAAQ,MAAM;YAC/B,OAAOlC,GAAG2C,QAAQ,CAAC,CAAC,qBAAqB,EAAED,SAAS,OAAO,CAAC,EAAE;gBAC5DN,OAAOQ,SAAS,CAACC,WAAW;gBAC5BT,OAAO/B,EAAE;aACV;QACH;QAEA,OAAOL;IACT;IAEQyC,yBACNF,IAAS,EACTN,KAAa,EACb8D,QAAiB,EACjBC,SAAkB,EACI;QACtB,MAAMC,OAAO1D;QACb,IAAI2D,UAAU;QACd,IAAIC,UAAU;QAEd,IAAIH,WAAW;YACbC,KAAKG,OAAO;YACZ,IAAIH,KAAKI,MAAM,GAAGpE,OAAO;gBACvBkE,UAAU;gBACVF,KAAKK,KAAK;YACZ;YACAJ,UAAU;QACZ,OAAO;YACL,IAAID,KAAKI,MAAM,GAAGpE,OAAO;gBACvBiE,UAAU;gBACVD,KAAKM,GAAG;YACV;YACA,IAAIR,UAAU;gBACZI,UAAU;YACZ;QACF;QAEA,MAAMK,WAAWP,KAAKQ,EAAE,CAAC,CAAC;QAC1B,MAAMC,aAAaR,WAAWM,WAAWG,aAAaH,YAAY;QAClE,MAAMI,YAAYX,IAAI,CAAC,EAAE;QACzB,MAAMY,aAAaV,WAAWS,YAAYD,aAAaC,aAAa;QAEpE,OAAO;YACLX;YACAa,YAAY;gBACVC,MAAML;gBACNM,MAAMH;YACR;QACF;IACF;IAEA,MAAMI,oBAAoBlH,MAAiC,EAAwB;QACjF,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,CAACiF,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,uBACN+C,MAAM,CAAC;YACNtC,QAAQ;YACRgE,QAAQxB,KAAKC,SAAS,CAACrD,OAAO4E,MAAM;YACpCzB,OAAO;YACPK,aAAa,IAAI,CAACnG,IAAI,CAAC+D,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCiD,UAAU,CAAC,GAAG9G,eAAe,oBAAoB,CAAC,EAClDqE,KAAK,CAAC,mBAAmB,IAAI,CAAC9D,WAAW,EACzC8D,KAAK,CAAC,sBAAsB7B,OAAO8B,aAAa,EAChDD,KAAK,CAAC,SAAS7B,OAAO6F,aAAa,EACnChE,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,mBAAmB,IAAI,CAACxE,IAAI,CAAC6G,GAAG,CAAC,oBACvCrC,KAAK,CAAC,SAAS,IAAI,CAACxE,IAAI,CAAC6G,GAAG,CAAC,uBAC7BrC,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,gBAAgB7B,OAAOmE,QAAQ,EACrCzC,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,MAAM,IAAIjF,MAAM;QAClB;QAEA,OAAOiF;IACT;IAEA,MAAM2C,gBAAgBnH,MAA6B,EAAwB;QACzE,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAIqB,MAAM;QAClB;QAEA,MAAM,CAACiF,QAAQ,GAAG,MAAM,IAAI,CAACnH,IAAI,CAC9B6C,UAAU,CAAC1C,gBACX2C,KAAK,CAAC,uBACN+C,MAAM,CAAC;YACNtC,QAAQ;YACRgE,QAAQ;YACRzB,OAAOC,KAAKC,SAAS,CAACrD,OAAOmD,KAAK;YAClCK,aAAa,IAAI,CAACnG,IAAI,CAAC+D,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAACpE,IAAI,CAAC+D,EAAE,CAACC,GAAG;QAC9B,GACCiD,UAAU,CAAC,GAAG9G,eAAe,oBAAoB,CAAC,EAClDqE,KAAK,CAAC,mBAAmB,IAAI,CAAC9D,WAAW,EACzC8D,KAAK,CAAC,sBAAsB7B,OAAO8B,aAAa,EAChDD,KAAK,CAAC,SAAS7B,OAAO6F,aAAa,EACnChE,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,mBAAmB,IAAI,CAACxE,IAAI,CAAC6G,GAAG,CAAC,oBACvCrC,KAAK,CAAC,SAAS,IAAI,CAACxE,IAAI,CAAC6G,GAAG,CAAC,uBAC7BrC,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,gBAAgB7B,OAAOmE,QAAQ,EACrCzC,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,MAAM,IAAIjF,MAAM;QAClB;QAEA,OAAOiF;IACT;AACF;AAaA,SAASoC,aAAaQ,IAAY;IAChC,MAAMC,UAAUC,OAAO3D,IAAI,CACzBP,KAAKC,SAAS,CAAC;QAAER,WAAWuE,KAAKvE,SAAS,CAACC,WAAW;QAAIxC,IAAI8G,KAAK9G,EAAE;IAAC,IACtEiH,QAAQ,CAAC;IACX,OAAOF;AACT;AAEA,OAAO,SAAS/E,aAAaD,MAAc;IACzC,MAAMmF,UAAUF,OAAO3D,IAAI,CAACtB,QAAQ,UAAUkF,QAAQ,CAAC;IACvD,MAAME,SAASrE,KAAKsE,KAAK,CAACF;IAC1B,OAAO;QACL3E,WAAW,IAAI8E,KAAKF,OAAO5E,SAAS;QACpCvC,IAAImH,OAAOnH,EAAE;IACf;AACF"}
+
{"version":3,"sources":["../../src/database/backend.ts"],"sourcesContent":["import { getLogger } from \"@logtape/logtape\";\nimport { camelize } from \"inflection\";\nimport knex, { type Knex } from \"knex\";\nimport {\n type Backend,\n type CancelWorkflowRunParams,\n type ClaimWorkflowRunParams,\n type CompleteStepAttemptParams,\n type CompleteWorkflowRunParams,\n type CreateStepAttemptParams,\n type CreateWorkflowRunParams,\n DEFAULT_NAMESPACE_ID,\n type ExtendWorkflowRunLeaseParams,\n type FailStepAttemptParams,\n type FailWorkflowRunParams,\n type GetStepAttemptParams,\n type GetWorkflowRunParams,\n type ListStepAttemptsParams,\n type ListWorkflowRunsParams,\n type PaginatedResponse,\n type SleepWorkflowRunParams,\n} from \"../backend\";\nimport { DEFAULT_RETRY_POLICY } from \"../core/retry\";\nimport type { StepAttempt } from \"../core/step\";\nimport type { WorkflowRun } from \"../core/workflow\";\nimport { DEFAULT_SCHEMA, migrate } from \"./base\";\nimport { type OnSubscribed, PostgresPubSub } from \"./pubsub\";\n\nexport const DEFAULT_LISTEN_CHANNEL = \"new_tasks\" as const;\nconst DEFAULT_PAGINATION_PAGE_SIZE = 100 as const;\n\ninterface BackendPostgresOptions {\n namespaceId?: string;\n runMigrations?: boolean;\n\n // default: true\n usePubSub?: boolean;\n}\n\nconst logger = getLogger([\"sonamu\", \"internal\", \"tasks\"]);\nconst queryLogger = getLogger([\"sonamu\", \"internal\", \"tasks\", \"query\"]);\n\n/**\n * Manages a connection to a Postgres database for workflow operations.\n */\nexport class BackendPostgres implements Backend {\n private config: Knex.Config;\n private namespaceId: string;\n private usePubSub: boolean;\n private pubsub: PostgresPubSub | null = null;\n private initialized: boolean = false;\n private runMigrations: boolean;\n\n private _knex: Knex | null = null;\n private get knex(): Knex {\n if (!this._knex) {\n this._knex = knex(this.config);\n this._knex.on(\"query\", (query) => {\n queryLogger.debug(\"SQL: {query}, Values: {bindings}\", {\n query: query.sql,\n bindings: query.bindings,\n });\n });\n }\n\n return this._knex;\n }\n\n constructor(config: Knex.Config, options?: BackendPostgresOptions) {\n this.config = {\n ...config,\n postProcessResponse: (result, _queryContext) => {\n if (result === null || result === undefined) {\n return result;\n }\n\n if (config?.postProcessResponse) {\n result = config.postProcessResponse(result, _queryContext);\n }\n\n const camelizeRow = (row: Record<string, unknown>) =>\n Object.fromEntries(\n Object.entries(row).map(([key, value]) => [camelize(key, true), value]),\n );\n\n if (Array.isArray(result)) {\n return result.map(camelizeRow);\n }\n\n return camelizeRow(result);\n },\n };\n\n const { namespaceId, usePubSub, runMigrations } = {\n namespaceId: DEFAULT_NAMESPACE_ID,\n usePubSub: true,\n runMigrations: true,\n ...options,\n };\n\n this.namespaceId = namespaceId;\n this.usePubSub = usePubSub;\n this.runMigrations = runMigrations;\n }\n\n async initialize() {\n if (this.initialized) {\n return;\n }\n\n if (this.runMigrations) {\n await migrate(this.config, DEFAULT_SCHEMA);\n }\n\n this.initialized = true;\n }\n\n async subscribe(callback: OnSubscribed) {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n if (!this.usePubSub) {\n return;\n }\n\n if (!this.pubsub) {\n this.pubsub = await PostgresPubSub.create(this.knex);\n }\n\n this.pubsub.listenEvent(DEFAULT_LISTEN_CHANNEL, callback);\n }\n\n async publish(payload?: string): Promise<void> {\n if (!this.initialized) {\n throw new 
Error(\"Backend not initialized\");\n }\n\n if (!this.usePubSub) {\n return;\n }\n\n await this.knex.raw(\n payload\n ? `NOTIFY ${DEFAULT_LISTEN_CHANNEL}, '${payload}'`\n : `NOTIFY ${DEFAULT_LISTEN_CHANNEL}`,\n );\n }\n\n async stop(): Promise<void> {\n if (!this.initialized) {\n return;\n }\n\n await this.pubsub?.destroy();\n this.pubsub = null;\n await this.knex.destroy();\n }\n\n async createWorkflowRun(params: CreateWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Creating workflow run: {workflowName}:{version}\", {\n workflowName: params.workflowName,\n version: params.version,\n });\n\n const qb = this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .insert({\n namespace_id: this.namespaceId,\n id: crypto.randomUUID(),\n workflow_name: params.workflowName,\n version: params.version,\n status: \"pending\",\n idempotency_key: params.idempotencyKey,\n config: params.config,\n context: params.context,\n input: params.input,\n attempts: 0,\n available_at: params.availableAt ?? this.knex.fn.now(),\n deadline_at: params.deadlineAt,\n created_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n const workflowRun = await qb;\n if (!workflowRun[0]) {\n logger.error(\"Failed to create workflow run: {params}\", { params });\n throw new Error(\"Failed to create workflow run\");\n }\n\n return workflowRun[0];\n }\n\n async getWorkflowRun(params: GetWorkflowRunParams): Promise<WorkflowRun | null> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Getting workflow run: {workflowRunId}\", { workflowRunId: params.workflowRunId });\n const workflowRun = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .select(\n \"namespace_id\",\n \"id\",\n \"workflow_name\",\n \"version\",\n \"status\",\n \"idempotency_key\",\n \"config\",\n \"context\",\n \"input\",\n \"output\",\n \"error\",\n \"attempts\",\n \"parent_step_attempt_namespace_id\",\n \"parent_step_attempt_id\",\n \"worker_id\",\n \"available_at\",\n \"deadline_at\",\n \"started_at\",\n \"finished_at\",\n \"created_at\",\n \"updated_at\",\n )\n .first();\n\n return workflowRun ?? null;\n }\n\n async listWorkflowRuns(params: ListWorkflowRunsParams): Promise<PaginatedResponse<WorkflowRun>> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Listing workflow runs: {after}, {before}\", {\n after: params.after,\n before: params.before,\n });\n const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;\n const { after, before } = params;\n\n let cursor: Cursor | null = null;\n if (after) {\n cursor = decodeCursor(after);\n } else if (before) {\n cursor = decodeCursor(before);\n }\n\n const qb = this.buildListWorkflowRunsWhere(params, cursor);\n const rows = await qb\n .orderBy(\"created_at\", before ? \"desc\" : \"asc\")\n .orderBy(\"id\", before ? \"desc\" : \"asc\")\n .limit(limit + 1);\n\n return this.processPaginationResults(\n rows,\n limit,\n typeof after === \"string\",\n typeof before === \"string\",\n );\n }\n\n private buildListWorkflowRunsWhere(params: ListWorkflowRunsParams, cursor: Cursor | null) {\n const { after } = params;\n const qb = this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId);\n\n if (cursor) {\n const operator = after ? 
\">\" : \"<\";\n return qb.whereRaw(`(\"created_at\", \"id\") ${operator} (?, ?)`, [\n cursor.createdAt.toISOString(),\n cursor.id,\n ]);\n }\n\n return qb;\n }\n\n async claimWorkflowRun(params: ClaimWorkflowRunParams): Promise<WorkflowRun | null> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Claiming workflow run: {workerId}, {leaseDurationMs}\", {\n workerId: params.workerId,\n leaseDurationMs: params.leaseDurationMs,\n });\n const claimed = await this.knex\n .with(\"expired\", (qb) =>\n qb\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .update({\n status: \"failed\",\n error: JSON.stringify({ message: \"Workflow run deadline exceeded\" }),\n worker_id: null,\n available_at: null,\n finished_at: this.knex.raw(\"NOW()\"),\n updated_at: this.knex.raw(\"NOW()\"),\n })\n .where(\"namespace_id\", this.namespaceId)\n .whereIn(\"status\", [\"pending\", \"running\", \"sleeping\"])\n .whereNotNull(\"deadline_at\")\n .where(\"deadline_at\", \"<=\", this.knex.raw(\"NOW()\"))\n .returning(\"id\"),\n )\n .with(\"candidate\", (qb) =>\n qb\n .withSchema(DEFAULT_SCHEMA)\n .select(\"id\")\n .from(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .whereIn(\"status\", [\"pending\", \"running\", \"sleeping\"])\n .where(\"available_at\", \"<=\", this.knex.raw(\"NOW()\"))\n .where((qb2) => {\n qb2.whereNull(\"deadline_at\").orWhere(\"deadline_at\", \">\", this.knex.raw(\"NOW()\"));\n })\n .orderByRaw(\"CASE WHEN status = 'pending' THEN 0 ELSE 1 END\")\n .orderBy(\"available_at\", \"asc\")\n .orderBy(\"created_at\", \"asc\")\n .limit(1)\n .forUpdate()\n .skipLocked(),\n )\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs as wr\")\n .where(\"wr.namespace_id\", this.namespaceId)\n .where(\"wr.id\", this.knex.ref(\"candidate.id\"))\n .update({\n status: \"running\",\n attempts: this.knex.raw(\"wr.attempts + 1\"),\n worker_id: params.workerId,\n available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),\n started_at: this.knex.raw(\"COALESCE(wr.started_at, NOW())\"),\n updated_at: this.knex.raw(\"NOW()\"),\n })\n .updateFrom(\"candidate\")\n .returning(\"wr.*\");\n\n return claimed[0] ?? 
null;\n }\n\n async extendWorkflowRunLease(params: ExtendWorkflowRunLeaseParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Extending workflow run lease: {workflowRunId}, {workerId}, {leaseDurationMs}\", {\n workflowRunId: params.workflowRunId,\n workerId: params.workerId,\n leaseDurationMs: params.leaseDurationMs,\n });\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .where(\"status\", \"running\")\n .where(\"worker_id\", params.workerId)\n .update({\n available_at: this.knex.raw(`NOW() + ${params.leaseDurationMs} * INTERVAL '1 millisecond'`),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n logger.error(\"Failed to extend lease for workflow run: {params}\", { params });\n throw new Error(\"Failed to extend lease for workflow run\");\n }\n\n return updated;\n }\n\n async sleepWorkflowRun(params: SleepWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Sleeping workflow run: {workflowRunId}, {workerId}, {availableAt}\", {\n workflowRunId: params.workflowRunId,\n workerId: params.workerId,\n availableAt: params.availableAt,\n });\n\n // 'succeeded' status is deprecated\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .whereNotIn(\"status\", [\"succeeded\", \"completed\", \"failed\", \"canceled\"])\n .where(\"worker_id\", params.workerId)\n .update({\n status: \"sleeping\",\n available_at: params.availableAt,\n worker_id: null,\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n logger.error(\"Failed to sleep workflow run: {params}\", { params });\n throw new Error(\"Failed to sleep workflow run\");\n }\n\n return updated;\n }\n\n async completeWorkflowRun(params: CompleteWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Completing workflow run: {workflowRunId}, {workerId}, {output}\", {\n workflowRunId: params.workflowRunId,\n workerId: params.workerId,\n output: params.output,\n });\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .where(\"status\", \"running\")\n .where(\"worker_id\", params.workerId)\n .update({\n status: \"completed\",\n output: JSON.stringify(params.output),\n error: null,\n worker_id: params.workerId,\n available_at: null,\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n logger.error(\"Failed to complete workflow run: {params}\", { params });\n throw new Error(\"Failed to complete workflow run\");\n }\n\n return updated;\n }\n\n async failWorkflowRun(params: FailWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n const { workflowRunId, error } = params;\n const { initialIntervalMs, backoffCoefficient, maximumIntervalMs } = DEFAULT_RETRY_POLICY;\n\n // this beefy query updates a workflow's status, available_at, and\n // finished_at based on the workflow's deadline and retry policy\n //\n // if the next retry would exceed the 
deadline, the run is marked as\n // 'failed' and finalized, otherwise, the run is rescheduled with an updated\n // 'available_at' timestamp for the next retry\n const retryIntervalExpr = `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, \"attempts\" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;\n const deadlineExceededCondition = `\"deadline_at\" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= \"deadline_at\"`;\n\n logger.info(\"Failing workflow run: {workflowRunId}, {workerId}, {error}\", {\n workflowRunId: params.workflowRunId,\n workerId: params.workerId,\n error: params.error,\n });\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", workflowRunId)\n .where(\"status\", \"running\")\n .where(\"worker_id\", params.workerId)\n .update({\n status: this.knex.raw(\n `CASE WHEN ${deadlineExceededCondition} THEN 'failed' ELSE 'pending' END`,\n ),\n available_at: this.knex.raw(\n `CASE WHEN ${deadlineExceededCondition} THEN NULL ELSE NOW() + (${retryIntervalExpr}) END`,\n ),\n finished_at: this.knex.raw(\n `CASE WHEN ${deadlineExceededCondition} THEN NOW() ELSE NULL END`,\n ),\n error: JSON.stringify(error),\n worker_id: null,\n started_at: null,\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n logger.error(\"Failed to mark workflow run failed: {params}\", { params });\n throw new Error(\"Failed to mark workflow run failed\");\n }\n\n return updated;\n }\n\n async cancelWorkflowRun(params: CancelWorkflowRunParams): Promise<WorkflowRun> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Canceling workflow run: {workflowRunId}\", { workflowRunId: params.workflowRunId });\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"workflow_runs\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.workflowRunId)\n .whereIn(\"status\", [\"pending\", \"running\", \"sleeping\"])\n .update({\n status: \"canceled\",\n worker_id: null,\n available_at: null,\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!updated) {\n // workflow may already be in a terminal state\n const existing = await this.getWorkflowRun({\n workflowRunId: params.workflowRunId,\n });\n if (!existing) {\n throw new Error(`Workflow run ${params.workflowRunId} does not exist`);\n }\n\n // if already canceled, just return it\n if (existing.status === \"canceled\") {\n return existing;\n }\n\n // throw error for completed/failed workflows\n // 'succeeded' status is deprecated\n if ([\"succeeded\", \"completed\", \"failed\"].includes(existing.status)) {\n logger.error(\"Cannot cancel workflow run: {params} with status {status}\", {\n params,\n status: existing.status,\n });\n throw new Error(\n `Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`,\n );\n }\n\n logger.error(\"Failed to cancel workflow run: {params}\", { params });\n throw new Error(\"Failed to cancel workflow run\");\n }\n\n return updated;\n }\n\n async createStepAttempt(params: CreateStepAttemptParams): Promise<StepAttempt> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Creating step attempt: {workflowRunId}, {stepName}, {kind}\", {\n workflowRunId: params.workflowRunId,\n stepName: params.stepName,\n kind: params.kind,\n });\n\n const [stepAttempt] = await this.knex\n 
.withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts\")\n .insert({\n namespace_id: this.namespaceId,\n id: crypto.randomUUID(),\n workflow_run_id: params.workflowRunId,\n step_name: params.stepName,\n kind: params.kind,\n status: \"running\",\n config: JSON.stringify(params.config),\n context: JSON.stringify(params.context),\n started_at: this.knex.fn.now(),\n created_at: this.knex.raw(\"date_trunc('milliseconds', NOW())\"),\n updated_at: this.knex.fn.now(),\n })\n .returning(\"*\");\n\n if (!stepAttempt) {\n logger.error(\"Failed to create step attempt: {params}\", { params });\n throw new Error(\"Failed to create step attempt\");\n }\n\n return stepAttempt;\n }\n\n async getStepAttempt(params: GetStepAttemptParams): Promise<StepAttempt | null> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Getting step attempt: {stepAttemptId}\", { stepAttemptId: params.stepAttemptId });\n\n const stepAttempt = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"id\", params.stepAttemptId)\n .first();\n\n return stepAttempt ?? null;\n }\n\n async listStepAttempts(params: ListStepAttemptsParams): Promise<PaginatedResponse<StepAttempt>> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Listing step attempts: {workflowRunId}, {after}, {before}\", {\n workflowRunId: params.workflowRunId,\n after: params.after,\n before: params.before,\n });\n\n const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;\n const { after, before } = params;\n\n let cursor: Cursor | null = null;\n if (after) {\n cursor = decodeCursor(after);\n } else if (before) {\n cursor = decodeCursor(before);\n }\n\n const qb = this.buildListStepAttemptsWhere(params, cursor);\n const rows = await qb\n .orderBy(\"created_at\", before ? \"desc\" : \"asc\")\n .orderBy(\"id\", before ? \"desc\" : \"asc\")\n .limit(limit + 1);\n\n return this.processPaginationResults(\n rows,\n limit,\n typeof after === \"string\",\n typeof before === \"string\",\n );\n }\n\n private buildListStepAttemptsWhere(params: ListStepAttemptsParams, cursor: Cursor | null) {\n const { after } = params;\n const qb = this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts\")\n .where(\"namespace_id\", this.namespaceId)\n .where(\"workflow_run_id\", params.workflowRunId);\n\n if (cursor) {\n const operator = after ? \">\" : \"<\";\n return qb.whereRaw(`(\"created_at\", \"id\") ${operator} (?, ?)`, [\n cursor.createdAt.toISOString(),\n cursor.id,\n ]);\n }\n\n return qb;\n }\n\n private processPaginationResults<T extends Cursor>(\n rows: T[],\n limit: number,\n hasAfter: boolean,\n hasBefore: boolean,\n ): PaginatedResponse<T> {\n const data = rows;\n let hasNext = false;\n let hasPrev = false;\n\n if (hasBefore) {\n data.reverse();\n if (data.length > limit) {\n hasPrev = true;\n data.shift();\n }\n hasNext = true;\n } else {\n if (data.length > limit) {\n hasNext = true;\n data.pop();\n }\n if (hasAfter) {\n hasPrev = true;\n }\n }\n\n const lastItem = data.at(-1);\n const nextCursor = hasNext && lastItem ? encodeCursor(lastItem) : null;\n const firstItem = data[0];\n const prevCursor = hasPrev && firstItem ? 
encodeCursor(firstItem) : null;\n\n return {\n data,\n pagination: {\n next: nextCursor,\n prev: prevCursor,\n },\n };\n }\n\n // NOTE: 실제 서비스에서 이게 안 되는 것 같은데, 쿼리 등을 체크할 필요가 있음.\n async completeStepAttempt(params: CompleteStepAttemptParams): Promise<StepAttempt> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Marking step attempt as completed: {workflowRunId}, {stepAttemptId}, {workerId}\", {\n workflowRunId: params.workflowRunId,\n stepAttemptId: params.stepAttemptId,\n workerId: params.workerId,\n });\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts as sa\")\n .update({\n status: \"completed\",\n output: JSON.stringify(params.output),\n error: null,\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`)\n .where(\"sa.namespace_id\", this.namespaceId)\n .where(\"sa.workflow_run_id\", params.workflowRunId)\n .where(\"sa.id\", params.stepAttemptId)\n .where(\"sa.status\", \"running\")\n .where(\"wr.namespace_id\", this.knex.ref(\"sa.namespace_id\"))\n .where(\"wr.id\", this.knex.ref(\"sa.workflow_run_id\"))\n .where(\"wr.status\", \"running\")\n .where(\"wr.worker_id\", params.workerId)\n .returning(\"sa.*\");\n\n if (!updated) {\n logger.error(\"Failed to mark step attempt completed: {params}\", { params });\n throw new Error(\"Failed to mark step attempt completed\");\n }\n\n return updated;\n }\n\n async failStepAttempt(params: FailStepAttemptParams): Promise<StepAttempt> {\n if (!this.initialized) {\n throw new Error(\"Backend not initialized\");\n }\n\n logger.info(\"Marking step attempt as failed: {workflowRunId}, {stepAttemptId}, {workerId}\", {\n workflowRunId: params.workflowRunId,\n stepAttemptId: params.stepAttemptId,\n workerId: params.workerId,\n });\n logger.info(\"Error: {error.message}\", { error: params.error.message });\n\n const [updated] = await this.knex\n .withSchema(DEFAULT_SCHEMA)\n .table(\"step_attempts as sa\")\n .update({\n status: \"failed\",\n output: null,\n error: JSON.stringify(params.error),\n finished_at: this.knex.fn.now(),\n updated_at: this.knex.fn.now(),\n })\n .updateFrom(`${DEFAULT_SCHEMA}.workflow_runs as wr`)\n .where(\"sa.namespace_id\", this.namespaceId)\n .where(\"sa.workflow_run_id\", params.workflowRunId)\n .where(\"sa.id\", params.stepAttemptId)\n .where(\"sa.status\", \"running\")\n .where(\"wr.namespace_id\", this.knex.ref(\"sa.namespace_id\"))\n .where(\"wr.id\", this.knex.ref(\"sa.workflow_run_id\"))\n .where(\"wr.status\", \"running\")\n .where(\"wr.worker_id\", params.workerId)\n .returning(\"sa.*\");\n\n if (!updated) {\n logger.error(\"Failed to mark step attempt failed: {params}\", { params });\n throw new Error(\"Failed to mark step attempt failed\");\n }\n\n return updated;\n }\n}\n\n/**\n * Cursor used for pagination. Requires created_at and id fields. 
Because JS\n * Date does not natively support microsecond precision dates, created_at should\n * be stored with millisecond precision in paginated tables to avoid issues with\n * cursor comparisons.\n */\ninterface Cursor {\n createdAt: Date;\n id: string;\n}\n\nfunction encodeCursor(item: Cursor): string {\n const encoded = Buffer.from(\n JSON.stringify({ createdAt: item.createdAt.toISOString(), id: item.id }),\n ).toString(\"base64\");\n return encoded;\n}\n\nexport function decodeCursor(cursor: string): Cursor {\n const decoded = Buffer.from(cursor, \"base64\").toString(\"utf8\");\n const parsed = JSON.parse(decoded) as { createdAt: string; id: string };\n return {\n createdAt: new Date(parsed.createdAt),\n id: parsed.id,\n };\n}\n"],"names":["getLogger","camelize","knex","DEFAULT_NAMESPACE_ID","DEFAULT_RETRY_POLICY","DEFAULT_SCHEMA","migrate","PostgresPubSub","DEFAULT_LISTEN_CHANNEL","DEFAULT_PAGINATION_PAGE_SIZE","logger","queryLogger","BackendPostgres","config","namespaceId","usePubSub","pubsub","initialized","runMigrations","_knex","on","query","debug","sql","bindings","options","postProcessResponse","result","_queryContext","undefined","camelizeRow","row","Object","fromEntries","entries","map","key","value","Array","isArray","initialize","subscribe","callback","Error","create","listenEvent","publish","payload","raw","stop","destroy","createWorkflowRun","params","info","workflowName","version","qb","withSchema","table","insert","namespace_id","id","crypto","randomUUID","workflow_name","status","idempotency_key","idempotencyKey","context","input","attempts","available_at","availableAt","fn","now","deadline_at","deadlineAt","created_at","updated_at","returning","workflowRun","error","getWorkflowRun","workflowRunId","where","select","first","listWorkflowRuns","after","before","limit","cursor","decodeCursor","buildListWorkflowRunsWhere","rows","orderBy","processPaginationResults","operator","whereRaw","createdAt","toISOString","claimWorkflowRun","workerId","leaseDurationMs","claimed","with","update","JSON","stringify","message","worker_id","finished_at","whereIn","whereNotNull","from","qb2","whereNull","orWhere","orderByRaw","forUpdate","skipLocked","ref","started_at","updateFrom","extendWorkflowRunLease","updated","sleepWorkflowRun","whereNotIn","completeWorkflowRun","output","failWorkflowRun","initialIntervalMs","backoffCoefficient","maximumIntervalMs","retryIntervalExpr","deadlineExceededCondition","cancelWorkflowRun","existing","includes","createStepAttempt","stepName","kind","stepAttempt","workflow_run_id","step_name","getStepAttempt","stepAttemptId","listStepAttempts","buildListStepAttemptsWhere","hasAfter","hasBefore","data","hasNext","hasPrev","reverse","length","shift","pop","lastItem","at","nextCursor","encodeCursor","firstItem","prevCursor","pagination","next","prev","completeStepAttempt","failStepAttempt","item","encoded","Buffer","toString","decoded","parsed","parse","Date"],"mappings":"AAAA,SAASA,SAAS,QAAQ,mBAAmB;AAC7C,SAASC,QAAQ,QAAQ,aAAa;AACtC,OAAOC,UAAyB,OAAO;AACvC,SAQEC,oBAAoB,QAUf,gBAAa;AACpB,SAASC,oBAAoB,QAAQ,mBAAgB;AAGrD,SAASC,cAAc,EAAEC,OAAO,QAAQ,YAAS;AACjD,SAA4BC,cAAc,QAAQ,cAAW;AAE7D,OAAO,MAAMC,yBAAyB,YAAqB;AAC3D,MAAMC,+BAA+B;AAUrC,MAAMC,SAASV,UAAU;IAAC;IAAU;IAAY;CAAQ;AACxD,MAAMW,cAAcX,UAAU;IAAC;IAAU;IAAY;IAAS;CAAQ;AAEtE;;CAEC,GACD,OAAO,MAAMY;IACHC,OAAoB;IACpBC,YAAoB;IACpBC,UAAmB;IACnBC,SAAgC,KAAK;IACrCC,cAAuB,MAAM;IAC7BC,cAAuB;IAEvBC,QAAqB,KAAK;IAClC,IAAYjB,OAAa;QACvB,IAAI,CAAC,IAAI,CAACiB,KAAK,EAAE;YACf,IAAI,CAACA,KAAK,GAAGjB,KAAK,IAAI,CAACW,MAAM;YAC7B,IAAI,CAACM
,KAAK,CAACC,EAAE,CAAC,SAAS,CAACC;gBACtBV,YAAYW,KAAK,CAAC,oCAAoC;oBACpDD,OAAOA,MAAME,GAAG;oBAChBC,UAAUH,MAAMG,QAAQ;gBAC1B;YACF;QACF;QAEA,OAAO,IAAI,CAACL,KAAK;IACnB;IAEA,YAAYN,MAAmB,EAAEY,OAAgC,CAAE;QACjE,IAAI,CAACZ,MAAM,GAAG;YACZ,GAAGA,MAAM;YACTa,qBAAqB,CAACC,QAAQC;gBAC5B,IAAID,WAAW,QAAQA,WAAWE,WAAW;oBAC3C,OAAOF;gBACT;gBAEA,IAAId,QAAQa,qBAAqB;oBAC/BC,SAASd,OAAOa,mBAAmB,CAACC,QAAQC;gBAC9C;gBAEA,MAAME,cAAc,CAACC,MACnBC,OAAOC,WAAW,CAChBD,OAAOE,OAAO,CAACH,KAAKI,GAAG,CAAC,CAAC,CAACC,KAAKC,MAAM,GAAK;4BAACpC,SAASmC,KAAK;4BAAOC;yBAAM;gBAG1E,IAAIC,MAAMC,OAAO,CAACZ,SAAS;oBACzB,OAAOA,OAAOQ,GAAG,CAACL;gBACpB;gBAEA,OAAOA,YAAYH;YACrB;QACF;QAEA,MAAM,EAAEb,WAAW,EAAEC,SAAS,EAAEG,aAAa,EAAE,GAAG;YAChDJ,aAAaX;YACbY,WAAW;YACXG,eAAe;YACf,GAAGO,OAAO;QACZ;QAEA,IAAI,CAACX,WAAW,GAAGA;QACnB,IAAI,CAACC,SAAS,GAAGA;QACjB,IAAI,CAACG,aAAa,GAAGA;IACvB;IAEA,MAAMsB,aAAa;QACjB,IAAI,IAAI,CAACvB,WAAW,EAAE;YACpB;QACF;QAEA,IAAI,IAAI,CAACC,aAAa,EAAE;YACtB,MAAMZ,QAAQ,IAAI,CAACO,MAAM,EAAER;QAC7B;QAEA,IAAI,CAACY,WAAW,GAAG;IACrB;IAEA,MAAMwB,UAAUC,QAAsB,EAAE;QACtC,IAAI,CAAC,IAAI,CAACzB,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEA,IAAI,CAAC,IAAI,CAAC5B,SAAS,EAAE;YACnB;QACF;QAEA,IAAI,CAAC,IAAI,CAACC,MAAM,EAAE;YAChB,IAAI,CAACA,MAAM,GAAG,MAAMT,eAAeqC,MAAM,CAAC,IAAI,CAAC1C,IAAI;QACrD;QAEA,IAAI,CAACc,MAAM,CAAC6B,WAAW,CAACrC,wBAAwBkC;IAClD;IAEA,MAAMI,QAAQC,OAAgB,EAAiB;QAC7C,IAAI,CAAC,IAAI,CAAC9B,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEA,IAAI,CAAC,IAAI,CAAC5B,SAAS,EAAE;YACnB;QACF;QAEA,MAAM,IAAI,CAACb,IAAI,CAAC8C,GAAG,CACjBD,UACI,CAAC,OAAO,EAAEvC,uBAAuB,GAAG,EAAEuC,QAAQ,CAAC,CAAC,GAChD,CAAC,OAAO,EAAEvC,wBAAwB;IAE1C;IAEA,MAAMyC,OAAsB;QAC1B,IAAI,CAAC,IAAI,CAAChC,WAAW,EAAE;YACrB;QACF;QAEA,MAAM,IAAI,CAACD,MAAM,EAAEkC;QACnB,IAAI,CAAClC,MAAM,GAAG;QACd,MAAM,IAAI,CAACd,IAAI,CAACgD,OAAO;IACzB;IAEA,MAAMC,kBAAkBC,MAA+B,EAAwB;QAC7E,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,mDAAmD;YAC7DC,cAAcF,OAAOE,YAAY;YACjCC,SAASH,OAAOG,OAAO;QACzB;QAEA,MAAMC,KAAK,IAAI,CAACtD,IAAI,CACjBuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACNC,MAAM,CAAC;YACNC,cAAc,IAAI,CAAC9C,WAAW;YAC9B+C,IAAIC,OAAOC,UAAU;YACrBC,eAAeZ,OAAOE,YAAY;YAClCC,SAASH,OAAOG,OAAO;YACvBU,QAAQ;YACRC,iBAAiBd,OAAOe,cAAc;YACtCtD,QAAQuC,OAAOvC,MAAM;YACrBuD,SAAShB,OAAOgB,OAAO;YACvBC,OAAOjB,OAAOiB,KAAK;YACnBC,UAAU;YACVC,cAAcnB,OAAOoB,WAAW,IAAI,IAAI,CAACtE,IAAI,CAACuE,EAAE,CAACC,GAAG;YACpDC,aAAavB,OAAOwB,UAAU;YAC9BC,YAAY,IAAI,CAAC3E,IAAI,CAACuE,EAAE,CAACC,GAAG;YAC5BI,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,MAAMC,cAAc,MAAMxB;QAC1B,IAAI,CAACwB,WAAW,CAAC,EAAE,EAAE;YACnBtE,OAAOuE,KAAK,CAAC,2CAA2C;gBAAE7B;YAAO;YACjE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOqC,WAAW,CAAC,EAAE;IACvB;IAEA,MAAME,eAAe9B,MAA4B,EAA+B;QAC9E,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,yCAAyC;YAAE8B,eAAe/B,OAAO+B,aAAa;QAAC;QAC3F,MAAMH,cAAc,MAAM,IAAI,CAAC9E,IAAI,CAChCuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMhC,OAAO+B,aAAa,EAChCE,MAAM,CACL,gBACA,MACA,iBACA,WACA,UACA,mBACA,UACA,WACA,SACA,UACA,SACA,YACA,oCACA,0BACA,aACA,gBACA,eACA,cACA,eACA,cACA,cAEDC,KAAK;QAER,OAAON,eAAe;IACxB;IAEA,MAAMO,iBAAiBnC,MAA8B,EAA2C;QAC9F,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,4CAA4C;YACtDmC,OAAOpC,OAAOoC,KAAK;YACnBC,QAAQrC,OAAOqC,MAAM;QACvB;QACA,MAAMC,QAAQtC,OAAOsC,KAAK,IAAIjF;QAC9B,MAAM,EAAE+E,KAAK,EAAEC,MAAM,EAAE,GAAGrC;QAE1B,IAAIuC,SAAwB;QAC5B,IAAIH,OAAO;YACTG,SAASC,aAAaJ;QACxB,OAAO,IAAIC,QAAQ;YACjBE,SAASC,aAAaH;QACxB;QAEA,MAAMjC,KAAK,IAAI,CAACqC,0BAA0B,CAACzC,QAAQuC;QACnD,MAAMG
,OAAO,MAAMtC,GAChBuC,OAAO,CAAC,cAAcN,SAAS,SAAS,OACxCM,OAAO,CAAC,MAAMN,SAAS,SAAS,OAChCC,KAAK,CAACA,QAAQ;QAEjB,OAAO,IAAI,CAACM,wBAAwB,CAClCF,MACAJ,OACA,OAAOF,UAAU,UACjB,OAAOC,WAAW;IAEtB;IAEQI,2BAA2BzC,MAA8B,EAAEuC,MAAqB,EAAE;QACxF,MAAM,EAAEH,KAAK,EAAE,GAAGpC;QAClB,MAAMI,KAAK,IAAI,CAACtD,IAAI,CACjBuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW;QAEzC,IAAI6E,QAAQ;YACV,MAAMM,WAAWT,QAAQ,MAAM;YAC/B,OAAOhC,GAAG0C,QAAQ,CAAC,CAAC,qBAAqB,EAAED,SAAS,OAAO,CAAC,EAAE;gBAC5DN,OAAOQ,SAAS,CAACC,WAAW;gBAC5BT,OAAO9B,EAAE;aACV;QACH;QAEA,OAAOL;IACT;IAEA,MAAM6C,iBAAiBjD,MAA8B,EAA+B;QAClF,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,wDAAwD;YAClEiD,UAAUlD,OAAOkD,QAAQ;YACzBC,iBAAiBnD,OAAOmD,eAAe;QACzC;QACA,MAAMC,UAAU,MAAM,IAAI,CAACtG,IAAI,CAC5BuG,IAAI,CAAC,WAAW,CAACjD,KAChBA,GACGC,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACNgD,MAAM,CAAC;gBACNzC,QAAQ;gBACRgB,OAAO0B,KAAKC,SAAS,CAAC;oBAAEC,SAAS;gBAAiC;gBAClEC,WAAW;gBACXvC,cAAc;gBACdwC,aAAa,IAAI,CAAC7G,IAAI,CAAC8C,GAAG,CAAC;gBAC3B8B,YAAY,IAAI,CAAC5E,IAAI,CAAC8C,GAAG,CAAC;YAC5B,GACCoC,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCkG,OAAO,CAAC,UAAU;gBAAC;gBAAW;gBAAW;aAAW,EACpDC,YAAY,CAAC,eACb7B,KAAK,CAAC,eAAe,MAAM,IAAI,CAAClF,IAAI,CAAC8C,GAAG,CAAC,UACzC+B,SAAS,CAAC,OAEd0B,IAAI,CAAC,aAAa,CAACjD,KAClBA,GACGC,UAAU,CAACpD,gBACXgF,MAAM,CAAC,MACP6B,IAAI,CAAC,iBACL9B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCkG,OAAO,CAAC,UAAU;gBAAC;gBAAW;gBAAW;aAAW,EACpD5B,KAAK,CAAC,gBAAgB,MAAM,IAAI,CAAClF,IAAI,CAAC8C,GAAG,CAAC,UAC1CoC,KAAK,CAAC,CAAC+B;gBACNA,IAAIC,SAAS,CAAC,eAAeC,OAAO,CAAC,eAAe,KAAK,IAAI,CAACnH,IAAI,CAAC8C,GAAG,CAAC;YACzE,GACCsE,UAAU,CAAC,kDACXvB,OAAO,CAAC,gBAAgB,OACxBA,OAAO,CAAC,cAAc,OACtBL,KAAK,CAAC,GACN6B,SAAS,GACTC,UAAU,IAEd/D,UAAU,CAACpD,gBACXqD,KAAK,CAAC,uBACN0B,KAAK,CAAC,mBAAmB,IAAI,CAACtE,WAAW,EACzCsE,KAAK,CAAC,SAAS,IAAI,CAAClF,IAAI,CAACuH,GAAG,CAAC,iBAC7Bf,MAAM,CAAC;YACNzC,QAAQ;YACRK,UAAU,IAAI,CAACpE,IAAI,CAAC8C,GAAG,CAAC;YACxB8D,WAAW1D,OAAOkD,QAAQ;YAC1B/B,cAAc,IAAI,CAACrE,IAAI,CAAC8C,GAAG,CAAC,CAAC,QAAQ,EAAEI,OAAOmD,eAAe,CAAC,2BAA2B,CAAC;YAC1FmB,YAAY,IAAI,CAACxH,IAAI,CAAC8C,GAAG,CAAC;YAC1B8B,YAAY,IAAI,CAAC5E,IAAI,CAAC8C,GAAG,CAAC;QAC5B,GACC2E,UAAU,CAAC,aACX5C,SAAS,CAAC;QAEb,OAAOyB,OAAO,CAAC,EAAE,IAAI;IACvB;IAEA,MAAMoB,uBAAuBxE,MAAoC,EAAwB;QACvF,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,gFAAgF;YAC1F8B,eAAe/B,OAAO+B,aAAa;YACnCmB,UAAUlD,OAAOkD,QAAQ;YACzBC,iBAAiBnD,OAAOmD,eAAe;QACzC;QACA,MAAM,CAACsB,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMhC,OAAO+B,aAAa,EAChCC,KAAK,CAAC,UAAU,WAChBA,KAAK,CAAC,aAAahC,OAAOkD,QAAQ,EAClCI,MAAM,CAAC;YACNnC,cAAc,IAAI,CAACrE,IAAI,CAAC8C,GAAG,CAAC,CAAC,QAAQ,EAAEI,OAAOmD,eAAe,CAAC,2BAA2B,CAAC;YAC1FzB,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZnH,OAAOuE,KAAK,CAAC,qDAAqD;gBAAE7B;YAAO;YAC3E,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;IAEA,MAAMC,iBAAiB1E,MAA8B,EAAwB;QAC3E,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,qEAAqE;YAC/E8B,eAAe/B,OAAO+B,aAAa;YACnCmB,UAAUlD,OAAOkD,QAAQ;YACzB9B,aAAapB,OAAOoB,WAAW;QACjC;QAEA,mCAAmC;QACnC,MAAM,CAACqD,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMhC,OAAO+B,aAAa,EAChC4C,UAAU,CAAC,UAAU;YAAC;YAAa;YAAa;YAAU;SAAW,EACrE3C,KAAK,CAAC,aAAahC,OAAOkD,QAAQ,EAClCI,MAAM,CAAC;YACNzC,QAAQ;YACRM,cAAcnB,OAAOoB,WAAW;YAChCsC,WAAW;YACXhC,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC
9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZnH,OAAOuE,KAAK,CAAC,0CAA0C;gBAAE7B;YAAO;YAChE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;IAEA,MAAMG,oBAAoB5E,MAAiC,EAAwB;QACjF,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,kEAAkE;YAC5E8B,eAAe/B,OAAO+B,aAAa;YACnCmB,UAAUlD,OAAOkD,QAAQ;YACzB2B,QAAQ7E,OAAO6E,MAAM;QACvB;QAEA,MAAM,CAACJ,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMhC,OAAO+B,aAAa,EAChCC,KAAK,CAAC,UAAU,WAChBA,KAAK,CAAC,aAAahC,OAAOkD,QAAQ,EAClCI,MAAM,CAAC;YACNzC,QAAQ;YACRgE,QAAQtB,KAAKC,SAAS,CAACxD,OAAO6E,MAAM;YACpChD,OAAO;YACP6B,WAAW1D,OAAOkD,QAAQ;YAC1B/B,cAAc;YACdwC,aAAa,IAAI,CAAC7G,IAAI,CAACuE,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZnH,OAAOuE,KAAK,CAAC,6CAA6C;gBAAE7B;YAAO;YACnE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;IAEA,MAAMK,gBAAgB9E,MAA6B,EAAwB;QACzE,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEA,MAAM,EAAEwC,aAAa,EAAEF,KAAK,EAAE,GAAG7B;QACjC,MAAM,EAAE+E,iBAAiB,EAAEC,kBAAkB,EAAEC,iBAAiB,EAAE,GAAGjI;QAErE,kEAAkE;QAClE,gEAAgE;QAChE,EAAE;QACF,oEAAoE;QACpE,4EAA4E;QAC5E,8CAA8C;QAC9C,MAAMkI,oBAAoB,CAAC,MAAM,EAAEH,kBAAkB,SAAS,EAAEC,mBAAmB,mBAAmB,EAAEC,kBAAkB,4BAA4B,CAAC;QACvJ,MAAME,4BAA4B,CAAC,uCAAuC,EAAED,kBAAkB,kBAAkB,CAAC;QAEjH5H,OAAO2C,IAAI,CAAC,8DAA8D;YACxE8B,eAAe/B,OAAO+B,aAAa;YACnCmB,UAAUlD,OAAOkD,QAAQ;YACzBrB,OAAO7B,OAAO6B,KAAK;QACrB;QAEA,MAAM,CAAC4C,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMD,eACZC,KAAK,CAAC,UAAU,WAChBA,KAAK,CAAC,aAAahC,OAAOkD,QAAQ,EAClCI,MAAM,CAAC;YACNzC,QAAQ,IAAI,CAAC/D,IAAI,CAAC8C,GAAG,CACnB,CAAC,UAAU,EAAEuF,0BAA0B,iCAAiC,CAAC;YAE3EhE,cAAc,IAAI,CAACrE,IAAI,CAAC8C,GAAG,CACzB,CAAC,UAAU,EAAEuF,0BAA0B,yBAAyB,EAAED,kBAAkB,KAAK,CAAC;YAE5FvB,aAAa,IAAI,CAAC7G,IAAI,CAAC8C,GAAG,CACxB,CAAC,UAAU,EAAEuF,0BAA0B,yBAAyB,CAAC;YAEnEtD,OAAO0B,KAAKC,SAAS,CAAC3B;YACtB6B,WAAW;YACXY,YAAY;YACZ5C,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZnH,OAAOuE,KAAK,CAAC,gDAAgD;gBAAE7B;YAAO;YACtE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;IAEA,MAAMW,kBAAkBpF,MAA+B,EAAwB;QAC7E,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,2CAA2C;YAAE8B,eAAe/B,OAAO+B,aAAa;QAAC;QAE7F,MAAM,CAAC0C,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMhC,OAAO+B,aAAa,EAChC6B,OAAO,CAAC,UAAU;YAAC;YAAW;YAAW;SAAW,EACpDN,MAAM,CAAC;YACNzC,QAAQ;YACR6C,WAAW;YACXvC,cAAc;YACdwC,aAAa,IAAI,CAAC7G,IAAI,CAACuE,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZ,8CAA8C;YAC9C,MAAMY,WAAW,MAAM,IAAI,CAACvD,cAAc,CAAC;gBACzCC,eAAe/B,OAAO+B,aAAa;YACrC;YACA,IAAI,CAACsD,UAAU;gBACb,MAAM,IAAI9F,MAAM,CAAC,aAAa,EAAES,OAAO+B,aAAa,CAAC,eAAe,CAAC;YACvE;YAEA,sCAAsC;YACtC,IAAIsD,SAASxE,MAAM,KAAK,YAAY;gBAClC,OAAOwE;YACT;YAEA,6CAA6C;YAC7C,mCAAmC;YACnC,IAAI;gBAAC;gBAAa;gBAAa;aAAS,CAACC,QAAQ,CAACD,SAASxE,MAAM,GAAG;gBAClEvD,OAAOuE,KAAK,CAAC,6DAA6D;oBACxE7B;oBACAa,QAAQwE,SAASxE,MAAM;gBACzB;gBACA,MAAM,IAAItB,MACR,CAAC,2BAA2B,EAAES,OAAO+B,aAAa,CAAC,aAAa,EAAEsD,SAASxE,MAAM,EAAE;YAEvF;YAEAvD,OAAOuE,KAAK,CAAC,2CAA2C;gBAAE7B;YAAO;YACjE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;IAEA,MAAMc,kBAAkBvF,MAA+B,EAAwB;QAC7E,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,8DAA8D;YACxE8B,eAAe/B,OAAO+B,aAAa;YACnCyD
,UAAUxF,OAAOwF,QAAQ;YACzBC,MAAMzF,OAAOyF,IAAI;QACnB;QAEA,MAAM,CAACC,YAAY,GAAG,MAAM,IAAI,CAAC5I,IAAI,CAClCuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACNC,MAAM,CAAC;YACNC,cAAc,IAAI,CAAC9C,WAAW;YAC9B+C,IAAIC,OAAOC,UAAU;YACrBgF,iBAAiB3F,OAAO+B,aAAa;YACrC6D,WAAW5F,OAAOwF,QAAQ;YAC1BC,MAAMzF,OAAOyF,IAAI;YACjB5E,QAAQ;YACRpD,QAAQ8F,KAAKC,SAAS,CAACxD,OAAOvC,MAAM;YACpCuD,SAASuC,KAAKC,SAAS,CAACxD,OAAOgB,OAAO;YACtCsD,YAAY,IAAI,CAACxH,IAAI,CAACuE,EAAE,CAACC,GAAG;YAC5BG,YAAY,IAAI,CAAC3E,IAAI,CAAC8C,GAAG,CAAC;YAC1B8B,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCK,SAAS,CAAC;QAEb,IAAI,CAAC+D,aAAa;YAChBpI,OAAOuE,KAAK,CAAC,2CAA2C;gBAAE7B;YAAO;YACjE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOmG;IACT;IAEA,MAAMG,eAAe7F,MAA4B,EAA+B;QAC9E,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,yCAAyC;YAAE6F,eAAe9F,OAAO8F,aAAa;QAAC;QAE3F,MAAMJ,cAAc,MAAM,IAAI,CAAC5I,IAAI,CAChCuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,MAAMhC,OAAO8F,aAAa,EAChC5D,KAAK;QAER,OAAOwD,eAAe;IACxB;IAEA,MAAMK,iBAAiB/F,MAA8B,EAA2C;QAC9F,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,6DAA6D;YACvE8B,eAAe/B,OAAO+B,aAAa;YACnCK,OAAOpC,OAAOoC,KAAK;YACnBC,QAAQrC,OAAOqC,MAAM;QACvB;QAEA,MAAMC,QAAQtC,OAAOsC,KAAK,IAAIjF;QAC9B,MAAM,EAAE+E,KAAK,EAAEC,MAAM,EAAE,GAAGrC;QAE1B,IAAIuC,SAAwB;QAC5B,IAAIH,OAAO;YACTG,SAASC,aAAaJ;QACxB,OAAO,IAAIC,QAAQ;YACjBE,SAASC,aAAaH;QACxB;QAEA,MAAMjC,KAAK,IAAI,CAAC4F,0BAA0B,CAAChG,QAAQuC;QACnD,MAAMG,OAAO,MAAMtC,GAChBuC,OAAO,CAAC,cAAcN,SAAS,SAAS,OACxCM,OAAO,CAAC,MAAMN,SAAS,SAAS,OAChCC,KAAK,CAACA,QAAQ;QAEjB,OAAO,IAAI,CAACM,wBAAwB,CAClCF,MACAJ,OACA,OAAOF,UAAU,UACjB,OAAOC,WAAW;IAEtB;IAEQ2D,2BAA2BhG,MAA8B,EAAEuC,MAAqB,EAAE;QACxF,MAAM,EAAEH,KAAK,EAAE,GAAGpC;QAClB,MAAMI,KAAK,IAAI,CAACtD,IAAI,CACjBuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,iBACN0B,KAAK,CAAC,gBAAgB,IAAI,CAACtE,WAAW,EACtCsE,KAAK,CAAC,mBAAmBhC,OAAO+B,aAAa;QAEhD,IAAIQ,QAAQ;YACV,MAAMM,WAAWT,QAAQ,MAAM;YAC/B,OAAOhC,GAAG0C,QAAQ,CAAC,CAAC,qBAAqB,EAAED,SAAS,OAAO,CAAC,EAAE;gBAC5DN,OAAOQ,SAAS,CAACC,WAAW;gBAC5BT,OAAO9B,EAAE;aACV;QACH;QAEA,OAAOL;IACT;IAEQwC,yBACNF,IAAS,EACTJ,KAAa,EACb2D,QAAiB,EACjBC,SAAkB,EACI;QACtB,MAAMC,OAAOzD;QACb,IAAI0D,UAAU;QACd,IAAIC,UAAU;QAEd,IAAIH,WAAW;YACbC,KAAKG,OAAO;YACZ,IAAIH,KAAKI,MAAM,GAAGjE,OAAO;gBACvB+D,UAAU;gBACVF,KAAKK,KAAK;YACZ;YACAJ,UAAU;QACZ,OAAO;YACL,IAAID,KAAKI,MAAM,GAAGjE,OAAO;gBACvB8D,UAAU;gBACVD,KAAKM,GAAG;YACV;YACA,IAAIR,UAAU;gBACZI,UAAU;YACZ;QACF;QAEA,MAAMK,WAAWP,KAAKQ,EAAE,CAAC,CAAC;QAC1B,MAAMC,aAAaR,WAAWM,WAAWG,aAAaH,YAAY;QAClE,MAAMI,YAAYX,IAAI,CAAC,EAAE;QACzB,MAAMY,aAAaV,WAAWS,YAAYD,aAAaC,aAAa;QAEpE,OAAO;YACLX;YACAa,YAAY;gBACVC,MAAML;gBACNM,MAAMH;YACR;QACF;IACF;IAEA,kDAAkD;IAClD,MAAMI,oBAAoBnH,MAAiC,EAAwB;QACjF,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,mFAAmF;YAC7F8B,eAAe/B,OAAO+B,aAAa;YACnC+D,eAAe9F,OAAO8F,aAAa;YACnC5C,UAAUlD,OAAOkD,QAAQ;QAC3B;QAEA,MAAM,CAACuB,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,uBACNgD,MAAM,CAAC;YACNzC,QAAQ;YACRgE,QAAQtB,KAAKC,SAAS,CAACxD,OAAO6E,MAAM;YACpChD,OAAO;YACP8B,aAAa,IAAI,CAAC7G,IAAI,CAACuE,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCiD,UAAU,CAAC,GAAGtH,eAAe,oBAAoB,CAAC,EAClD+E,KAAK,CAAC,mBAAmB,IAAI,CAACtE,WAAW,EACzCsE,KAAK,CAAC,sBAAsBhC,OAAO+B,aAAa,EAChDC,KAAK,CAAC,SAAShC,OAAO8F,aAAa,EACnC9D,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,mBAAmB,IAAI,CAAClF,IAAI,CAACuH,GAAG,CAAC,oBACvCrC,KAAK,CAAC,SAAS,IAAI,CAAClF,IAAI,CAACuH,GAAG,CAAC,uBAC7BrC,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,gBAAgBhC,OAAOkD,QAAQ,EACrCvB,SAAS,CAAC;QAEb,IA
AI,CAAC8C,SAAS;YACZnH,OAAOuE,KAAK,CAAC,mDAAmD;gBAAE7B;YAAO;YACzE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;IAEA,MAAM2C,gBAAgBpH,MAA6B,EAAwB;QACzE,IAAI,CAAC,IAAI,CAACnC,WAAW,EAAE;YACrB,MAAM,IAAI0B,MAAM;QAClB;QAEAjC,OAAO2C,IAAI,CAAC,gFAAgF;YAC1F8B,eAAe/B,OAAO+B,aAAa;YACnC+D,eAAe9F,OAAO8F,aAAa;YACnC5C,UAAUlD,OAAOkD,QAAQ;QAC3B;QACA5F,OAAO2C,IAAI,CAAC,0BAA0B;YAAE4B,OAAO7B,OAAO6B,KAAK,CAAC4B,OAAO;QAAC;QAEpE,MAAM,CAACgB,QAAQ,GAAG,MAAM,IAAI,CAAC3H,IAAI,CAC9BuD,UAAU,CAACpD,gBACXqD,KAAK,CAAC,uBACNgD,MAAM,CAAC;YACNzC,QAAQ;YACRgE,QAAQ;YACRhD,OAAO0B,KAAKC,SAAS,CAACxD,OAAO6B,KAAK;YAClC8B,aAAa,IAAI,CAAC7G,IAAI,CAACuE,EAAE,CAACC,GAAG;YAC7BI,YAAY,IAAI,CAAC5E,IAAI,CAACuE,EAAE,CAACC,GAAG;QAC9B,GACCiD,UAAU,CAAC,GAAGtH,eAAe,oBAAoB,CAAC,EAClD+E,KAAK,CAAC,mBAAmB,IAAI,CAACtE,WAAW,EACzCsE,KAAK,CAAC,sBAAsBhC,OAAO+B,aAAa,EAChDC,KAAK,CAAC,SAAShC,OAAO8F,aAAa,EACnC9D,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,mBAAmB,IAAI,CAAClF,IAAI,CAACuH,GAAG,CAAC,oBACvCrC,KAAK,CAAC,SAAS,IAAI,CAAClF,IAAI,CAACuH,GAAG,CAAC,uBAC7BrC,KAAK,CAAC,aAAa,WACnBA,KAAK,CAAC,gBAAgBhC,OAAOkD,QAAQ,EACrCvB,SAAS,CAAC;QAEb,IAAI,CAAC8C,SAAS;YACZnH,OAAOuE,KAAK,CAAC,gDAAgD;gBAAE7B;YAAO;YACtE,MAAM,IAAIT,MAAM;QAClB;QAEA,OAAOkF;IACT;AACF;AAaA,SAASoC,aAAaQ,IAAY;IAChC,MAAMC,UAAUC,OAAOzD,IAAI,CACzBP,KAAKC,SAAS,CAAC;QAAET,WAAWsE,KAAKtE,SAAS,CAACC,WAAW;QAAIvC,IAAI4G,KAAK5G,EAAE;IAAC,IACtE+G,QAAQ,CAAC;IACX,OAAOF;AACT;AAEA,OAAO,SAAS9E,aAAaD,MAAc;IACzC,MAAMkF,UAAUF,OAAOzD,IAAI,CAACvB,QAAQ,UAAUiF,QAAQ,CAAC;IACvD,MAAME,SAASnE,KAAKoE,KAAK,CAACF;IAC1B,OAAO;QACL1E,WAAW,IAAI6E,KAAKF,OAAO3E,SAAS;QACpCtC,IAAIiH,OAAOjH,EAAE;IACf;AACF"}

package/dist/database/pubsub.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"pubsub.d.ts","sourceRoot":"","sources":["../../src/database/pubsub.ts"], …} (previous declaration map; mappings truncated in the original diff)
+{"version":3,"file":"pubsub.d.ts","sourceRoot":"","sources":["../../src/database/pubsub.ts"], …} (regenerated declaration map; mappings omitted for readability)

package/dist/database/pubsub.js
CHANGED
@@ -44,9 +44,15 @@ export class PostgresPubSub {
     }
     // destroy the listener and close the connection, do not destroy the knex connection
     async destroy() {
-        this._destroyed
-
-
+        if (this._destroyed) {
+            return;
+        }
+        try {
+            this._connection.off("close", this._onClosed);
+            await this.knex.client.destroyRawConnection(this._connection);
+        } finally{
+            this._destroyed = true;
+        }
     }
     // create a new listener and connect to the database
     static async create(knex) {

package/dist/database/pubsub.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"sources":["../../src/database/pubsub.ts"], …} (previous source map for dist/database/pubsub.js; content truncated in the original diff)
+{"version":3,"sources":["../../src/database/pubsub.ts"], …} (regenerated source map for dist/database/pubsub.js; VLQ mappings and embedded sourcesContent omitted for readability — the corresponding source changes appear under package/src/database/pubsub.ts below)

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sonamu-kit/tasks",
-  "version": "0.1.
+  "version": "0.1.3",
   "type": "module",
   "description": "Sonamu Task - Simple & Distributed Task Queue",
   "main": "./dist/index.js",
@@ -34,17 +34,19 @@
     "c12": "^3.3.2"
   },
   "devDependencies": {
-    "@
+    "@types/inflection": "^2.0.0",
+    "@biomejs/biome": "^2.3.10",
     "@swc/cli": "^0.7.8",
-    "@swc/core": "^1.
-    "@types/node": "
+    "@swc/core": "^1.13.5",
+    "@types/node": "25.0.3",
     "nodemon": "^3.1.10",
     "tsx": "^4.20.6",
     "typescript": "^5.9.3",
     "vitest": "^4.0.10"
   },
   "peerDependencies": {
-    "knex": "^3.1.0"
+    "knex": "^3.1.0",
+    "@logtape/logtape": "1.3.5"
   },
   "optionalDependencies": {
     "pg-native": "^3.5.2"

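With 0.1.3, @logtape/logtape joins knex as a peer dependency, and the backend (see the src/database/backend.ts changes below) logs under the ["sonamu", "internal", "tasks"] category, with per-query debug output under a "query" child category. LogTape does nothing with these records until the host application configures a sink, so a consumer that wants to see them needs a setup roughly like the sketch below. This is illustrative only; exact option names (lowestLevel in particular) may differ between LogTape releases.

import { configure, getConsoleSink } from "@logtape/logtape";

// Route the package's internal loggers to the console.
// The "query" logger is a child category, so this single entry also covers it.
await configure({
  sinks: { console: getConsoleSink() },
  loggers: [
    {
      category: ["sonamu", "internal", "tasks"],
      lowestLevel: "debug", // use "info" to hide the per-query SQL debug logs
      sinks: ["console"],
    },
  ],
});
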
package/src/client.ts
CHANGED
@@ -184,7 +184,6 @@ export class OpenWorkflow {
    * Declare a workflow without providing its implementation (which is provided
    * separately via `implementWorkflow`). Returns a lightweight WorkflowSpec
    * that can be used to schedule workflow runs.
-   * @param config - Workflow config
    * @param spec - Workflow spec
    * @returns Workflow spec
    * @example

package/src/core/duration.ts
CHANGED
@@ -14,7 +14,7 @@ type UnitAnyCase = Capitalize<Unit> | Uppercase<Unit> | Lowercase<Unit>;
 export type DurationString = `${number}` | `${number}${UnitAnyCase}` | `${number} ${UnitAnyCase}`;
 
 /**
- * Parse a duration string into milliseconds.
+ * Parse a duration string into milliseconds. Examples:
  * - short units: "1ms", "5s", "30m", "2h", "7d", "3w", "1y"
  * - long units: "1 millisecond", "5 seconds", "30 minutes", "2 hours", "7 days", "3 weeks", "1 year"
  * @param str - Duration string

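As a quick illustration of the two documented spellings: the parseDuration name and the package-root import below are assumptions, since this hunk only shows the docstring and the DurationString type.

import { parseDuration, type DurationString } from "@sonamu-kit/tasks"; // hypothetical exports

const lease: DurationString = "30m";        // short unit
const grace: DurationString = "5 seconds";  // long unit

// Both parse to milliseconds per the docstring above.
const leaseMs = parseDuration(lease); // expected: 1_800_000
const graceMs = parseDuration(grace); // expected: 5_000
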
package/src/core/workflow.ts
CHANGED
@@ -116,7 +116,8 @@ export async function validateInput<RunInput, Input>(
 /**
  * Check if a workflow run status represents a terminal state.
  * @param status - The workflow run status
- * @returns True if the status is terminal (completed, failed, or canceled)
+ * @returns True if the status is terminal (succeeded, completed, failed, or canceled).
+ * Note: 'succeeded' is deprecated in favor of 'completed'.
  */
 export function isTerminalStatus(
   status: string,

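The updated docstring widens the documented terminal set to include the deprecated 'succeeded' status. A minimal sketch of that set, for illustration only (the real isTerminalStatus in workflow.ts may take parameters beyond the one shown in this hunk):

// Terminal states as documented above; 'succeeded' is kept only for backward
// compatibility and is deprecated in favor of 'completed'.
const TERMINAL_STATUSES = new Set(["succeeded", "completed", "failed", "canceled"]);

function isTerminal(status: string): boolean {
  return TERMINAL_STATUSES.has(status);
}
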
package/src/database/backend.ts
CHANGED
@@ -1,3 +1,4 @@
+import { getLogger } from "@logtape/logtape";
 import { camelize } from "inflection";
 import knex, { type Knex } from "knex";
 import {
@@ -36,6 +37,9 @@ interface BackendPostgresOptions {
   usePubSub?: boolean;
 }
 
+const logger = getLogger(["sonamu", "internal", "tasks"]);
+const queryLogger = getLogger(["sonamu", "internal", "tasks", "query"]);
+
 /**
  * Manages a connection to a Postgres database for workflow operations.
  */
@@ -51,6 +55,12 @@ export class BackendPostgres implements Backend {
   private get knex(): Knex {
     if (!this._knex) {
       this._knex = knex(this.config);
+      this._knex.on("query", (query) => {
+        queryLogger.debug("SQL: {query}, Values: {bindings}", {
+          query: query.sql,
+          bindings: query.bindings,
+        });
+      });
     }
 
     return this._knex;
@@ -152,6 +162,11 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Creating workflow run: {workflowName}:{version}", {
+      workflowName: params.workflowName,
+      version: params.version,
+    });
+
     const qb = this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("workflow_runs")
@@ -175,6 +190,7 @@ export class BackendPostgres implements Backend {
 
     const workflowRun = await qb;
     if (!workflowRun[0]) {
+      logger.error("Failed to create workflow run: {params}", { params });
       throw new Error("Failed to create workflow run");
     }
 
@@ -186,6 +202,7 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Getting workflow run: {workflowRunId}", { workflowRunId: params.workflowRunId });
     const workflowRun = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("workflow_runs")
@@ -224,6 +241,10 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Listing workflow runs: {after}, {before}", {
+      after: params.after,
+      before: params.before,
+    });
     const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
     const { after, before } = params;
 
@@ -271,6 +292,10 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Claiming workflow run: {workerId}, {leaseDurationMs}", {
+      workerId: params.workerId,
+      leaseDurationMs: params.leaseDurationMs,
+    });
     const claimed = await this.knex
       .with("expired", (qb) =>
         qb
@@ -331,6 +356,11 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Extending workflow run lease: {workflowRunId}, {workerId}, {leaseDurationMs}", {
+      workflowRunId: params.workflowRunId,
+      workerId: params.workerId,
+      leaseDurationMs: params.leaseDurationMs,
+    });
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("workflow_runs")
@@ -345,6 +375,7 @@ export class BackendPostgres implements Backend {
       .returning("*");
 
     if (!updated) {
+      logger.error("Failed to extend lease for workflow run: {params}", { params });
       throw new Error("Failed to extend lease for workflow run");
     }
 
@@ -356,6 +387,12 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Sleeping workflow run: {workflowRunId}, {workerId}, {availableAt}", {
+      workflowRunId: params.workflowRunId,
+      workerId: params.workerId,
+      availableAt: params.availableAt,
+    });
+
     // 'succeeded' status is deprecated
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
@@ -373,6 +410,7 @@ export class BackendPostgres implements Backend {
       .returning("*");
 
     if (!updated) {
+      logger.error("Failed to sleep workflow run: {params}", { params });
       throw new Error("Failed to sleep workflow run");
     }
 
@@ -384,6 +422,12 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Completing workflow run: {workflowRunId}, {workerId}, {output}", {
+      workflowRunId: params.workflowRunId,
+      workerId: params.workerId,
+      output: params.output,
+    });
+
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("workflow_runs")
@@ -403,6 +447,7 @@ export class BackendPostgres implements Backend {
       .returning("*");
 
     if (!updated) {
+      logger.error("Failed to complete workflow run: {params}", { params });
       throw new Error("Failed to complete workflow run");
     }
 
@@ -426,6 +471,12 @@ export class BackendPostgres implements Backend {
     const retryIntervalExpr = `LEAST(${initialIntervalMs} * POWER(${backoffCoefficient}, "attempts" - 1), ${maximumIntervalMs}) * INTERVAL '1 millisecond'`;
     const deadlineExceededCondition = `"deadline_at" IS NOT NULL AND NOW() + (${retryIntervalExpr}) >= "deadline_at"`;
 
+    logger.info("Failing workflow run: {workflowRunId}, {workerId}, {error}", {
+      workflowRunId: params.workflowRunId,
+      workerId: params.workerId,
+      error: params.error,
+    });
+
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("workflow_runs")
@@ -451,6 +502,7 @@ export class BackendPostgres implements Backend {
       .returning("*");
 
     if (!updated) {
+      logger.error("Failed to mark workflow run failed: {params}", { params });
       throw new Error("Failed to mark workflow run failed");
     }
 
@@ -462,6 +514,8 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Canceling workflow run: {workflowRunId}", { workflowRunId: params.workflowRunId });
+
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("workflow_runs")
@@ -494,11 +548,16 @@ export class BackendPostgres implements Backend {
       // throw error for completed/failed workflows
       // 'succeeded' status is deprecated
       if (["succeeded", "completed", "failed"].includes(existing.status)) {
+        logger.error("Cannot cancel workflow run: {params} with status {status}", {
+          params,
+          status: existing.status,
+        });
         throw new Error(
           `Cannot cancel workflow run ${params.workflowRunId} with status ${existing.status}`,
         );
       }
 
+      logger.error("Failed to cancel workflow run: {params}", { params });
       throw new Error("Failed to cancel workflow run");
     }
 
@@ -510,6 +569,12 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Creating step attempt: {workflowRunId}, {stepName}, {kind}", {
+      workflowRunId: params.workflowRunId,
+      stepName: params.stepName,
+      kind: params.kind,
+    });
+
     const [stepAttempt] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("step_attempts")
@@ -529,6 +594,7 @@ export class BackendPostgres implements Backend {
       .returning("*");
 
     if (!stepAttempt) {
+      logger.error("Failed to create step attempt: {params}", { params });
       throw new Error("Failed to create step attempt");
     }
 
@@ -540,6 +606,8 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Getting step attempt: {stepAttemptId}", { stepAttemptId: params.stepAttemptId });
+
     const stepAttempt = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("step_attempts")
@@ -555,6 +623,12 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Listing step attempts: {workflowRunId}, {after}, {before}", {
+      workflowRunId: params.workflowRunId,
+      after: params.after,
+      before: params.before,
+    });
+
     const limit = params.limit ?? DEFAULT_PAGINATION_PAGE_SIZE;
     const { after, before } = params;
 
@@ -639,11 +713,18 @@ export class BackendPostgres implements Backend {
     };
   }
 
+  // NOTE: This does not seem to work in the live service; the query and related logic need to be checked.
   async completeStepAttempt(params: CompleteStepAttemptParams): Promise<StepAttempt> {
     if (!this.initialized) {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Marking step attempt as completed: {workflowRunId}, {stepAttemptId}, {workerId}", {
+      workflowRunId: params.workflowRunId,
+      stepAttemptId: params.stepAttemptId,
+      workerId: params.workerId,
+    });
+
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("step_attempts as sa")
@@ -666,6 +747,7 @@ export class BackendPostgres implements Backend {
       .returning("sa.*");
 
     if (!updated) {
+      logger.error("Failed to mark step attempt completed: {params}", { params });
       throw new Error("Failed to mark step attempt completed");
     }
 
@@ -677,6 +759,13 @@ export class BackendPostgres implements Backend {
       throw new Error("Backend not initialized");
     }
 
+    logger.info("Marking step attempt as failed: {workflowRunId}, {stepAttemptId}, {workerId}", {
+      workflowRunId: params.workflowRunId,
+      stepAttemptId: params.stepAttemptId,
+      workerId: params.workerId,
+    });
+    logger.info("Error: {error.message}", { error: params.error.message });
+
     const [updated] = await this.knex
       .withSchema(DEFAULT_SCHEMA)
       .table("step_attempts as sa")
@@ -699,6 +788,7 @@ export class BackendPostgres implements Backend {
       .returning("sa.*");
 
     if (!updated) {
+      logger.error("Failed to mark step attempt failed: {params}", { params });
       throw new Error("Failed to mark step attempt failed");
     }
 

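The failWorkflowRun hunk above keeps the retry math inside SQL: the next delay is LEAST(initial * POWER(backoff, attempts - 1), max) milliseconds, and the run is finalized as 'failed' instead of rescheduled whenever that next retry would land at or past deadline_at. The same computation in TypeScript, as a sketch only (field names follow the DEFAULT_RETRY_POLICY usage visible in the hunk; this is not the package's actual implementation):

interface RetryPolicy {
  initialIntervalMs: number;
  backoffCoefficient: number;
  maximumIntervalMs: number;
}

// Delay before the next retry, given how many attempts have already run.
function nextRetryDelayMs(attempts: number, policy: RetryPolicy): number {
  return Math.min(
    policy.initialIntervalMs * Math.pow(policy.backoffCoefficient, attempts - 1),
    policy.maximumIntervalMs,
  );
}

// Mirrors the deadlineExceededCondition: fail permanently instead of retrying
// when the rescheduled time would not beat the deadline.
function wouldExceedDeadline(now: Date, deadlineAt: Date | null, delayMs: number): boolean {
  return deadlineAt !== null && now.getTime() + delayMs >= deadlineAt.getTime();
}
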
package/src/database/pubsub.ts
CHANGED
@@ -65,9 +65,15 @@ export class PostgresPubSub {
 
   // destroy the listener and close the connection, do not destroy the knex connection
   async destroy() {
-    this._destroyed
-
-
+    if (this._destroyed) {
+      return;
+    }
+    try {
+      this._connection.off("close", this._onClosed);
+      await this.knex.client.destroyRawConnection(this._connection);
+    } finally {
+      this._destroyed = true;
+    }
   }
 
   // create a new listener and connect to the database

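The reworked destroy() is guarded and idempotent: the "close" handler is detached before the raw connection is torn down, so the auto-reconnect in _onClosed cannot fire during shutdown, and _destroyed is set in a finally block even if destroyRawConnection throws. In practice that makes shutdown safe to call more than once, roughly as in the sketch below (illustrative only; create(), listenEvent(), and destroy() are the methods shown in this file, the "new_tasks" channel matches DEFAULT_LISTEN_CHANNEL in backend.ts, and the import path for PostgresPubSub is not shown in this diff):

import knex from "knex";

const db = knex({ client: "pg", connection: process.env.DATABASE_URL });

const pubsub = await PostgresPubSub.create(db);
pubsub.listenEvent("new_tasks", (result) => {
  // handle NOTIFY payloads (or connection errors) here
});

// ... later, during shutdown:
await pubsub.destroy();
await pubsub.destroy(); // second call is a no-op thanks to the _destroyed guard
await db.destroy();     // the pubsub never destroys the shared knex instance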