@coji/durably 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,74 @@
+ # @coji/durably
+
+ Step-oriented resumable batch execution for Node.js and browsers using SQLite.
+
+ **[Documentation](https://coji.github.io/durably/)** | **[GitHub](https://github.com/coji/durably)** | **[Live Demo](https://durably-demo.vercel.app)**
+
+ ## Features
+
+ - Resumable batch processing with step-level persistence
+ - Works in both Node.js and browsers
+ - Uses SQLite for state management (better-sqlite3/libsql for Node.js, SQLite WASM for browsers)
+ - Minimal dependencies - just Kysely and Zod as peer dependencies
+ - Event system for monitoring and extensibility
+ - Type-safe input/output with Zod schemas
+
+ ## Installation
+
+ ```bash
+ # Node.js with better-sqlite3
+ npm install @coji/durably kysely zod better-sqlite3
+
+ # Node.js with libsql
+ npm install @coji/durably kysely zod @libsql/client @libsql/kysely-libsql
+
+ # Browser with SQLocal
+ npm install @coji/durably kysely zod sqlocal
+ ```
+
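+ Whichever driver you install, you construct a Kysely dialect and hand it to `createDurably` (see Usage below). A minimal sketch of the two setups not shown there; the import paths and options come from those drivers' own documentation, so verify them against the versions you install:
+
+ ```ts
+ // Node.js with libsql: assumes @libsql/kysely-libsql exports LibsqlDialect
+ import { LibsqlDialect } from '@libsql/kysely-libsql'
+ // Browser with SQLocal (SQLite WASM): assumes the 'sqlocal/kysely' entry point
+ import { SQLocalKysely } from 'sqlocal/kysely'
+
+ // libsql can point at a local file or a remote libsql URL
+ const libsqlDialect = new LibsqlDialect({ url: 'file:local.db' })
+
+ // SQLocal exposes a ready-made Kysely dialect backed by the browser's origin private file system
+ const { dialect: browserDialect } = new SQLocalKysely('local.sqlite3')
+ ```
+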
+ ## Usage
+
+ ```ts
+ import { createDurably } from '@coji/durably'
+ import SQLite from 'better-sqlite3'
+ import { SqliteDialect } from 'kysely'
+ import { z } from 'zod'
+
+ const dialect = new SqliteDialect({
+   database: new SQLite('local.db'),
+ })
+
+ const durably = createDurably({ dialect })
+
+ const syncUsers = durably.defineJob(
+   {
+     name: 'sync-users',
+     input: z.object({ orgId: z.string() }),
+     output: z.object({ syncedCount: z.number() }),
+   },
+   async (step, payload) => {
+     // `api` and `db` stand in for your own data-access code
+     const users = await step.run('fetch-users', async () => {
+       return api.fetchUsers(payload.orgId)
+     })
+
+     await step.run('save-to-db', async () => {
+       await db.upsertUsers(users)
+     })
+
+     return { syncedCount: users.length }
+   },
+ )
+
+ await durably.migrate()
+ durably.start()
+
+ await syncUsers.trigger({ orgId: 'org_123' })
+ ```
+
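+ Runs can also be observed through the event system listed under Features. A minimal sketch continuing the example above; the event names and fields here follow the package's published type declarations:
+
+ ```ts
+ import { withLogPersistence } from '@coji/durably'
+
+ // Optional plugin that persists log:write events to the durably_logs table
+ durably.use(withLogPersistence())
+
+ // Subscribe to lifecycle events; on() returns an unsubscribe function
+ const unsubscribe = durably.on('run:complete', (event) => {
+   console.log(`run ${event.runId} finished in ${event.duration}ms`, event.output)
+ })
+
+ durably.on('run:fail', (event) => {
+   console.error(`run ${event.runId} failed at ${event.failedStepName}: ${event.error}`)
+ })
+
+ // trigger() returns as soon as the run is queued; triggerAndWait() resolves with its output
+ const { output } = await syncUsers.triggerAndWait({ orgId: 'org_123' })
+ ```
+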
+ ## Documentation
+
+ For full documentation, visit [coji.github.io/durably](https://coji.github.io/durably/).
+
+ ## License
+
+ MIT
package/dist/index.d.ts CHANGED
@@ -299,9 +299,9 @@ interface Storage {
  }

  /**
- * Job context passed to the job function
+ * Step context passed to the job function
  */
- interface JobContext {
+ interface StepContext {
  /**
  * The ID of the current run
  */
@@ -326,7 +326,7 @@ interface JobContext {
  /**
  * Job function type
  */
- type JobFunction<TInput, TOutput> = (context: JobContext, payload: TInput) => Promise<TOutput>;
+ type JobFunction<TInput, TOutput> = (step: StepContext, payload: TInput) => Promise<TOutput>;
  /**
  * Job definition options
  */
@@ -506,4 +506,4 @@ declare class CancelledError extends Error {
  constructor(runId: string);
  }

- export { CancelledError, type Database, type Durably, type DurablyEvent, type DurablyOptions, type DurablyPlugin, type ErrorHandler, type EventType, type JobContext, type JobHandle, type Log, type LogWriteEvent, type LogsTable, type Run, type RunCompleteEvent, type RunFailEvent, type RunFilter$1 as RunFilter, type RunStartEvent, type RunsTable, type SchemaVersionsTable, type Step, type StepCompleteEvent, type StepFailEvent, type StepStartEvent, type StepsTable, type TriggerAndWaitResult, type WorkerErrorEvent, createDurably, withLogPersistence };
+ export { CancelledError, type Database, type Durably, type DurablyEvent, type DurablyOptions, type DurablyPlugin, type ErrorHandler, type EventType, type JobHandle, type Log, type LogWriteEvent, type LogsTable, type Run, type RunCompleteEvent, type RunFailEvent, type RunFilter$1 as RunFilter, type RunStartEvent, type RunsTable, type SchemaVersionsTable, type Step, type StepCompleteEvent, type StepContext, type StepFailEvent, type StepStartEvent, type StepsTable, type TriggerAndWaitResult, type WorkerErrorEvent, createDurably, withLogPersistence };
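
The change in these typings is the rename of `JobContext` to `StepContext`, plus the matching parameter name in `JobFunction`. A minimal sketch of consumer code typed against the new name, assuming the type is imported directly from the package:

```ts
import type { StepContext } from '@coji/durably'

// In 0.1.0 this parameter type was exported as JobContext; in 0.2.0 it is StepContext.
// Only the name changed, so updating the import and the annotation is enough.
async function countTo(step: StepContext, payload: { limit: number }) {
  let total = 0
  for (let i = 1; i <= payload.limit; i++) {
    // Each completed step is persisted, so a resumed run replays these from storage
    total = await step.run(`add-${i}`, async () => total + i)
    step.progress(i, payload.limit)
  }
  step.log.info('done counting', { total })
  return { total }
}
```
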
package/dist/index.js CHANGED
@@ -463,7 +463,7 @@ var CancelledError = class extends Error {
  };

  // src/context.ts
- function createJobContext(run, jobName, storage, eventEmitter) {
+ function createStepContext(run, jobName, storage, eventEmitter) {
  let stepIndex = run.currentStepIndex;
  let currentStepName = null;
  return {
@@ -666,8 +666,8 @@ function createWorker(config, storage, eventEmitter, jobRegistry) {
  });
  const startTime = Date.now();
  try {
- const context = createJobContext(run, run.jobName, storage, eventEmitter);
- const output = await job.fn(context, run.payload);
+ const step = createStepContext(run, run.jobName, storage, eventEmitter);
+ const output = await job.fn(step, run.payload);
  if (job.outputSchema) {
  const parseResult = job.outputSchema.safeParse(output);
  if (!parseResult.success) {
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/durably.ts","../src/events.ts","../src/job.ts","../src/migrations.ts","../src/storage.ts","../src/errors.ts","../src/context.ts","../src/worker.ts","../src/plugins/log-persistence.ts"],"sourcesContent":["import type { Dialect } from 'kysely'\nimport { Kysely } from 'kysely'\nimport type { z } from 'zod'\nimport {\n type AnyEventInput,\n type ErrorHandler,\n type EventListener,\n type EventType,\n type Unsubscribe,\n createEventEmitter,\n} from './events'\nimport {\n type JobDefinition,\n type JobFunction,\n type JobHandle,\n createJobHandle,\n createJobRegistry,\n} from './job'\nimport { runMigrations } from './migrations'\nimport type { Database } from './schema'\nimport {\n type Run,\n type RunFilter,\n type Storage,\n createKyselyStorage,\n} from './storage'\nimport { createWorker } from './worker'\n\n/**\n * Options for creating a Durably instance\n */\nexport interface DurablyOptions {\n dialect: Dialect\n pollingInterval?: number\n heartbeatInterval?: number\n staleThreshold?: number\n}\n\n/**\n * Default configuration values\n */\nconst DEFAULTS = {\n pollingInterval: 1000,\n heartbeatInterval: 5000,\n staleThreshold: 30000,\n} as const\n\n/**\n * Plugin interface for extending Durably\n */\nexport interface DurablyPlugin {\n name: string\n install(durably: Durably): void\n}\n\n/**\n * Durably instance\n */\nexport interface Durably {\n /**\n * Run database migrations\n * This is idempotent and safe to call multiple times\n */\n migrate(): Promise<void>\n\n /**\n * Get the underlying Kysely database instance\n * Useful for testing and advanced use cases\n */\n readonly db: Kysely<Database>\n\n /**\n * Storage layer for database operations\n */\n readonly storage: Storage\n\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Define a job\n */\n defineJob<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined = undefined,\n >(\n definition: JobDefinition<TName, TInputSchema, TOutputSchema>,\n fn: JobFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >,\n ): JobHandle<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n >\n\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Retry a failed run by resetting it to pending\n * @throws Error if run is not in failed status\n */\n retry(runId: string): Promise<void>\n\n /**\n * Cancel a pending or running run\n * @throws Error if run is already completed, failed, or cancelled\n */\n cancel(runId: string): Promise<void>\n\n /**\n * Delete a completed, failed, or cancelled run and its associated steps and logs\n * @throws Error if run is pending or running, or does not exist\n */\n deleteRun(runId: string): Promise<void>\n\n /**\n * Get a run by ID (returns unknown output type)\n */\n getRun(runId: string): Promise<Run | null>\n\n /**\n * Get runs with optional filtering\n */\n getRuns(filter?: RunFilter): Promise<Run[]>\n\n /**\n * Register a plugin\n */\n use(plugin: DurablyPlugin): void\n}\n\n/**\n * Create a Durably instance\n */\nexport function createDurably(options: DurablyOptions): Durably {\n const config = {\n pollingInterval: options.pollingInterval ?? DEFAULTS.pollingInterval,\n heartbeatInterval: options.heartbeatInterval ?? DEFAULTS.heartbeatInterval,\n staleThreshold: options.staleThreshold ?? DEFAULTS.staleThreshold,\n }\n\n const db = new Kysely<Database>({ dialect: options.dialect })\n const storage = createKyselyStorage(db)\n const eventEmitter = createEventEmitter()\n const jobRegistry = createJobRegistry()\n const worker = createWorker(config, storage, eventEmitter, jobRegistry)\n\n // Track migration state for idempotency\n let migrating: Promise<void> | null = null\n let migrated = false\n\n const durably: Durably = {\n db,\n storage,\n on: eventEmitter.on,\n emit: eventEmitter.emit,\n onError: eventEmitter.onError,\n start: worker.start,\n stop: worker.stop,\n\n defineJob<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined = undefined,\n >(\n definition: JobDefinition<TName, TInputSchema, TOutputSchema>,\n fn: JobFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >,\n ): JobHandle<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n > {\n return createJobHandle(definition, fn, storage, eventEmitter, jobRegistry)\n },\n\n getRun: storage.getRun,\n getRuns: storage.getRuns,\n\n use(plugin: DurablyPlugin): void {\n plugin.install(durably)\n },\n\n async retry(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'completed') {\n throw new Error(`Cannot retry completed run: ${runId}`)\n }\n if (run.status === 'pending') {\n throw new Error(`Cannot retry pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot retry running run: ${runId}`)\n }\n // Only failed runs can be retried\n await storage.updateRun(runId, {\n status: 'pending',\n error: null,\n })\n },\n\n async cancel(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'completed') {\n throw new Error(`Cannot cancel completed run: ${runId}`)\n }\n if (run.status === 'failed') {\n throw new Error(`Cannot cancel failed run: ${runId}`)\n }\n if (run.status === 'cancelled') {\n throw new Error(`Cannot cancel already cancelled run: ${runId}`)\n }\n // pending or running can be cancelled\n await storage.updateRun(runId, {\n status: 'cancelled',\n })\n },\n\n async deleteRun(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'pending') {\n throw new Error(`Cannot delete pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot delete running run: ${runId}`)\n }\n // completed, failed, or cancelled can be deleted\n await storage.deleteRun(runId)\n },\n\n async migrate(): Promise<void> {\n // Already migrated\n if (migrated) {\n return\n }\n\n // Migration in progress, wait for it\n if (migrating) {\n return migrating\n }\n\n // Start migration\n migrating = runMigrations(db)\n .then(() => {\n migrated = true\n })\n .finally(() => {\n migrating = null\n })\n\n return migrating\n },\n }\n\n return durably\n}\n","/**\n * Base event interface\n */\nexport interface BaseEvent {\n type: string\n timestamp: string\n sequence: number\n}\n\n/**\n * Run start event\n */\nexport interface RunStartEvent extends BaseEvent {\n type: 'run:start'\n runId: string\n jobName: string\n payload: unknown\n}\n\n/**\n * Run complete event\n */\nexport interface RunCompleteEvent extends BaseEvent {\n type: 'run:complete'\n runId: string\n jobName: string\n output: unknown\n duration: number\n}\n\n/**\n * Run fail event\n */\nexport interface RunFailEvent extends BaseEvent {\n type: 'run:fail'\n runId: string\n jobName: string\n error: string\n failedStepName: string\n}\n\n/**\n * Step start event\n */\nexport interface StepStartEvent extends BaseEvent {\n type: 'step:start'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n}\n\n/**\n * Step complete event\n */\nexport interface StepCompleteEvent extends BaseEvent {\n type: 'step:complete'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n output: unknown\n duration: number\n}\n\n/**\n * Step fail event\n */\nexport interface StepFailEvent extends BaseEvent {\n type: 'step:fail'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n error: string\n}\n\n/**\n * Log write event\n */\nexport interface LogWriteEvent extends BaseEvent {\n type: 'log:write'\n runId: string\n stepName: 
string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data: unknown\n}\n\n/**\n * Worker error event (internal errors like heartbeat failures)\n */\nexport interface WorkerErrorEvent extends BaseEvent {\n type: 'worker:error'\n error: string\n context: string\n runId?: string\n}\n\n/**\n * All event types as discriminated union\n */\nexport type DurablyEvent =\n | RunStartEvent\n | RunCompleteEvent\n | RunFailEvent\n | StepStartEvent\n | StepCompleteEvent\n | StepFailEvent\n | LogWriteEvent\n | WorkerErrorEvent\n\n/**\n * Event types for type-safe event names\n */\nexport type EventType = DurablyEvent['type']\n\n/**\n * Extract event by type\n */\nexport type EventByType<T extends EventType> = Extract<\n DurablyEvent,\n { type: T }\n>\n\n/**\n * Event input (without auto-generated fields)\n */\nexport type EventInput<T extends EventType> = Omit<\n EventByType<T>,\n 'timestamp' | 'sequence'\n>\n\n/**\n * All possible event inputs as a union (properly distributed)\n */\nexport type AnyEventInput =\n | EventInput<'run:start'>\n | EventInput<'run:complete'>\n | EventInput<'run:fail'>\n | EventInput<'step:start'>\n | EventInput<'step:complete'>\n | EventInput<'step:fail'>\n | EventInput<'log:write'>\n | EventInput<'worker:error'>\n\n/**\n * Event listener function\n */\nexport type EventListener<T extends EventType> = (event: EventByType<T>) => void\n\n/**\n * Unsubscribe function returned by on()\n */\nexport type Unsubscribe = () => void\n\n/**\n * Error handler function for listener exceptions\n */\nexport type ErrorHandler = (error: Error, event: DurablyEvent) => void\n\n/**\n * Event emitter interface\n */\nexport interface EventEmitter {\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n}\n\n/**\n * Create an event emitter\n */\nexport function createEventEmitter(): EventEmitter {\n const listeners = new Map<EventType, Set<EventListener<EventType>>>()\n let sequence = 0\n let errorHandler: ErrorHandler | null = null\n\n return {\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe {\n if (!listeners.has(type)) {\n listeners.set(type, new Set())\n }\n\n const typeListeners = listeners.get(type)\n typeListeners?.add(listener as unknown as EventListener<EventType>)\n\n return () => {\n typeListeners?.delete(listener as unknown as EventListener<EventType>)\n }\n },\n\n onError(handler: ErrorHandler): void {\n errorHandler = handler\n },\n\n emit(event: AnyEventInput): void {\n sequence++\n const fullEvent = {\n ...event,\n timestamp: new Date().toISOString(),\n sequence,\n } as DurablyEvent\n\n const typeListeners = listeners.get(event.type)\n if (!typeListeners) {\n return\n }\n\n for (const listener of typeListeners) {\n try {\n listener(fullEvent)\n } catch (error) {\n if (errorHandler) {\n errorHandler(\n error instanceof Error ? 
error : new Error(String(error)),\n fullEvent,\n )\n }\n // Continue to next listener regardless of error\n }\n }\n },\n }\n}\n","import type { z } from 'zod'\nimport type { EventEmitter } from './events'\nimport type { Run, Storage } from './storage'\n\n/**\n * Job context passed to the job function\n */\nexport interface JobContext {\n /**\n * The ID of the current run\n */\n readonly runId: string\n\n /**\n * Execute a step with automatic persistence and replay\n */\n run<T>(name: string, fn: () => T | Promise<T>): Promise<T>\n\n /**\n * Report progress for the current run\n */\n progress(current: number, total?: number, message?: string): void\n\n /**\n * Log a message\n */\n log: {\n info(message: string, data?: unknown): void\n warn(message: string, data?: unknown): void\n error(message: string, data?: unknown): void\n }\n}\n\n/**\n * Job function type\n */\nexport type JobFunction<TInput, TOutput> = (\n context: JobContext,\n payload: TInput,\n) => Promise<TOutput>\n\n/**\n * Job definition options\n */\nexport interface JobDefinition<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined,\n> {\n name: TName\n input: TInputSchema\n output?: TOutputSchema\n}\n\n/**\n * Trigger options\n */\nexport interface TriggerOptions {\n idempotencyKey?: string\n concurrencyKey?: string\n /** Timeout in milliseconds for triggerAndWait() */\n timeout?: number\n}\n\n/**\n * Run filter options\n */\nexport interface RunFilter {\n status?: 'pending' | 'running' | 'completed' | 'failed'\n jobName?: string\n}\n\n/**\n * Typed run with output type\n */\nexport interface TypedRun<TOutput> extends Omit<Run, 'output'> {\n output: TOutput | null\n}\n\n/**\n * Batch trigger input - either just the input or input with options\n */\nexport type BatchTriggerInput<TInput> =\n | TInput\n | { input: TInput; options?: TriggerOptions }\n\n/**\n * Result of triggerAndWait\n */\nexport interface TriggerAndWaitResult<TOutput> {\n id: string\n output: TOutput\n}\n\n/**\n * Job handle returned by defineJob\n */\nexport interface JobHandle<TName extends string, TInput, TOutput> {\n readonly name: TName\n\n /**\n * Trigger a new run\n */\n trigger(input: TInput, options?: TriggerOptions): Promise<TypedRun<TOutput>>\n\n /**\n * Trigger a new run and wait for completion\n * Returns the output directly, throws if the run fails\n */\n triggerAndWait(\n input: TInput,\n options?: TriggerOptions,\n ): Promise<TriggerAndWaitResult<TOutput>>\n\n /**\n * Trigger multiple runs in a batch\n * All inputs are validated before any runs are created\n */\n batchTrigger(\n inputs: BatchTriggerInput<TInput>[],\n ): Promise<TypedRun<TOutput>[]>\n\n /**\n * Get a run by ID\n */\n getRun(id: string): Promise<TypedRun<TOutput> | null>\n\n /**\n * Get runs with optional filter\n */\n getRuns(filter?: Omit<RunFilter, 'jobName'>): Promise<TypedRun<TOutput>[]>\n}\n\n/**\n * Internal job registration\n */\nexport interface RegisteredJob<TInput, TOutput> {\n name: string\n inputSchema: z.ZodType\n outputSchema: z.ZodType | undefined\n fn: JobFunction<TInput, TOutput>\n}\n\n/**\n * Job registry for managing registered jobs\n */\nexport interface JobRegistry {\n /**\n * Register a job\n */\n register<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void\n\n /**\n * Get a registered job by name\n */\n get(name: string): RegisteredJob<unknown, unknown> | undefined\n\n /**\n * Check if a job is registered\n */\n has(name: string): boolean\n}\n\n/**\n * Create a job registry\n */\nexport function 
createJobRegistry(): JobRegistry {\n const jobs = new Map<string, RegisteredJob<unknown, unknown>>()\n\n return {\n register<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void {\n if (jobs.has(job.name)) {\n throw new Error(`Job \"${job.name}\" is already registered`)\n }\n jobs.set(job.name, job as RegisteredJob<unknown, unknown>)\n },\n\n get(name: string): RegisteredJob<unknown, unknown> | undefined {\n return jobs.get(name)\n },\n\n has(name: string): boolean {\n return jobs.has(name)\n },\n }\n}\n\n/**\n * Create a job handle\n */\nexport function createJobHandle<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined,\n>(\n definition: JobDefinition<TName, TInputSchema, TOutputSchema>,\n fn: JobFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >,\n storage: Storage,\n _eventEmitter: EventEmitter,\n registry: JobRegistry,\n): JobHandle<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : undefined\n> {\n type TInput = z.infer<TInputSchema>\n type TOutput = TOutputSchema extends z.ZodType\n ? z.infer<TOutputSchema>\n : undefined\n\n // Register the job\n registry.register({\n name: definition.name,\n inputSchema: definition.input,\n outputSchema: definition.output,\n fn: fn as JobFunction<unknown, unknown>,\n })\n\n return {\n name: definition.name,\n\n async trigger(\n input: TInput,\n options?: TriggerOptions,\n ): Promise<TypedRun<TOutput>> {\n // Validate input\n const parseResult = definition.input.safeParse(input)\n if (!parseResult.success) {\n throw new Error(`Invalid input: ${parseResult.error.message}`)\n }\n\n // Create the run\n const run = await storage.createRun({\n jobName: definition.name,\n payload: parseResult.data,\n idempotencyKey: options?.idempotencyKey,\n concurrencyKey: options?.concurrencyKey,\n })\n\n return run as TypedRun<TOutput>\n },\n\n async triggerAndWait(\n input: TInput,\n options?: TriggerOptions,\n ): Promise<TriggerAndWaitResult<TOutput>> {\n // Trigger the run\n const run = await this.trigger(input, options)\n\n // Wait for completion via event subscription\n return new Promise((resolve, reject) => {\n let timeoutId: ReturnType<typeof setTimeout> | undefined\n let resolved = false\n\n const cleanup = () => {\n if (resolved) return\n resolved = true\n unsubscribeComplete()\n unsubscribeFail()\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n }\n\n const unsubscribeComplete = _eventEmitter.on(\n 'run:complete',\n (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n resolve({\n id: run.id,\n output: event.output as TOutput,\n })\n }\n },\n )\n\n const unsubscribeFail = _eventEmitter.on('run:fail', (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n reject(new Error(event.error))\n }\n })\n\n // Check current status after subscribing (race condition mitigation)\n // If the run completed before we subscribed, we need to handle it\n storage.getRun(run.id).then((currentRun) => {\n if (resolved || !currentRun) return\n if (currentRun.status === 'completed') {\n cleanup()\n resolve({\n id: run.id,\n output: currentRun.output as TOutput,\n })\n } else if (currentRun.status === 'failed') {\n cleanup()\n reject(new Error(currentRun.error || 'Run failed'))\n }\n })\n\n // Set timeout if specified\n if (options?.timeout !== undefined) {\n timeoutId = setTimeout(() => {\n if (!resolved) {\n cleanup()\n reject(\n new Error(`triggerAndWait timeout after 
${options.timeout}ms`),\n )\n }\n }, options.timeout)\n }\n })\n },\n\n async batchTrigger(\n inputs: (TInput | { input: TInput; options?: TriggerOptions })[],\n ): Promise<TypedRun<TOutput>[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Normalize inputs to { input, options } format\n const normalized = inputs.map((item) => {\n if (item && typeof item === 'object' && 'input' in item) {\n return item as { input: TInput; options?: TriggerOptions }\n }\n return { input: item as TInput, options: undefined }\n })\n\n // Validate all inputs first (before creating any runs)\n const validated: { payload: unknown; options?: TriggerOptions }[] = []\n for (let i = 0; i < normalized.length; i++) {\n const parseResult = definition.input.safeParse(normalized[i].input)\n if (!parseResult.success) {\n throw new Error(\n `Invalid input at index ${i}: ${parseResult.error.message}`,\n )\n }\n validated.push({\n payload: parseResult.data,\n options: normalized[i].options,\n })\n }\n\n // Create all runs\n const runs = await storage.batchCreateRuns(\n validated.map((v) => ({\n jobName: definition.name,\n payload: v.payload,\n idempotencyKey: v.options?.idempotencyKey,\n concurrencyKey: v.options?.concurrencyKey,\n })),\n )\n\n return runs as TypedRun<TOutput>[]\n },\n\n async getRun(id: string): Promise<TypedRun<TOutput> | null> {\n const run = await storage.getRun(id)\n if (!run || run.jobName !== definition.name) {\n return null\n }\n return run as TypedRun<TOutput>\n },\n\n async getRuns(\n filter?: Omit<RunFilter, 'jobName'>,\n ): Promise<TypedRun<TOutput>[]> {\n const runs = await storage.getRuns({\n ...filter,\n jobName: definition.name,\n })\n return runs as TypedRun<TOutput>[]\n },\n }\n}\n","import type { Kysely } from 'kysely'\nimport type { Database } from './schema'\n\n/**\n * Migration definitions\n */\ninterface Migration {\n version: number\n up: (db: Kysely<Database>) => Promise<void>\n}\n\nconst migrations: Migration[] = [\n {\n version: 1,\n up: async (db) => {\n // Create runs table\n await db.schema\n .createTable('durably_runs')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('job_name', 'text', (col) => col.notNull())\n .addColumn('payload', 'text', (col) => col.notNull())\n .addColumn('status', 'text', (col) => col.notNull())\n .addColumn('idempotency_key', 'text')\n .addColumn('concurrency_key', 'text')\n .addColumn('current_step_index', 'integer', (col) =>\n col.notNull().defaultTo(0),\n )\n .addColumn('progress', 'text')\n .addColumn('output', 'text')\n .addColumn('error', 'text')\n .addColumn('heartbeat_at', 'text', (col) => col.notNull())\n .addColumn('created_at', 'text', (col) => col.notNull())\n .addColumn('updated_at', 'text', (col) => col.notNull())\n .execute()\n\n // Create runs indexes\n await db.schema\n .createIndex('idx_durably_runs_job_idempotency')\n .ifNotExists()\n .on('durably_runs')\n .columns(['job_name', 'idempotency_key'])\n .unique()\n .execute()\n\n await db.schema\n .createIndex('idx_durably_runs_status_concurrency')\n .ifNotExists()\n .on('durably_runs')\n .columns(['status', 'concurrency_key'])\n .execute()\n\n await db.schema\n .createIndex('idx_durably_runs_status_created')\n .ifNotExists()\n .on('durably_runs')\n .columns(['status', 'created_at'])\n .execute()\n\n // Create steps table\n await db.schema\n .createTable('durably_steps')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('run_id', 'text', (col) => col.notNull())\n .addColumn('name', 'text', (col) => 
col.notNull())\n .addColumn('index', 'integer', (col) => col.notNull())\n .addColumn('status', 'text', (col) => col.notNull())\n .addColumn('output', 'text')\n .addColumn('error', 'text')\n .addColumn('started_at', 'text', (col) => col.notNull())\n .addColumn('completed_at', 'text')\n .execute()\n\n // Create steps index\n await db.schema\n .createIndex('idx_durably_steps_run_index')\n .ifNotExists()\n .on('durably_steps')\n .columns(['run_id', 'index'])\n .execute()\n\n // Create logs table\n await db.schema\n .createTable('durably_logs')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('run_id', 'text', (col) => col.notNull())\n .addColumn('step_name', 'text')\n .addColumn('level', 'text', (col) => col.notNull())\n .addColumn('message', 'text', (col) => col.notNull())\n .addColumn('data', 'text')\n .addColumn('created_at', 'text', (col) => col.notNull())\n .execute()\n\n // Create logs index\n await db.schema\n .createIndex('idx_durably_logs_run_created')\n .ifNotExists()\n .on('durably_logs')\n .columns(['run_id', 'created_at'])\n .execute()\n\n // Create schema_versions table\n await db.schema\n .createTable('durably_schema_versions')\n .ifNotExists()\n .addColumn('version', 'integer', (col) => col.primaryKey())\n .addColumn('applied_at', 'text', (col) => col.notNull())\n .execute()\n },\n },\n]\n\n/**\n * Get the current schema version from the database\n */\nasync function getCurrentVersion(db: Kysely<Database>): Promise<number> {\n try {\n const result = await db\n .selectFrom('durably_schema_versions')\n .select('version')\n .orderBy('version', 'desc')\n .limit(1)\n .executeTakeFirst()\n\n return result?.version ?? 0\n } catch {\n // Table doesn't exist yet\n return 0\n }\n}\n\n/**\n * Run pending migrations\n */\nexport async function runMigrations(db: Kysely<Database>): Promise<void> {\n const currentVersion = await getCurrentVersion(db)\n\n for (const migration of migrations) {\n if (migration.version > currentVersion) {\n await migration.up(db)\n\n await db\n .insertInto('durably_schema_versions')\n .values({\n version: migration.version,\n applied_at: new Date().toISOString(),\n })\n .execute()\n }\n }\n}\n","import type { Kysely } from 'kysely'\nimport { ulid } from 'ulidx'\nimport type { Database } from './schema'\n\n/**\n * Run data for creating a new run\n */\nexport interface CreateRunInput {\n jobName: string\n payload: unknown\n idempotencyKey?: string\n concurrencyKey?: string\n}\n\n/**\n * Run data returned from storage\n */\nexport interface Run {\n id: string\n jobName: string\n payload: unknown\n status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n idempotencyKey: string | null\n concurrencyKey: string | null\n currentStepIndex: number\n progress: { current: number; total?: number; message?: string } | null\n output: unknown | null\n error: string | null\n heartbeatAt: string\n createdAt: string\n updatedAt: string\n}\n\n/**\n * Run update data\n */\nexport interface UpdateRunInput {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n currentStepIndex?: number\n progress?: { current: number; total?: number; message?: string } | null\n output?: unknown\n error?: string | null\n heartbeatAt?: string\n}\n\n/**\n * Run filter options\n */\nexport interface RunFilter {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n jobName?: string\n /** Maximum number of runs to return */\n limit?: number\n /** Number of runs to skip (for pagination) */\n offset?: number\n}\n\n/**\n * 
Step data for creating a new step\n */\nexport interface CreateStepInput {\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed'\n output?: unknown\n error?: string\n startedAt: string // ISO8601 timestamp when step execution started\n}\n\n/**\n * Step data returned from storage\n */\nexport interface Step {\n id: string\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed'\n output: unknown | null\n error: string | null\n startedAt: string\n completedAt: string | null\n}\n\n/**\n * Log data for creating a new log\n */\nexport interface CreateLogInput {\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data?: unknown\n}\n\n/**\n * Log data returned from storage\n */\nexport interface Log {\n id: string\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data: unknown | null\n createdAt: string\n}\n\n/**\n * Storage interface for database operations\n */\nexport interface Storage {\n // Run operations\n createRun(input: CreateRunInput): Promise<Run>\n batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]>\n updateRun(runId: string, data: UpdateRunInput): Promise<void>\n deleteRun(runId: string): Promise<void>\n getRun(runId: string): Promise<Run | null>\n getRuns(filter?: RunFilter): Promise<Run[]>\n getNextPendingRun(excludeConcurrencyKeys: string[]): Promise<Run | null>\n\n // Step operations\n createStep(input: CreateStepInput): Promise<Step>\n getSteps(runId: string): Promise<Step[]>\n getCompletedStep(runId: string, name: string): Promise<Step | null>\n\n // Log operations\n createLog(input: CreateLogInput): Promise<Log>\n getLogs(runId: string): Promise<Log[]>\n}\n\n/**\n * Convert database row to Run object\n */\nfunction rowToRun(row: Database['durably_runs']): Run {\n return {\n id: row.id,\n jobName: row.job_name,\n payload: JSON.parse(row.payload),\n status: row.status,\n idempotencyKey: row.idempotency_key,\n concurrencyKey: row.concurrency_key,\n currentStepIndex: row.current_step_index,\n progress: row.progress ? JSON.parse(row.progress) : null,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n heartbeatAt: row.heartbeat_at,\n createdAt: row.created_at,\n updatedAt: row.updated_at,\n }\n}\n\n/**\n * Convert database row to Step object\n */\nfunction rowToStep(row: Database['durably_steps']): Step {\n return {\n id: row.id,\n runId: row.run_id,\n name: row.name,\n index: row.index,\n status: row.status,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n }\n}\n\n/**\n * Convert database row to Log object\n */\nfunction rowToLog(row: Database['durably_logs']): Log {\n return {\n id: row.id,\n runId: row.run_id,\n stepName: row.step_name,\n level: row.level,\n message: row.message,\n data: row.data ? 
JSON.parse(row.data) : null,\n createdAt: row.created_at,\n }\n}\n\n/**\n * Create a Kysely-based Storage implementation\n */\nexport function createKyselyStorage(db: Kysely<Database>): Storage {\n return {\n async createRun(input: CreateRunInput): Promise<Run> {\n const now = new Date().toISOString()\n\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await db\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n return rowToRun(existing)\n }\n }\n\n const id = ulid()\n const run: Database['durably_runs'] = {\n id,\n job_name: input.jobName,\n payload: JSON.stringify(input.payload),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n heartbeat_at: now,\n created_at: now,\n updated_at: now,\n }\n\n await db.insertInto('durably_runs').values(run).execute()\n\n return rowToRun(run)\n },\n\n async batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Use transaction to ensure atomicity of idempotency checks and inserts\n return await db.transaction().execute(async (trx) => {\n const now = new Date().toISOString()\n const runs: Database['durably_runs'][] = []\n\n // Process inputs - check idempotency keys and create run objects\n for (const input of inputs) {\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await trx\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n runs.push(existing)\n continue\n }\n }\n\n const id = ulid()\n runs.push({\n id,\n job_name: input.jobName,\n payload: JSON.stringify(input.payload),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n heartbeat_at: now,\n created_at: now,\n updated_at: now,\n })\n }\n\n // Insert all new runs in a single batch\n const newRuns = runs.filter((r) => r.created_at === now)\n if (newRuns.length > 0) {\n await trx.insertInto('durably_runs').values(newRuns).execute()\n }\n\n return runs.map(rowToRun)\n })\n },\n\n async updateRun(runId: string, data: UpdateRunInput): Promise<void> {\n const now = new Date().toISOString()\n const updates: Partial<Database['durably_runs']> = {\n updated_at: now,\n }\n\n if (data.status !== undefined) updates.status = data.status\n if (data.currentStepIndex !== undefined)\n updates.current_step_index = data.currentStepIndex\n if (data.progress !== undefined)\n updates.progress = data.progress ? 
JSON.stringify(data.progress) : null\n if (data.output !== undefined)\n updates.output = JSON.stringify(data.output)\n if (data.error !== undefined) updates.error = data.error\n if (data.heartbeatAt !== undefined)\n updates.heartbeat_at = data.heartbeatAt\n\n await db\n .updateTable('durably_runs')\n .set(updates)\n .where('id', '=', runId)\n .execute()\n },\n\n async deleteRun(runId: string): Promise<void> {\n // Delete in order: logs -> steps -> run (due to foreign key constraints)\n await db.deleteFrom('durably_logs').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_steps').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_runs').where('id', '=', runId).execute()\n },\n\n async getRun(runId: string): Promise<Run | null> {\n const row = await db\n .selectFrom('durably_runs')\n .selectAll()\n .where('id', '=', runId)\n .executeTakeFirst()\n\n return row ? rowToRun(row) : null\n },\n\n async getRuns(filter?: RunFilter): Promise<Run[]> {\n let query = db.selectFrom('durably_runs').selectAll()\n\n if (filter?.status) {\n query = query.where('status', '=', filter.status)\n }\n if (filter?.jobName) {\n query = query.where('job_name', '=', filter.jobName)\n }\n\n query = query.orderBy('created_at', 'desc')\n\n if (filter?.limit !== undefined) {\n query = query.limit(filter.limit)\n }\n if (filter?.offset !== undefined) {\n // SQLite requires LIMIT when using OFFSET\n if (filter.limit === undefined) {\n query = query.limit(-1) // -1 means unlimited in SQLite\n }\n query = query.offset(filter.offset)\n }\n\n const rows = await query.execute()\n return rows.map(rowToRun)\n },\n\n async getNextPendingRun(\n excludeConcurrencyKeys: string[],\n ): Promise<Run | null> {\n let query = db\n .selectFrom('durably_runs')\n .selectAll()\n .where('status', '=', 'pending')\n .orderBy('created_at', 'asc')\n .limit(1)\n\n if (excludeConcurrencyKeys.length > 0) {\n query = query.where((eb) =>\n eb.or([\n eb('concurrency_key', 'is', null),\n eb('concurrency_key', 'not in', excludeConcurrencyKeys),\n ]),\n )\n }\n\n const row = await query.executeTakeFirst()\n return row ? rowToRun(row) : null\n },\n\n async createStep(input: CreateStepInput): Promise<Step> {\n const completedAt = new Date().toISOString()\n const id = ulid()\n\n const step: Database['durably_steps'] = {\n id,\n run_id: input.runId,\n name: input.name,\n index: input.index,\n status: input.status,\n output:\n input.output !== undefined ? JSON.stringify(input.output) : null,\n error: input.error ?? null,\n started_at: input.startedAt,\n completed_at: completedAt,\n }\n\n await db.insertInto('durably_steps').values(step).execute()\n\n return rowToStep(step)\n },\n\n async getSteps(runId: string): Promise<Step[]> {\n const rows = await db\n .selectFrom('durably_steps')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('index', 'asc')\n .execute()\n\n return rows.map(rowToStep)\n },\n\n async getCompletedStep(runId: string, name: string): Promise<Step | null> {\n const row = await db\n .selectFrom('durably_steps')\n .selectAll()\n .where('run_id', '=', runId)\n .where('name', '=', name)\n .where('status', '=', 'completed')\n .executeTakeFirst()\n\n return row ? rowToStep(row) : null\n },\n\n async createLog(input: CreateLogInput): Promise<Log> {\n const now = new Date().toISOString()\n const id = ulid()\n\n const log: Database['durably_logs'] = {\n id,\n run_id: input.runId,\n step_name: input.stepName,\n level: input.level,\n message: input.message,\n data: input.data !== undefined ? 
JSON.stringify(input.data) : null,\n created_at: now,\n }\n\n await db.insertInto('durably_logs').values(log).execute()\n\n return rowToLog(log)\n },\n\n async getLogs(runId: string): Promise<Log[]> {\n const rows = await db\n .selectFrom('durably_logs')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('created_at', 'asc')\n .execute()\n\n return rows.map(rowToLog)\n },\n }\n}\n","/**\n * Error thrown when a run is cancelled during execution.\n * The worker catches this error and treats it specially - it does not\n * mark the run as failed, as the run status is already 'cancelled'.\n */\nexport class CancelledError extends Error {\n constructor(runId: string) {\n super(`Run was cancelled: ${runId}`)\n this.name = 'CancelledError'\n }\n}\n","import { CancelledError } from './errors'\nimport type { EventEmitter } from './events'\nimport type { JobContext } from './job'\nimport type { Run, Storage } from './storage'\n\n/**\n * Create a job context for executing a run\n */\nexport function createJobContext(\n run: Run,\n jobName: string,\n storage: Storage,\n eventEmitter: EventEmitter,\n): JobContext {\n let stepIndex = run.currentStepIndex\n let currentStepName: string | null = null\n\n return {\n get runId(): string {\n return run.id\n },\n\n async run<T>(name: string, fn: () => T | Promise<T>): Promise<T> {\n // Check if run was cancelled before executing this step\n const currentRun = await storage.getRun(run.id)\n if (currentRun?.status === 'cancelled') {\n throw new CancelledError(run.id)\n }\n\n // Check if step was already completed\n const existingStep = await storage.getCompletedStep(run.id, name)\n if (existingStep) {\n stepIndex++\n return existingStep.output as T\n }\n\n // Track current step for log attribution\n currentStepName = name\n\n // Record step start time\n const startedAt = new Date().toISOString()\n const startTime = Date.now()\n\n // Emit step:start event\n eventEmitter.emit({\n type: 'step:start',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n })\n\n try {\n // Execute the step\n const result = await fn()\n\n // Save step result\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: 'completed',\n output: result,\n startedAt,\n })\n\n // Update run's current step index\n stepIndex++\n await storage.updateRun(run.id, { currentStepIndex: stepIndex })\n\n // Emit step:complete event\n eventEmitter.emit({\n type: 'step:complete',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex: stepIndex - 1,\n output: result,\n duration: Date.now() - startTime,\n })\n\n return result\n } catch (error) {\n // Save failed step\n const errorMessage =\n error instanceof Error ? 
error.message : String(error)\n\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: 'failed',\n error: errorMessage,\n startedAt,\n })\n\n // Emit step:fail event\n eventEmitter.emit({\n type: 'step:fail',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n error: errorMessage,\n })\n\n throw error\n } finally {\n // Clear current step after execution\n currentStepName = null\n }\n },\n\n progress(current: number, total?: number, message?: string): void {\n // Fire and forget - don't await\n storage.updateRun(run.id, {\n progress: { current, total, message },\n })\n },\n\n log: {\n info(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n stepName: currentStepName,\n level: 'info',\n message,\n data,\n })\n },\n\n warn(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n stepName: currentStepName,\n level: 'warn',\n message,\n data,\n })\n },\n\n error(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n stepName: currentStepName,\n level: 'error',\n message,\n data,\n })\n },\n },\n }\n}\n","import { createJobContext } from './context'\nimport { CancelledError } from './errors'\nimport type { EventEmitter } from './events'\nimport type { JobRegistry } from './job'\nimport type { Storage } from './storage'\n\n/**\n * Worker configuration\n */\nexport interface WorkerConfig {\n pollingInterval: number\n heartbeatInterval: number\n staleThreshold: number\n}\n\n/**\n * Worker state\n */\nexport interface Worker {\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Check if worker is running\n */\n readonly isRunning: boolean\n}\n\n/**\n * Create a worker instance\n */\nexport function createWorker(\n config: WorkerConfig,\n storage: Storage,\n eventEmitter: EventEmitter,\n jobRegistry: JobRegistry,\n): Worker {\n let running = false\n let currentRunPromise: Promise<void> | null = null\n let pollingTimeout: ReturnType<typeof setTimeout> | null = null\n let stopResolver: (() => void) | null = null\n let heartbeatInterval: ReturnType<typeof setInterval> | null = null\n let currentRunId: string | null = null\n\n /**\n * Recover stale runs by resetting them to pending\n */\n async function recoverStaleRuns(): Promise<void> {\n const staleThreshold = new Date(\n Date.now() - config.staleThreshold,\n ).toISOString()\n const runningRuns = await storage.getRuns({ status: 'running' })\n\n for (const run of runningRuns) {\n if (run.heartbeatAt < staleThreshold) {\n // This run is stale - reset to pending\n await storage.updateRun(run.id, {\n status: 'pending',\n })\n }\n }\n }\n\n /**\n * Update heartbeat for current run\n */\n async function updateHeartbeat(): Promise<void> {\n if (currentRunId) {\n await storage.updateRun(currentRunId, {\n heartbeatAt: new Date().toISOString(),\n })\n }\n }\n\n /**\n * Extract error message from unknown error\n */\n function getErrorMessage(error: unknown): string {\n return error instanceof Error ? 
error.message : String(error)\n }\n\n /**\n * Handle successful run completion\n */\n async function handleRunSuccess(\n runId: string,\n jobName: string,\n output: unknown,\n startTime: number,\n ): Promise<void> {\n // Check if run was cancelled during execution - don't overwrite cancelled status\n const currentRun = await storage.getRun(runId)\n if (currentRun?.status === 'cancelled') {\n return\n }\n\n await storage.updateRun(runId, {\n status: 'completed',\n output,\n })\n\n eventEmitter.emit({\n type: 'run:complete',\n runId,\n jobName,\n output,\n duration: Date.now() - startTime,\n })\n }\n\n /**\n * Handle failed run\n */\n async function handleRunFailure(\n runId: string,\n jobName: string,\n error: unknown,\n ): Promise<void> {\n // If the error is CancelledError, don't treat it as a failure\n // The run status is already 'cancelled'\n if (error instanceof CancelledError) {\n return\n }\n\n // Check if run was cancelled during execution - don't overwrite cancelled status\n const currentRun = await storage.getRun(runId)\n if (currentRun?.status === 'cancelled') {\n return\n }\n\n const errorMessage = getErrorMessage(error)\n\n // Get the failed step name if available\n const steps = await storage.getSteps(runId)\n const failedStep = steps.find((s) => s.status === 'failed')\n\n await storage.updateRun(runId, {\n status: 'failed',\n error: errorMessage,\n })\n\n eventEmitter.emit({\n type: 'run:fail',\n runId,\n jobName,\n error: errorMessage,\n failedStepName: failedStep?.name ?? 'unknown',\n })\n }\n\n /**\n * Execute a run with heartbeat management\n */\n async function executeRun(\n run: Awaited<ReturnType<typeof storage.getRun>> & { id: string },\n job: NonNullable<ReturnType<typeof jobRegistry.get>>,\n ): Promise<void> {\n // Track current run for heartbeat updates\n currentRunId = run.id\n\n // Start heartbeat interval\n // Errors are emitted as events but don't stop execution\n heartbeatInterval = setInterval(() => {\n updateHeartbeat().catch((error) => {\n eventEmitter.emit({\n type: 'worker:error',\n error: error instanceof Error ? 
error.message : String(error),\n context: 'heartbeat',\n runId: run.id,\n })\n })\n }, config.heartbeatInterval)\n\n // Emit run:start event\n eventEmitter.emit({\n type: 'run:start',\n runId: run.id,\n jobName: run.jobName,\n payload: run.payload,\n })\n\n const startTime = Date.now()\n\n try {\n // Create context and execute job\n const context = createJobContext(run, run.jobName, storage, eventEmitter)\n const output = await job.fn(context, run.payload)\n\n // Validate output if schema exists\n if (job.outputSchema) {\n const parseResult = job.outputSchema.safeParse(output)\n if (!parseResult.success) {\n throw new Error(`Invalid output: ${parseResult.error.message}`)\n }\n }\n\n await handleRunSuccess(run.id, run.jobName, output, startTime)\n } catch (error) {\n await handleRunFailure(run.id, run.jobName, error)\n } finally {\n // Stop heartbeat interval\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n currentRunId = null\n }\n }\n\n async function processNextRun(): Promise<boolean> {\n // Get running runs to exclude their concurrency keys\n const runningRuns = await storage.getRuns({ status: 'running' })\n const excludeConcurrencyKeys = runningRuns\n .filter(\n (r): r is typeof r & { concurrencyKey: string } =>\n r.concurrencyKey !== null,\n )\n .map((r) => r.concurrencyKey)\n\n // Get next pending run\n const run = await storage.getNextPendingRun(excludeConcurrencyKeys)\n if (!run) {\n return false\n }\n\n // Get the job definition\n const job = jobRegistry.get(run.jobName)\n if (!job) {\n // Unknown job - mark as failed\n await storage.updateRun(run.id, {\n status: 'failed',\n error: `Unknown job: ${run.jobName}`,\n })\n return true\n }\n\n // Transition to running\n await storage.updateRun(run.id, {\n status: 'running',\n heartbeatAt: new Date().toISOString(),\n })\n\n await executeRun(run, job)\n\n return true\n }\n\n async function poll(): Promise<void> {\n if (!running) {\n return\n }\n\n const doWork = async () => {\n // Recover stale runs before processing\n await recoverStaleRuns()\n await processNextRun()\n }\n\n try {\n currentRunPromise = doWork()\n await currentRunPromise\n } finally {\n currentRunPromise = null\n }\n\n if (running) {\n pollingTimeout = setTimeout(() => poll(), config.pollingInterval)\n } else if (stopResolver) {\n stopResolver()\n stopResolver = null\n }\n }\n\n return {\n get isRunning(): boolean {\n return running\n },\n\n start(): void {\n if (running) {\n return\n }\n running = true\n poll()\n },\n\n async stop(): Promise<void> {\n if (!running) {\n return\n }\n\n running = false\n\n if (pollingTimeout) {\n clearTimeout(pollingTimeout)\n pollingTimeout = null\n }\n\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n\n if (currentRunPromise) {\n // Wait for current run to complete\n return new Promise<void>((resolve) => {\n stopResolver = resolve\n })\n }\n },\n }\n}\n","import type { DurablyPlugin } from '../durably'\n\n/**\n * Plugin that persists log events to the database\n */\nexport function withLogPersistence(): DurablyPlugin {\n return {\n name: 'log-persistence',\n install(durably) {\n durably.on('log:write', async (event) => {\n await durably.storage.createLog({\n runId: event.runId,\n stepName: event.stepName,\n level: event.level,\n message: event.message,\n data: event.data,\n })\n })\n },\n 
}\n}\n"],"mappings":";AACA,SAAS,cAAc;;;ACwLhB,SAAS,qBAAmC;AACjD,QAAM,YAAY,oBAAI,IAA8C;AACpE,MAAI,WAAW;AACf,MAAI,eAAoC;AAExC,SAAO;AAAA,IACL,GAAwB,MAAS,UAAyC;AACxE,UAAI,CAAC,UAAU,IAAI,IAAI,GAAG;AACxB,kBAAU,IAAI,MAAM,oBAAI,IAAI,CAAC;AAAA,MAC/B;AAEA,YAAM,gBAAgB,UAAU,IAAI,IAAI;AACxC,qBAAe,IAAI,QAA+C;AAElE,aAAO,MAAM;AACX,uBAAe,OAAO,QAA+C;AAAA,MACvE;AAAA,IACF;AAAA,IAEA,QAAQ,SAA6B;AACnC,qBAAe;AAAA,IACjB;AAAA,IAEA,KAAK,OAA4B;AAC/B;AACA,YAAM,YAAY;AAAA,QAChB,GAAG;AAAA,QACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC;AAAA,MACF;AAEA,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAC9C,UAAI,CAAC,eAAe;AAClB;AAAA,MACF;AAEA,iBAAW,YAAY,eAAe;AACpC,YAAI;AACF,mBAAS,SAAS;AAAA,QACpB,SAAS,OAAO;AACd,cAAI,cAAc;AAChB;AAAA,cACE,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,cACxD;AAAA,YACF;AAAA,UACF;AAAA,QAEF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACtEO,SAAS,oBAAiC;AAC/C,QAAM,OAAO,oBAAI,IAA6C;AAE9D,SAAO;AAAA,IACL,SAA0B,KAA2C;AACnE,UAAI,KAAK,IAAI,IAAI,IAAI,GAAG;AACtB,cAAM,IAAI,MAAM,QAAQ,IAAI,IAAI,yBAAyB;AAAA,MAC3D;AACA,WAAK,IAAI,IAAI,MAAM,GAAsC;AAAA,IAC3D;AAAA,IAEA,IAAI,MAA2D;AAC7D,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,IAEA,IAAI,MAAuB;AACzB,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,EACF;AACF;AAKO,SAAS,gBAKd,YACA,IAIA,SACA,eACA,UAKA;AAOA,WAAS,SAAS;AAAA,IAChB,MAAM,WAAW;AAAA,IACjB,aAAa,WAAW;AAAA,IACxB,cAAc,WAAW;AAAA,IACzB;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,MAAM,WAAW;AAAA,IAEjB,MAAM,QACJ,OACA,SAC4B;AAE5B,YAAM,cAAc,WAAW,MAAM,UAAU,KAAK;AACpD,UAAI,CAAC,YAAY,SAAS;AACxB,cAAM,IAAI,MAAM,kBAAkB,YAAY,MAAM,OAAO,EAAE;AAAA,MAC/D;AAGA,YAAM,MAAM,MAAM,QAAQ,UAAU;AAAA,QAClC,SAAS,WAAW;AAAA,QACpB,SAAS,YAAY;AAAA,QACrB,gBAAgB,SAAS;AAAA,QACzB,gBAAgB,SAAS;AAAA,MAC3B,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,eACJ,OACA,SACwC;AAExC,YAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,OAAO;AAG7C,aAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAI;AACJ,YAAI,WAAW;AAEf,cAAM,UAAU,MAAM;AACpB,cAAI,SAAU;AACd,qBAAW;AACX,8BAAoB;AACpB,0BAAgB;AAChB,cAAI,WAAW;AACb,yBAAa,SAAS;AAAA,UACxB;AAAA,QACF;AAEA,cAAM,sBAAsB,cAAc;AAAA,UACxC;AAAA,UACA,CAAC,UAAU;AACT,gBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAQ;AACR,sBAAQ;AAAA,gBACN,IAAI,IAAI;AAAA,gBACR,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,cAAM,kBAAkB,cAAc,GAAG,YAAY,CAAC,UAAU;AAC9D,cAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,oBAAQ;AACR,mBAAO,IAAI,MAAM,MAAM,KAAK,CAAC;AAAA,UAC/B;AAAA,QACF,CAAC;AAID,gBAAQ,OAAO,IAAI,EAAE,EAAE,KAAK,CAAC,eAAe;AAC1C,cAAI,YAAY,CAAC,WAAY;AAC7B,cAAI,WAAW,WAAW,aAAa;AACrC,oBAAQ;AACR,oBAAQ;AAAA,cACN,IAAI,IAAI;AAAA,cACR,QAAQ,WAAW;AAAA,YACrB,CAAC;AAAA,UACH,WAAW,WAAW,WAAW,UAAU;AACzC,oBAAQ;AACR,mBAAO,IAAI,MAAM,WAAW,SAAS,YAAY,CAAC;AAAA,UACpD;AAAA,QACF,CAAC;AAGD,YAAI,SAAS,YAAY,QAAW;AAClC,sBAAY,WAAW,MAAM;AAC3B,gBAAI,CAAC,UAAU;AACb,sBAAQ;AACR;AAAA,gBACE,IAAI,MAAM,gCAAgC,QAAQ,OAAO,IAAI;AAAA,cAC/D;AAAA,YACF;AAAA,UACF,GAAG,QAAQ,OAAO;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,aACJ,QAC8B;AAC9B,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,YAAM,aAAa,OAAO,IAAI,CAAC,SAAS;AACtC,YAAI,QAAQ,OAAO,SAAS,YAAY,WAAW,MAAM;AACvD,iBAAO;AAAA,QACT;AACA,eAAO,EAAE,OAAO,MAAgB,SAAS,OAAU;AAAA,MACrD,CAAC;AAGD,YAAM,YAA8D,CAAC;AACrE,eAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,cAAM,cAAc,WAAW,MAAM,UAAU,WAAW,CAAC,EAAE,KAAK;AAClE,YAAI,CAAC,YAAY,SAAS;AACxB,gBAAM,IAAI;AAAA,YACR,0BAA0B,CAAC,KAAK,YAAY,MAAM,OAAO;AAAA,UAC3D;AAAA,QACF;AACA,kBAAU,KAAK;AAAA,UACb,SAAS,YAAY;AAAA,UACrB,SAAS,WAAW,CAAC,EAAE;AAAA,QACzB,CAAC;AAAA,MACH;AAGA,YAAM,OAAO,MAAM,QAAQ;AAAA,QACzB,UAAU,IAAI,CAAC,OAAO;AAAA,UACpB,SAAS,WAAW;AAAA,UACpB,SAAS,EAAE;AAAA,UACX,gBAAgB,EAAE,SAAS;AAAA,UAC3B,gBAAgB,EAAE,SAAS;AAAA,QAC7B,EAAE;AAAA,MACJ;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,OAAO,IAA+C;AAC1D,YAAM,MAAM,MAAM,QAAQ,OAAO,EAAE;AACnC
,UAAI,CAAC,OAAO,IAAI,YAAY,WAAW,MAAM;AAC3C,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QACJ,QAC8B;AAC9B,YAAM,OAAO,MAAM,QAAQ,QAAQ;AAAA,QACjC,GAAG;AAAA,QACH,SAAS,WAAW;AAAA,MACtB,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AC/WA,IAAM,aAA0B;AAAA,EAC9B;AAAA,IACE,SAAS;AAAA,IACT,IAAI,OAAO,OAAO;AAEhB,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,YAAY,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACnD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,mBAAmB,MAAM,EACnC,UAAU,mBAAmB,MAAM,EACnC;AAAA,QAAU;AAAA,QAAsB;AAAA,QAAW,CAAC,QAC3C,IAAI,QAAQ,EAAE,UAAU,CAAC;AAAA,MAC3B,EACC,UAAU,YAAY,MAAM,EAC5B,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,gBAAgB,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACxD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,kCAAkC,EAC9C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,YAAY,iBAAiB,CAAC,EACvC,OAAO,EACP,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,qCAAqC,EACjD,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,iBAAiB,CAAC,EACrC,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,iCAAiC,EAC7C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,eAAe,EAC3B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,QAAQ,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAChD,UAAU,SAAS,WAAW,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,UAAU,gBAAgB,MAAM,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,6BAA6B,EACzC,YAAY,EACZ,GAAG,eAAe,EAClB,QAAQ,CAAC,UAAU,OAAO,CAAC,EAC3B,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,aAAa,MAAM,EAC7B,UAAU,SAAS,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACjD,UAAU,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACnD,UAAU,QAAQ,MAAM,EACxB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,8BAA8B,EAC1C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,yBAAyB,EACrC,YAAY,EACZ,UAAU,WAAW,WAAW,CAAC,QAAQ,IAAI,WAAW,CAAC,EACzD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAAA,IACb;AAAA,EACF;AACF;AAKA,eAAe,kBAAkB,IAAuC;AACtE,MAAI;AACF,UAAM,SAAS,MAAM,GAClB,WAAW,yBAAyB,EACpC,OAAO,SAAS,EAChB,QAAQ,WAAW,MAAM,EACzB,MAAM,CAAC,EACP,iBAAiB;AAEpB,WAAO,QAAQ,WAAW;AAAA,EAC5B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,cAAc,IAAqC;AACvE,QAAM,iBAAiB,MAAM,kBAAkB,EAAE;AAEjD,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,UAAU,gBAAgB;AACtC,YAAM,UAAU,GAAG,EAAE;AAErB,YAAM,GACH,WAAW,yBAAyB,EACpC,OAAO;AAAA,QACN,SAAS,UAAU;AAAA,QACnB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,MACrC,CAAC,EACA,QAAQ;AAAA,IACb;AAAA,EACF;AACF;;;ACvJA,SAAS,YAAY;AAsIrB,SAAS,SAAS,KAAoC;AACpD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,SAAS,IAAI;AAAA,IACb,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,IAC/B,QAAQ,IAAI;AAAA,IACZ,gBAAgB,IAAI;AAAA,IACpB,gBAAgB,IAAI;AAAA,IACpB,kBAAkB,IAAI;AAAA,IACtB,UAAU,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ,IAAI;AAAA,IACpD,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,EACjB;AACF;AAKA,SAAS,UAAU,KAAsC;AACvD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAM,IAAI;AAAA,IACV,OAAO,IAAI;AAAA,IACX,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,EACnB;AACF;AAKA,SAAS,SAAS,KAAoC;AACpD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,UAAU,IAAI;AAAA,IACd,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,MA
AM,IAAI,OAAO,KAAK,MAAM,IAAI,IAAI,IAAI;AAAA,IACxC,WAAW,IAAI;AAAA,EACjB;AACF;AAKO,SAAS,oBAAoB,IAA+B;AACjE,SAAO;AAAA,IACL,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAGnC,UAAI,MAAM,gBAAgB;AACxB,cAAM,WAAW,MAAM,GACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,YAAI,UAAU;AACZ,iBAAO,SAAS,QAAQ;AAAA,QAC1B;AAAA,MACF;AAEA,YAAM,KAAK,KAAK;AAChB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,UAAU,MAAM;AAAA,QAChB,SAAS,KAAK,UAAU,MAAM,OAAO;AAAA,QACrC,QAAQ;AAAA,QACR,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,oBAAoB;AAAA,QACpB,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,gBAAgB,QAA0C;AAC9D,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,aAAO,MAAM,GAAG,YAAY,EAAE,QAAQ,OAAO,QAAQ;AACnD,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,OAAmC,CAAC;AAG1C,mBAAW,SAAS,QAAQ;AAE1B,cAAI,MAAM,gBAAgB;AACxB,kBAAM,WAAW,MAAM,IACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,gBAAI,UAAU;AACZ,mBAAK,KAAK,QAAQ;AAClB;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK,KAAK;AAChB,eAAK,KAAK;AAAA,YACR;AAAA,YACA,UAAU,MAAM;AAAA,YAChB,SAAS,KAAK,UAAU,MAAM,OAAO;AAAA,YACrC,QAAQ;AAAA,YACR,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,oBAAoB;AAAA,YACpB,UAAU;AAAA,YACV,QAAQ;AAAA,YACR,OAAO;AAAA,YACP,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,YAAY;AAAA,UACd,CAAC;AAAA,QACH;AAGA,cAAM,UAAU,KAAK,OAAO,CAAC,MAAM,EAAE,eAAe,GAAG;AACvD,YAAI,QAAQ,SAAS,GAAG;AACtB,gBAAM,IAAI,WAAW,cAAc,EAAE,OAAO,OAAO,EAAE,QAAQ;AAAA,QAC/D;AAEA,eAAO,KAAK,IAAI,QAAQ;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAAe,MAAqC;AAClE,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,UAA6C;AAAA,QACjD,YAAY;AAAA,MACd;AAEA,UAAI,KAAK,WAAW,OAAW,SAAQ,SAAS,KAAK;AACrD,UAAI,KAAK,qBAAqB;AAC5B,gBAAQ,qBAAqB,KAAK;AACpC,UAAI,KAAK,aAAa;AACpB,gBAAQ,WAAW,KAAK,WAAW,KAAK,UAAU,KAAK,QAAQ,IAAI;AACrE,UAAI,KAAK,WAAW;AAClB,gBAAQ,SAAS,KAAK,UAAU,KAAK,MAAM;AAC7C,UAAI,KAAK,UAAU,OAAW,SAAQ,QAAQ,KAAK;AACnD,UAAI,KAAK,gBAAgB;AACvB,gBAAQ,eAAe,KAAK;AAE9B,YAAM,GACH,YAAY,cAAc,EAC1B,IAAI,OAAO,EACX,MAAM,MAAM,KAAK,KAAK,EACtB,QAAQ;AAAA,IACb;AAAA,IAEA,MAAM,UAAU,OAA8B;AAE5C,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACxE,YAAM,GAAG,WAAW,eAAe,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACzE,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,MAAM,KAAK,KAAK,EAAE,QAAQ;AAAA,IACtE;AAAA,IAEA,MAAM,OAAO,OAAoC;AAC/C,YAAM,MAAM,MAAM,GACf,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,MAAM,KAAK,KAAK,EACtB,iBAAiB;AAEpB,aAAO,MAAM,SAAS,GAAG,IAAI;AAAA,IAC/B;AAAA,IAEA,MAAM,QAAQ,QAAoC;AAChD,UAAI,QAAQ,GAAG,WAAW,cAAc,EAAE,UAAU;AAEpD,UAAI,QAAQ,QAAQ;AAClB,gBAAQ,MAAM,MAAM,UAAU,KAAK,OAAO,MAAM;AAAA,MAClD;AACA,UAAI,QAAQ,SAAS;AACnB,gBAAQ,MAAM,MAAM,YAAY,KAAK,OAAO,OAAO;AAAA,MACrD;AAEA,cAAQ,MAAM,QAAQ,cAAc,MAAM;AAE1C,UAAI,QAAQ,UAAU,QAAW;AAC/B,gBAAQ,MAAM,MAAM,OAAO,KAAK;AAAA,MAClC;AACA,UAAI,QAAQ,WAAW,QAAW;AAEhC,YAAI,OAAO,UAAU,QAAW;AAC9B,kBAAQ,MAAM,MAAM,EAAE;AAAA,QACxB;AACA,gBAAQ,MAAM,OAAO,OAAO,MAAM;AAAA,MACpC;AAEA,YAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,IAEA,MAAM,kBACJ,wBACqB;AACrB,UAAI,QAAQ,GACT,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,UAAU,KAAK,SAAS,EAC9B,QAAQ,cAAc,KAAK,EAC3B,MAAM,CAAC;AAEV,UAAI,uBAAuB,SAAS,GAAG;AACrC,gBAAQ,MAAM;AAAA,UAAM,CAAC,OACnB,GAAG,GAAG;AAAA,YACJ,GAAG,mBAAmB,MAAM,IAAI;AAAA,YAChC,GAAG,mBAAmB,UAAU,sBAAsB;AAAA,UACxD,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,MAAM,MAAM,MAAM,iBAAiB;AACzC,aAAO,MAAM,SAAS,GAAG,IAAI;AAAA,IAC/B;AAAA,IAEA,MAAM,WAAW,OAAuC;AACtD,YAAM,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC3C,YAAM,KAAK,KAAK;AAEhB,YAAM,OAAkC;AAAA,Q
ACtC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,MAAM,MAAM;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM;AAAA,QACd,QACE,MAAM,WAAW,SAAY,KAAK,UAAU,MAAM,MAAM,IAAI;AAAA,QAC9D,OAAO,MAAM,SAAS;AAAA,QACtB,YAAY,MAAM;AAAA,QAClB,cAAc;AAAA,MAChB;AAEA,YAAM,GAAG,WAAW,eAAe,EAAE,OAAO,IAAI,EAAE,QAAQ;AAE1D,aAAO,UAAU,IAAI;AAAA,IACvB;AAAA,IAEA,MAAM,SAAS,OAAgC;AAC7C,YAAM,OAAO,MAAM,GAChB,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,SAAS,KAAK,EACtB,QAAQ;AAEX,aAAO,KAAK,IAAI,SAAS;AAAA,IAC3B;AAAA,IAEA,MAAM,iBAAiB,OAAe,MAAoC;AACxE,YAAM,MAAM,MAAM,GACf,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,MAAM,QAAQ,KAAK,IAAI,EACvB,MAAM,UAAU,KAAK,WAAW,EAChC,iBAAiB;AAEpB,aAAO,MAAM,UAAU,GAAG,IAAI;AAAA,IAChC;AAAA,IAEA,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,KAAK,KAAK;AAEhB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,MAAM,MAAM,SAAS,SAAY,KAAK,UAAU,MAAM,IAAI,IAAI;AAAA,QAC9D,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,QAAQ,OAA+B;AAC3C,YAAM,OAAO,MAAM,GAChB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,cAAc,KAAK,EAC3B,QAAQ;AAEX,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,EACF;AACF;;;AC7bO,IAAM,iBAAN,cAA6B,MAAM;AAAA,EACxC,YAAY,OAAe;AACzB,UAAM,sBAAsB,KAAK,EAAE;AACnC,SAAK,OAAO;AAAA,EACd;AACF;;;ACFO,SAAS,iBACd,KACA,SACA,SACA,cACY;AACZ,MAAI,YAAY,IAAI;AACpB,MAAI,kBAAiC;AAErC,SAAO;AAAA,IACL,IAAI,QAAgB;AAClB,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,MAAM,IAAO,MAAc,IAAsC;AAE/D,YAAM,aAAa,MAAM,QAAQ,OAAO,IAAI,EAAE;AAC9C,UAAI,YAAY,WAAW,aAAa;AACtC,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAGA,YAAM,eAAe,MAAM,QAAQ,iBAAiB,IAAI,IAAI,IAAI;AAChE,UAAI,cAAc;AAChB;AACA,eAAO,aAAa;AAAA,MACtB;AAGA,wBAAkB;AAGlB,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY,KAAK,IAAI;AAG3B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX;AAAA,QACA,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAED,UAAI;AAEF,cAAM,SAAS,MAAM,GAAG;AAGxB,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR;AAAA,QACF,CAAC;AAGD;AACA,cAAM,QAAQ,UAAU,IAAI,IAAI,EAAE,kBAAkB,UAAU,CAAC;AAG/D,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV,WAAW,YAAY;AAAA,UACvB,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,QACzB,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AAEd,cAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAEvD,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAGD,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA,OAAO;AAAA,QACT,CAAC;AAED,cAAM;AAAA,MACR,UAAE;AAEA,0BAAkB;AAAA,MACpB;AAAA,IACF;AAAA,IAEA,SAAS,SAAiB,OAAgB,SAAwB;AAEhE,cAAQ,UAAU,IAAI,IAAI;AAAA,QACxB,UAAU,EAAE,SAAS,OAAO,QAAQ;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,IAEA,KAAK;AAAA,MACH,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,SAAiB,MAAsB;AAC3C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;ACrHO,SAAS,aACd,QACA,SACA,cACA,aACQ;AACR,MAAI,UAAU;AACd,MAAI,oBAA0C;AAC9C,MAAI,iBAAuD;AAC3D,MAAI,eAAoC;AACxC,MAAI,oBAA2D;AAC/D,MAAI,eAA8B;AAKlC,iBAAe,mBAAkC;AAC/C,UAAM,iBAAiB,IAAI;AAAA,MACzB,KAAK,IAAI,IAAI,OAAO;AAAA,IACtB,EAAE,YAAY;AACd,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAE/D,eAAW,OAAO,aAAa;AAC7B,
UAAI,IAAI,cAAc,gBAAgB;AAEpC,cAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,UAC9B,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAKA,iBAAe,kBAAiC;AAC9C,QAAI,cAAc;AAChB,YAAM,QAAQ,UAAU,cAAc;AAAA,QACpC,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,EACF;AAKA,WAAS,gBAAgB,OAAwB;AAC/C,WAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,EAC9D;AAKA,iBAAe,iBACb,OACA,SACA,QACA,WACe;AAEf,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,YAAY,WAAW,aAAa;AACtC;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR;AAAA,IACF,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,KAAK,IAAI,IAAI;AAAA,IACzB,CAAC;AAAA,EACH;AAKA,iBAAe,iBACb,OACA,SACA,OACe;AAGf,QAAI,iBAAiB,gBAAgB;AACnC;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,YAAY,WAAW,aAAa;AACtC;AAAA,IACF;AAEA,UAAM,eAAe,gBAAgB,KAAK;AAG1C,UAAM,QAAQ,MAAM,QAAQ,SAAS,KAAK;AAC1C,UAAM,aAAa,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,QAAQ;AAE1D,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR,OAAO;AAAA,IACT,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP,gBAAgB,YAAY,QAAQ;AAAA,IACtC,CAAC;AAAA,EACH;AAKA,iBAAe,WACb,KACA,KACe;AAEf,mBAAe,IAAI;AAInB,wBAAoB,YAAY,MAAM;AACpC,sBAAgB,EAAE,MAAM,CAAC,UAAU;AACjC,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,UAC5D,SAAS;AAAA,UACT,OAAO,IAAI;AAAA,QACb,CAAC;AAAA,MACH,CAAC;AAAA,IACH,GAAG,OAAO,iBAAiB;AAG3B,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN,OAAO,IAAI;AAAA,MACX,SAAS,IAAI;AAAA,MACb,SAAS,IAAI;AAAA,IACf,CAAC;AAED,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEF,YAAM,UAAU,iBAAiB,KAAK,IAAI,SAAS,SAAS,YAAY;AACxE,YAAM,SAAS,MAAM,IAAI,GAAG,SAAS,IAAI,OAAO;AAGhD,UAAI,IAAI,cAAc;AACpB,cAAM,cAAc,IAAI,aAAa,UAAU,MAAM;AACrD,YAAI,CAAC,YAAY,SAAS;AACxB,gBAAM,IAAI,MAAM,mBAAmB,YAAY,MAAM,OAAO,EAAE;AAAA,QAChE;AAAA,MACF;AAEA,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,QAAQ,SAAS;AAAA,IAC/D,SAAS,OAAO;AACd,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,KAAK;AAAA,IACnD,UAAE;AAEA,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AACA,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,iBAAe,iBAAmC;AAEhD,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAC/D,UAAM,yBAAyB,YAC5B;AAAA,MACC,CAAC,MACC,EAAE,mBAAmB;AAAA,IACzB,EACC,IAAI,CAAC,MAAM,EAAE,cAAc;AAG9B,UAAM,MAAM,MAAM,QAAQ,kBAAkB,sBAAsB;AAClE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AAGA,UAAM,MAAM,YAAY,IAAI,IAAI,OAAO;AACvC,QAAI,CAAC,KAAK;AAER,YAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,QAC9B,QAAQ;AAAA,QACR,OAAO,gBAAgB,IAAI,OAAO;AAAA,MACpC,CAAC;AACD,aAAO;AAAA,IACT;AAGA,UAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,MAC9B,QAAQ;AAAA,MACR,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACtC,CAAC;AAED,UAAM,WAAW,KAAK,GAAG;AAEzB,WAAO;AAAA,EACT;AAEA,iBAAe,OAAsB;AACnC,QAAI,CAAC,SAAS;AACZ;AAAA,IACF;AAEA,UAAM,SAAS,YAAY;AAEzB,YAAM,iBAAiB;AACvB,YAAM,eAAe;AAAA,IACvB;AAEA,QAAI;AACF,0BAAoB,OAAO;AAC3B,YAAM;AAAA,IACR,UAAE;AACA,0BAAoB;AAAA,IACtB;AAEA,QAAI,SAAS;AACX,uBAAiB,WAAW,MAAM,KAAK,GAAG,OAAO,eAAe;AAAA,IAClE,WAAW,cAAc;AACvB,mBAAa;AACb,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,YAAqB;AACvB,aAAO;AAAA,IACT;AAAA,IAEA,QAAc;AACZ,UAAI,SAAS;AACX;AAAA,MACF;AACA,gBAAU;AACV,WAAK;AAAA,IACP;AAAA,IAEA,MAAM,OAAsB;AAC1B,UAAI,CAAC,SAAS;AACZ;AAAA,MACF;AAEA,gBAAU;AAEV,UAAI,gBAAgB;AAClB,qBAAa,cAAc;AAC3B,yBAAiB;AAAA,MACnB;AAEA,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AAEA,UAAI,mBAAmB;AAErB,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,yBAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;APrRA,IAAM,WAAW;AAAA,EACf,iBAAiB;AAAA,EACjB,mBAAmB;AAAA,EACnB,gBAAgB;AAClB;AAiHO,SAAS,cAAc,SAAkC;AAC9D,QAAM,SAAS;AAAA,IACb,iBAAiB,QAAQ,mBAAmB,SAAS;AAAA,IACrD,mBAAmB,QAAQ,qBAAqB,SAAS;AAAA,IACzD,gBAAgB,QAAQ,kBAAkB,SAAS;AAAA,EACrD;AAEA,QAAM,KAAK,IAAI,OAAiB,EAAE,SAAS,QAAQ,QAAQ,CAAC;AAC5D,QAAM,UAAU,
oBAAoB,EAAE;AACtC,QAAM,eAAe,mBAAmB;AACxC,QAAM,cAAc,kBAAkB;AACtC,QAAM,SAAS,aAAa,QAAQ,SAAS,cAAc,WAAW;AAGtE,MAAI,YAAkC;AACtC,MAAI,WAAW;AAEf,QAAM,UAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA,IAAI,aAAa;AAAA,IACjB,MAAM,aAAa;AAAA,IACnB,SAAS,aAAa;AAAA,IACtB,OAAO,OAAO;AAAA,IACd,MAAM,OAAO;AAAA,IAEb,UAKE,YACA,IAQA;AACA,aAAO,gBAAgB,YAAY,IAAI,SAAS,cAAc,WAAW;AAAA,IAC3E;AAAA,IAEA,QAAQ,QAAQ;AAAA,IAChB,SAAS,QAAQ;AAAA,IAEjB,IAAI,QAA6B;AAC/B,aAAO,QAAQ,OAAO;AAAA,IACxB;AAAA,IAEA,MAAM,MAAM,OAA8B;AACxC,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,+BAA+B,KAAK,EAAE;AAAA,MACxD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AAEA,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,QACR,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,OAAO,OAA8B;AACzC,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,MACzD;AACA,UAAI,IAAI,WAAW,UAAU;AAC3B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,wCAAwC,KAAK,EAAE;AAAA,MACjE;AAEA,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAA8B;AAC5C,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AAEA,YAAM,QAAQ,UAAU,KAAK;AAAA,IAC/B;AAAA,IAEA,MAAM,UAAyB;AAE7B,UAAI,UAAU;AACZ;AAAA,MACF;AAGA,UAAI,WAAW;AACb,eAAO;AAAA,MACT;AAGA,kBAAY,cAAc,EAAE,EACzB,KAAK,MAAM;AACV,mBAAW;AAAA,MACb,CAAC,EACA,QAAQ,MAAM;AACb,oBAAY;AAAA,MACd,CAAC;AAEH,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;;;AQ7RO,SAAS,qBAAoC;AAClD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QAAQ,SAAS;AACf,cAAQ,GAAG,aAAa,OAAO,UAAU;AACvC,cAAM,QAAQ,QAAQ,UAAU;AAAA,UAC9B,OAAO,MAAM;AAAA,UACb,UAAU,MAAM;AAAA,UAChB,OAAO,MAAM;AAAA,UACb,SAAS,MAAM;AAAA,UACf,MAAM,MAAM;AAAA,QACd,CAAC;AAAA,MACH,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../src/durably.ts","../src/events.ts","../src/job.ts","../src/migrations.ts","../src/storage.ts","../src/errors.ts","../src/context.ts","../src/worker.ts","../src/plugins/log-persistence.ts"],"sourcesContent":["import type { Dialect } from 'kysely'\nimport { Kysely } from 'kysely'\nimport type { z } from 'zod'\nimport {\n type AnyEventInput,\n type ErrorHandler,\n type EventListener,\n type EventType,\n type Unsubscribe,\n createEventEmitter,\n} from './events'\nimport {\n type JobDefinition,\n type JobFunction,\n type JobHandle,\n createJobHandle,\n createJobRegistry,\n} from './job'\nimport { runMigrations } from './migrations'\nimport type { Database } from './schema'\nimport {\n type Run,\n type RunFilter,\n type Storage,\n createKyselyStorage,\n} from './storage'\nimport { createWorker } from './worker'\n\n/**\n * Options for creating a Durably instance\n */\nexport interface DurablyOptions {\n dialect: Dialect\n pollingInterval?: number\n heartbeatInterval?: number\n staleThreshold?: number\n}\n\n/**\n * Default configuration values\n */\nconst DEFAULTS = {\n pollingInterval: 1000,\n heartbeatInterval: 5000,\n staleThreshold: 30000,\n} as const\n\n/**\n * Plugin interface for extending Durably\n */\nexport interface DurablyPlugin {\n name: string\n install(durably: Durably): void\n}\n\n/**\n * Durably instance\n */\nexport interface Durably {\n /**\n * Run database migrations\n * This is idempotent and safe to call multiple times\n */\n migrate(): Promise<void>\n\n /**\n * Get the underlying Kysely database instance\n * Useful for testing and advanced use cases\n */\n readonly db: Kysely<Database>\n\n /**\n * Storage layer for database operations\n */\n readonly storage: Storage\n\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Define a job\n */\n defineJob<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined = undefined,\n >(\n definition: JobDefinition<TName, TInputSchema, TOutputSchema>,\n fn: JobFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >,\n ): JobHandle<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n >\n\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Retry a failed run by resetting it to pending\n * @throws Error if run is not in failed status\n */\n retry(runId: string): Promise<void>\n\n /**\n * Cancel a pending or running run\n * @throws Error if run is already completed, failed, or cancelled\n */\n cancel(runId: string): Promise<void>\n\n /**\n * Delete a completed, failed, or cancelled run and its associated steps and logs\n * @throws Error if run is pending or running, or does not exist\n */\n deleteRun(runId: string): Promise<void>\n\n /**\n * Get a run by ID (returns unknown output type)\n */\n getRun(runId: string): Promise<Run | null>\n\n /**\n * Get runs with optional filtering\n */\n getRuns(filter?: RunFilter): Promise<Run[]>\n\n /**\n * Register a plugin\n */\n use(plugin: DurablyPlugin): void\n}\n\n/**\n * Create a Durably instance\n */\nexport function createDurably(options: DurablyOptions): Durably {\n const config = {\n pollingInterval: options.pollingInterval ?? DEFAULTS.pollingInterval,\n heartbeatInterval: options.heartbeatInterval ?? DEFAULTS.heartbeatInterval,\n staleThreshold: options.staleThreshold ?? DEFAULTS.staleThreshold,\n }\n\n const db = new Kysely<Database>({ dialect: options.dialect })\n const storage = createKyselyStorage(db)\n const eventEmitter = createEventEmitter()\n const jobRegistry = createJobRegistry()\n const worker = createWorker(config, storage, eventEmitter, jobRegistry)\n\n // Track migration state for idempotency\n let migrating: Promise<void> | null = null\n let migrated = false\n\n const durably: Durably = {\n db,\n storage,\n on: eventEmitter.on,\n emit: eventEmitter.emit,\n onError: eventEmitter.onError,\n start: worker.start,\n stop: worker.stop,\n\n defineJob<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined = undefined,\n >(\n definition: JobDefinition<TName, TInputSchema, TOutputSchema>,\n fn: JobFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >,\n ): JobHandle<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? 
z.infer<TOutputSchema> : void\n > {\n return createJobHandle(definition, fn, storage, eventEmitter, jobRegistry)\n },\n\n getRun: storage.getRun,\n getRuns: storage.getRuns,\n\n use(plugin: DurablyPlugin): void {\n plugin.install(durably)\n },\n\n async retry(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'completed') {\n throw new Error(`Cannot retry completed run: ${runId}`)\n }\n if (run.status === 'pending') {\n throw new Error(`Cannot retry pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot retry running run: ${runId}`)\n }\n // Only failed runs can be retried\n await storage.updateRun(runId, {\n status: 'pending',\n error: null,\n })\n },\n\n async cancel(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'completed') {\n throw new Error(`Cannot cancel completed run: ${runId}`)\n }\n if (run.status === 'failed') {\n throw new Error(`Cannot cancel failed run: ${runId}`)\n }\n if (run.status === 'cancelled') {\n throw new Error(`Cannot cancel already cancelled run: ${runId}`)\n }\n // pending or running can be cancelled\n await storage.updateRun(runId, {\n status: 'cancelled',\n })\n },\n\n async deleteRun(runId: string): Promise<void> {\n const run = await storage.getRun(runId)\n if (!run) {\n throw new Error(`Run not found: ${runId}`)\n }\n if (run.status === 'pending') {\n throw new Error(`Cannot delete pending run: ${runId}`)\n }\n if (run.status === 'running') {\n throw new Error(`Cannot delete running run: ${runId}`)\n }\n // completed, failed, or cancelled can be deleted\n await storage.deleteRun(runId)\n },\n\n async migrate(): Promise<void> {\n // Already migrated\n if (migrated) {\n return\n }\n\n // Migration in progress, wait for it\n if (migrating) {\n return migrating\n }\n\n // Start migration\n migrating = runMigrations(db)\n .then(() => {\n migrated = true\n })\n .finally(() => {\n migrating = null\n })\n\n return migrating\n },\n }\n\n return durably\n}\n","/**\n * Base event interface\n */\nexport interface BaseEvent {\n type: string\n timestamp: string\n sequence: number\n}\n\n/**\n * Run start event\n */\nexport interface RunStartEvent extends BaseEvent {\n type: 'run:start'\n runId: string\n jobName: string\n payload: unknown\n}\n\n/**\n * Run complete event\n */\nexport interface RunCompleteEvent extends BaseEvent {\n type: 'run:complete'\n runId: string\n jobName: string\n output: unknown\n duration: number\n}\n\n/**\n * Run fail event\n */\nexport interface RunFailEvent extends BaseEvent {\n type: 'run:fail'\n runId: string\n jobName: string\n error: string\n failedStepName: string\n}\n\n/**\n * Step start event\n */\nexport interface StepStartEvent extends BaseEvent {\n type: 'step:start'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n}\n\n/**\n * Step complete event\n */\nexport interface StepCompleteEvent extends BaseEvent {\n type: 'step:complete'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n output: unknown\n duration: number\n}\n\n/**\n * Step fail event\n */\nexport interface StepFailEvent extends BaseEvent {\n type: 'step:fail'\n runId: string\n jobName: string\n stepName: string\n stepIndex: number\n error: string\n}\n\n/**\n * Log write event\n */\nexport interface LogWriteEvent extends BaseEvent {\n type: 'log:write'\n runId: string\n stepName: 
string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data: unknown\n}\n\n/**\n * Worker error event (internal errors like heartbeat failures)\n */\nexport interface WorkerErrorEvent extends BaseEvent {\n type: 'worker:error'\n error: string\n context: string\n runId?: string\n}\n\n/**\n * All event types as discriminated union\n */\nexport type DurablyEvent =\n | RunStartEvent\n | RunCompleteEvent\n | RunFailEvent\n | StepStartEvent\n | StepCompleteEvent\n | StepFailEvent\n | LogWriteEvent\n | WorkerErrorEvent\n\n/**\n * Event types for type-safe event names\n */\nexport type EventType = DurablyEvent['type']\n\n/**\n * Extract event by type\n */\nexport type EventByType<T extends EventType> = Extract<\n DurablyEvent,\n { type: T }\n>\n\n/**\n * Event input (without auto-generated fields)\n */\nexport type EventInput<T extends EventType> = Omit<\n EventByType<T>,\n 'timestamp' | 'sequence'\n>\n\n/**\n * All possible event inputs as a union (properly distributed)\n */\nexport type AnyEventInput =\n | EventInput<'run:start'>\n | EventInput<'run:complete'>\n | EventInput<'run:fail'>\n | EventInput<'step:start'>\n | EventInput<'step:complete'>\n | EventInput<'step:fail'>\n | EventInput<'log:write'>\n | EventInput<'worker:error'>\n\n/**\n * Event listener function\n */\nexport type EventListener<T extends EventType> = (event: EventByType<T>) => void\n\n/**\n * Unsubscribe function returned by on()\n */\nexport type Unsubscribe = () => void\n\n/**\n * Error handler function for listener exceptions\n */\nexport type ErrorHandler = (error: Error, event: DurablyEvent) => void\n\n/**\n * Event emitter interface\n */\nexport interface EventEmitter {\n /**\n * Register an event listener\n * @returns Unsubscribe function\n */\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe\n\n /**\n * Register an error handler for listener exceptions\n */\n onError(handler: ErrorHandler): void\n\n /**\n * Emit an event (auto-assigns timestamp and sequence)\n */\n emit(event: AnyEventInput): void\n}\n\n/**\n * Create an event emitter\n */\nexport function createEventEmitter(): EventEmitter {\n const listeners = new Map<EventType, Set<EventListener<EventType>>>()\n let sequence = 0\n let errorHandler: ErrorHandler | null = null\n\n return {\n on<T extends EventType>(type: T, listener: EventListener<T>): Unsubscribe {\n if (!listeners.has(type)) {\n listeners.set(type, new Set())\n }\n\n const typeListeners = listeners.get(type)\n typeListeners?.add(listener as unknown as EventListener<EventType>)\n\n return () => {\n typeListeners?.delete(listener as unknown as EventListener<EventType>)\n }\n },\n\n onError(handler: ErrorHandler): void {\n errorHandler = handler\n },\n\n emit(event: AnyEventInput): void {\n sequence++\n const fullEvent = {\n ...event,\n timestamp: new Date().toISOString(),\n sequence,\n } as DurablyEvent\n\n const typeListeners = listeners.get(event.type)\n if (!typeListeners) {\n return\n }\n\n for (const listener of typeListeners) {\n try {\n listener(fullEvent)\n } catch (error) {\n if (errorHandler) {\n errorHandler(\n error instanceof Error ? 
error : new Error(String(error)),\n fullEvent,\n )\n }\n // Continue to next listener regardless of error\n }\n }\n },\n }\n}\n","import type { z } from 'zod'\nimport type { EventEmitter } from './events'\nimport type { Run, Storage } from './storage'\n\n/**\n * Step context passed to the job function\n */\nexport interface StepContext {\n /**\n * The ID of the current run\n */\n readonly runId: string\n\n /**\n * Execute a step with automatic persistence and replay\n */\n run<T>(name: string, fn: () => T | Promise<T>): Promise<T>\n\n /**\n * Report progress for the current run\n */\n progress(current: number, total?: number, message?: string): void\n\n /**\n * Log a message\n */\n log: {\n info(message: string, data?: unknown): void\n warn(message: string, data?: unknown): void\n error(message: string, data?: unknown): void\n }\n}\n\n/**\n * Job function type\n */\nexport type JobFunction<TInput, TOutput> = (\n step: StepContext,\n payload: TInput,\n) => Promise<TOutput>\n\n/**\n * Job definition options\n */\nexport interface JobDefinition<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined,\n> {\n name: TName\n input: TInputSchema\n output?: TOutputSchema\n}\n\n/**\n * Trigger options\n */\nexport interface TriggerOptions {\n idempotencyKey?: string\n concurrencyKey?: string\n /** Timeout in milliseconds for triggerAndWait() */\n timeout?: number\n}\n\n/**\n * Run filter options\n */\nexport interface RunFilter {\n status?: 'pending' | 'running' | 'completed' | 'failed'\n jobName?: string\n}\n\n/**\n * Typed run with output type\n */\nexport interface TypedRun<TOutput> extends Omit<Run, 'output'> {\n output: TOutput | null\n}\n\n/**\n * Batch trigger input - either just the input or input with options\n */\nexport type BatchTriggerInput<TInput> =\n | TInput\n | { input: TInput; options?: TriggerOptions }\n\n/**\n * Result of triggerAndWait\n */\nexport interface TriggerAndWaitResult<TOutput> {\n id: string\n output: TOutput\n}\n\n/**\n * Job handle returned by defineJob\n */\nexport interface JobHandle<TName extends string, TInput, TOutput> {\n readonly name: TName\n\n /**\n * Trigger a new run\n */\n trigger(input: TInput, options?: TriggerOptions): Promise<TypedRun<TOutput>>\n\n /**\n * Trigger a new run and wait for completion\n * Returns the output directly, throws if the run fails\n */\n triggerAndWait(\n input: TInput,\n options?: TriggerOptions,\n ): Promise<TriggerAndWaitResult<TOutput>>\n\n /**\n * Trigger multiple runs in a batch\n * All inputs are validated before any runs are created\n */\n batchTrigger(\n inputs: BatchTriggerInput<TInput>[],\n ): Promise<TypedRun<TOutput>[]>\n\n /**\n * Get a run by ID\n */\n getRun(id: string): Promise<TypedRun<TOutput> | null>\n\n /**\n * Get runs with optional filter\n */\n getRuns(filter?: Omit<RunFilter, 'jobName'>): Promise<TypedRun<TOutput>[]>\n}\n\n/**\n * Internal job registration\n */\nexport interface RegisteredJob<TInput, TOutput> {\n name: string\n inputSchema: z.ZodType\n outputSchema: z.ZodType | undefined\n fn: JobFunction<TInput, TOutput>\n}\n\n/**\n * Job registry for managing registered jobs\n */\nexport interface JobRegistry {\n /**\n * Register a job\n */\n register<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void\n\n /**\n * Get a registered job by name\n */\n get(name: string): RegisteredJob<unknown, unknown> | undefined\n\n /**\n * Check if a job is registered\n */\n has(name: string): boolean\n}\n\n/**\n * Create a job registry\n */\nexport function 
createJobRegistry(): JobRegistry {\n const jobs = new Map<string, RegisteredJob<unknown, unknown>>()\n\n return {\n register<TInput, TOutput>(job: RegisteredJob<TInput, TOutput>): void {\n if (jobs.has(job.name)) {\n throw new Error(`Job \"${job.name}\" is already registered`)\n }\n jobs.set(job.name, job as RegisteredJob<unknown, unknown>)\n },\n\n get(name: string): RegisteredJob<unknown, unknown> | undefined {\n return jobs.get(name)\n },\n\n has(name: string): boolean {\n return jobs.has(name)\n },\n }\n}\n\n/**\n * Create a job handle\n */\nexport function createJobHandle<\n TName extends string,\n TInputSchema extends z.ZodType,\n TOutputSchema extends z.ZodType | undefined,\n>(\n definition: JobDefinition<TName, TInputSchema, TOutputSchema>,\n fn: JobFunction<\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : void\n >,\n storage: Storage,\n _eventEmitter: EventEmitter,\n registry: JobRegistry,\n): JobHandle<\n TName,\n z.infer<TInputSchema>,\n TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : undefined\n> {\n type TInput = z.infer<TInputSchema>\n type TOutput = TOutputSchema extends z.ZodType\n ? z.infer<TOutputSchema>\n : undefined\n\n // Register the job\n registry.register({\n name: definition.name,\n inputSchema: definition.input,\n outputSchema: definition.output,\n fn: fn as JobFunction<unknown, unknown>,\n })\n\n return {\n name: definition.name,\n\n async trigger(\n input: TInput,\n options?: TriggerOptions,\n ): Promise<TypedRun<TOutput>> {\n // Validate input\n const parseResult = definition.input.safeParse(input)\n if (!parseResult.success) {\n throw new Error(`Invalid input: ${parseResult.error.message}`)\n }\n\n // Create the run\n const run = await storage.createRun({\n jobName: definition.name,\n payload: parseResult.data,\n idempotencyKey: options?.idempotencyKey,\n concurrencyKey: options?.concurrencyKey,\n })\n\n return run as TypedRun<TOutput>\n },\n\n async triggerAndWait(\n input: TInput,\n options?: TriggerOptions,\n ): Promise<TriggerAndWaitResult<TOutput>> {\n // Trigger the run\n const run = await this.trigger(input, options)\n\n // Wait for completion via event subscription\n return new Promise((resolve, reject) => {\n let timeoutId: ReturnType<typeof setTimeout> | undefined\n let resolved = false\n\n const cleanup = () => {\n if (resolved) return\n resolved = true\n unsubscribeComplete()\n unsubscribeFail()\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n }\n\n const unsubscribeComplete = _eventEmitter.on(\n 'run:complete',\n (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n resolve({\n id: run.id,\n output: event.output as TOutput,\n })\n }\n },\n )\n\n const unsubscribeFail = _eventEmitter.on('run:fail', (event) => {\n if (event.runId === run.id && !resolved) {\n cleanup()\n reject(new Error(event.error))\n }\n })\n\n // Check current status after subscribing (race condition mitigation)\n // If the run completed before we subscribed, we need to handle it\n storage.getRun(run.id).then((currentRun) => {\n if (resolved || !currentRun) return\n if (currentRun.status === 'completed') {\n cleanup()\n resolve({\n id: run.id,\n output: currentRun.output as TOutput,\n })\n } else if (currentRun.status === 'failed') {\n cleanup()\n reject(new Error(currentRun.error || 'Run failed'))\n }\n })\n\n // Set timeout if specified\n if (options?.timeout !== undefined) {\n timeoutId = setTimeout(() => {\n if (!resolved) {\n cleanup()\n reject(\n new Error(`triggerAndWait timeout after 
${options.timeout}ms`),\n )\n }\n }, options.timeout)\n }\n })\n },\n\n async batchTrigger(\n inputs: (TInput | { input: TInput; options?: TriggerOptions })[],\n ): Promise<TypedRun<TOutput>[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Normalize inputs to { input, options } format\n const normalized = inputs.map((item) => {\n if (item && typeof item === 'object' && 'input' in item) {\n return item as { input: TInput; options?: TriggerOptions }\n }\n return { input: item as TInput, options: undefined }\n })\n\n // Validate all inputs first (before creating any runs)\n const validated: { payload: unknown; options?: TriggerOptions }[] = []\n for (let i = 0; i < normalized.length; i++) {\n const parseResult = definition.input.safeParse(normalized[i].input)\n if (!parseResult.success) {\n throw new Error(\n `Invalid input at index ${i}: ${parseResult.error.message}`,\n )\n }\n validated.push({\n payload: parseResult.data,\n options: normalized[i].options,\n })\n }\n\n // Create all runs\n const runs = await storage.batchCreateRuns(\n validated.map((v) => ({\n jobName: definition.name,\n payload: v.payload,\n idempotencyKey: v.options?.idempotencyKey,\n concurrencyKey: v.options?.concurrencyKey,\n })),\n )\n\n return runs as TypedRun<TOutput>[]\n },\n\n async getRun(id: string): Promise<TypedRun<TOutput> | null> {\n const run = await storage.getRun(id)\n if (!run || run.jobName !== definition.name) {\n return null\n }\n return run as TypedRun<TOutput>\n },\n\n async getRuns(\n filter?: Omit<RunFilter, 'jobName'>,\n ): Promise<TypedRun<TOutput>[]> {\n const runs = await storage.getRuns({\n ...filter,\n jobName: definition.name,\n })\n return runs as TypedRun<TOutput>[]\n },\n }\n}\n","import type { Kysely } from 'kysely'\nimport type { Database } from './schema'\n\n/**\n * Migration definitions\n */\ninterface Migration {\n version: number\n up: (db: Kysely<Database>) => Promise<void>\n}\n\nconst migrations: Migration[] = [\n {\n version: 1,\n up: async (db) => {\n // Create runs table\n await db.schema\n .createTable('durably_runs')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('job_name', 'text', (col) => col.notNull())\n .addColumn('payload', 'text', (col) => col.notNull())\n .addColumn('status', 'text', (col) => col.notNull())\n .addColumn('idempotency_key', 'text')\n .addColumn('concurrency_key', 'text')\n .addColumn('current_step_index', 'integer', (col) =>\n col.notNull().defaultTo(0),\n )\n .addColumn('progress', 'text')\n .addColumn('output', 'text')\n .addColumn('error', 'text')\n .addColumn('heartbeat_at', 'text', (col) => col.notNull())\n .addColumn('created_at', 'text', (col) => col.notNull())\n .addColumn('updated_at', 'text', (col) => col.notNull())\n .execute()\n\n // Create runs indexes\n await db.schema\n .createIndex('idx_durably_runs_job_idempotency')\n .ifNotExists()\n .on('durably_runs')\n .columns(['job_name', 'idempotency_key'])\n .unique()\n .execute()\n\n await db.schema\n .createIndex('idx_durably_runs_status_concurrency')\n .ifNotExists()\n .on('durably_runs')\n .columns(['status', 'concurrency_key'])\n .execute()\n\n await db.schema\n .createIndex('idx_durably_runs_status_created')\n .ifNotExists()\n .on('durably_runs')\n .columns(['status', 'created_at'])\n .execute()\n\n // Create steps table\n await db.schema\n .createTable('durably_steps')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('run_id', 'text', (col) => col.notNull())\n .addColumn('name', 'text', (col) => 
col.notNull())\n .addColumn('index', 'integer', (col) => col.notNull())\n .addColumn('status', 'text', (col) => col.notNull())\n .addColumn('output', 'text')\n .addColumn('error', 'text')\n .addColumn('started_at', 'text', (col) => col.notNull())\n .addColumn('completed_at', 'text')\n .execute()\n\n // Create steps index\n await db.schema\n .createIndex('idx_durably_steps_run_index')\n .ifNotExists()\n .on('durably_steps')\n .columns(['run_id', 'index'])\n .execute()\n\n // Create logs table\n await db.schema\n .createTable('durably_logs')\n .ifNotExists()\n .addColumn('id', 'text', (col) => col.primaryKey())\n .addColumn('run_id', 'text', (col) => col.notNull())\n .addColumn('step_name', 'text')\n .addColumn('level', 'text', (col) => col.notNull())\n .addColumn('message', 'text', (col) => col.notNull())\n .addColumn('data', 'text')\n .addColumn('created_at', 'text', (col) => col.notNull())\n .execute()\n\n // Create logs index\n await db.schema\n .createIndex('idx_durably_logs_run_created')\n .ifNotExists()\n .on('durably_logs')\n .columns(['run_id', 'created_at'])\n .execute()\n\n // Create schema_versions table\n await db.schema\n .createTable('durably_schema_versions')\n .ifNotExists()\n .addColumn('version', 'integer', (col) => col.primaryKey())\n .addColumn('applied_at', 'text', (col) => col.notNull())\n .execute()\n },\n },\n]\n\n/**\n * Get the current schema version from the database\n */\nasync function getCurrentVersion(db: Kysely<Database>): Promise<number> {\n try {\n const result = await db\n .selectFrom('durably_schema_versions')\n .select('version')\n .orderBy('version', 'desc')\n .limit(1)\n .executeTakeFirst()\n\n return result?.version ?? 0\n } catch {\n // Table doesn't exist yet\n return 0\n }\n}\n\n/**\n * Run pending migrations\n */\nexport async function runMigrations(db: Kysely<Database>): Promise<void> {\n const currentVersion = await getCurrentVersion(db)\n\n for (const migration of migrations) {\n if (migration.version > currentVersion) {\n await migration.up(db)\n\n await db\n .insertInto('durably_schema_versions')\n .values({\n version: migration.version,\n applied_at: new Date().toISOString(),\n })\n .execute()\n }\n }\n}\n","import type { Kysely } from 'kysely'\nimport { ulid } from 'ulidx'\nimport type { Database } from './schema'\n\n/**\n * Run data for creating a new run\n */\nexport interface CreateRunInput {\n jobName: string\n payload: unknown\n idempotencyKey?: string\n concurrencyKey?: string\n}\n\n/**\n * Run data returned from storage\n */\nexport interface Run {\n id: string\n jobName: string\n payload: unknown\n status: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n idempotencyKey: string | null\n concurrencyKey: string | null\n currentStepIndex: number\n progress: { current: number; total?: number; message?: string } | null\n output: unknown | null\n error: string | null\n heartbeatAt: string\n createdAt: string\n updatedAt: string\n}\n\n/**\n * Run update data\n */\nexport interface UpdateRunInput {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n currentStepIndex?: number\n progress?: { current: number; total?: number; message?: string } | null\n output?: unknown\n error?: string | null\n heartbeatAt?: string\n}\n\n/**\n * Run filter options\n */\nexport interface RunFilter {\n status?: 'pending' | 'running' | 'completed' | 'failed' | 'cancelled'\n jobName?: string\n /** Maximum number of runs to return */\n limit?: number\n /** Number of runs to skip (for pagination) */\n offset?: number\n}\n\n/**\n * 
Step data for creating a new step\n */\nexport interface CreateStepInput {\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed'\n output?: unknown\n error?: string\n startedAt: string // ISO8601 timestamp when step execution started\n}\n\n/**\n * Step data returned from storage\n */\nexport interface Step {\n id: string\n runId: string\n name: string\n index: number\n status: 'completed' | 'failed'\n output: unknown | null\n error: string | null\n startedAt: string\n completedAt: string | null\n}\n\n/**\n * Log data for creating a new log\n */\nexport interface CreateLogInput {\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data?: unknown\n}\n\n/**\n * Log data returned from storage\n */\nexport interface Log {\n id: string\n runId: string\n stepName: string | null\n level: 'info' | 'warn' | 'error'\n message: string\n data: unknown | null\n createdAt: string\n}\n\n/**\n * Storage interface for database operations\n */\nexport interface Storage {\n // Run operations\n createRun(input: CreateRunInput): Promise<Run>\n batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]>\n updateRun(runId: string, data: UpdateRunInput): Promise<void>\n deleteRun(runId: string): Promise<void>\n getRun(runId: string): Promise<Run | null>\n getRuns(filter?: RunFilter): Promise<Run[]>\n getNextPendingRun(excludeConcurrencyKeys: string[]): Promise<Run | null>\n\n // Step operations\n createStep(input: CreateStepInput): Promise<Step>\n getSteps(runId: string): Promise<Step[]>\n getCompletedStep(runId: string, name: string): Promise<Step | null>\n\n // Log operations\n createLog(input: CreateLogInput): Promise<Log>\n getLogs(runId: string): Promise<Log[]>\n}\n\n/**\n * Convert database row to Run object\n */\nfunction rowToRun(row: Database['durably_runs']): Run {\n return {\n id: row.id,\n jobName: row.job_name,\n payload: JSON.parse(row.payload),\n status: row.status,\n idempotencyKey: row.idempotency_key,\n concurrencyKey: row.concurrency_key,\n currentStepIndex: row.current_step_index,\n progress: row.progress ? JSON.parse(row.progress) : null,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n heartbeatAt: row.heartbeat_at,\n createdAt: row.created_at,\n updatedAt: row.updated_at,\n }\n}\n\n/**\n * Convert database row to Step object\n */\nfunction rowToStep(row: Database['durably_steps']): Step {\n return {\n id: row.id,\n runId: row.run_id,\n name: row.name,\n index: row.index,\n status: row.status,\n output: row.output ? JSON.parse(row.output) : null,\n error: row.error,\n startedAt: row.started_at,\n completedAt: row.completed_at,\n }\n}\n\n/**\n * Convert database row to Log object\n */\nfunction rowToLog(row: Database['durably_logs']): Log {\n return {\n id: row.id,\n runId: row.run_id,\n stepName: row.step_name,\n level: row.level,\n message: row.message,\n data: row.data ? 
JSON.parse(row.data) : null,\n createdAt: row.created_at,\n }\n}\n\n/**\n * Create a Kysely-based Storage implementation\n */\nexport function createKyselyStorage(db: Kysely<Database>): Storage {\n return {\n async createRun(input: CreateRunInput): Promise<Run> {\n const now = new Date().toISOString()\n\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await db\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n return rowToRun(existing)\n }\n }\n\n const id = ulid()\n const run: Database['durably_runs'] = {\n id,\n job_name: input.jobName,\n payload: JSON.stringify(input.payload),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n heartbeat_at: now,\n created_at: now,\n updated_at: now,\n }\n\n await db.insertInto('durably_runs').values(run).execute()\n\n return rowToRun(run)\n },\n\n async batchCreateRuns(inputs: CreateRunInput[]): Promise<Run[]> {\n if (inputs.length === 0) {\n return []\n }\n\n // Use transaction to ensure atomicity of idempotency checks and inserts\n return await db.transaction().execute(async (trx) => {\n const now = new Date().toISOString()\n const runs: Database['durably_runs'][] = []\n\n // Process inputs - check idempotency keys and create run objects\n for (const input of inputs) {\n // Check for existing run with same idempotency key\n if (input.idempotencyKey) {\n const existing = await trx\n .selectFrom('durably_runs')\n .selectAll()\n .where('job_name', '=', input.jobName)\n .where('idempotency_key', '=', input.idempotencyKey)\n .executeTakeFirst()\n\n if (existing) {\n runs.push(existing)\n continue\n }\n }\n\n const id = ulid()\n runs.push({\n id,\n job_name: input.jobName,\n payload: JSON.stringify(input.payload),\n status: 'pending',\n idempotency_key: input.idempotencyKey ?? null,\n concurrency_key: input.concurrencyKey ?? null,\n current_step_index: 0,\n progress: null,\n output: null,\n error: null,\n heartbeat_at: now,\n created_at: now,\n updated_at: now,\n })\n }\n\n // Insert all new runs in a single batch\n const newRuns = runs.filter((r) => r.created_at === now)\n if (newRuns.length > 0) {\n await trx.insertInto('durably_runs').values(newRuns).execute()\n }\n\n return runs.map(rowToRun)\n })\n },\n\n async updateRun(runId: string, data: UpdateRunInput): Promise<void> {\n const now = new Date().toISOString()\n const updates: Partial<Database['durably_runs']> = {\n updated_at: now,\n }\n\n if (data.status !== undefined) updates.status = data.status\n if (data.currentStepIndex !== undefined)\n updates.current_step_index = data.currentStepIndex\n if (data.progress !== undefined)\n updates.progress = data.progress ? 
JSON.stringify(data.progress) : null\n if (data.output !== undefined)\n updates.output = JSON.stringify(data.output)\n if (data.error !== undefined) updates.error = data.error\n if (data.heartbeatAt !== undefined)\n updates.heartbeat_at = data.heartbeatAt\n\n await db\n .updateTable('durably_runs')\n .set(updates)\n .where('id', '=', runId)\n .execute()\n },\n\n async deleteRun(runId: string): Promise<void> {\n // Delete in order: logs -> steps -> run (due to foreign key constraints)\n await db.deleteFrom('durably_logs').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_steps').where('run_id', '=', runId).execute()\n await db.deleteFrom('durably_runs').where('id', '=', runId).execute()\n },\n\n async getRun(runId: string): Promise<Run | null> {\n const row = await db\n .selectFrom('durably_runs')\n .selectAll()\n .where('id', '=', runId)\n .executeTakeFirst()\n\n return row ? rowToRun(row) : null\n },\n\n async getRuns(filter?: RunFilter): Promise<Run[]> {\n let query = db.selectFrom('durably_runs').selectAll()\n\n if (filter?.status) {\n query = query.where('status', '=', filter.status)\n }\n if (filter?.jobName) {\n query = query.where('job_name', '=', filter.jobName)\n }\n\n query = query.orderBy('created_at', 'desc')\n\n if (filter?.limit !== undefined) {\n query = query.limit(filter.limit)\n }\n if (filter?.offset !== undefined) {\n // SQLite requires LIMIT when using OFFSET\n if (filter.limit === undefined) {\n query = query.limit(-1) // -1 means unlimited in SQLite\n }\n query = query.offset(filter.offset)\n }\n\n const rows = await query.execute()\n return rows.map(rowToRun)\n },\n\n async getNextPendingRun(\n excludeConcurrencyKeys: string[],\n ): Promise<Run | null> {\n let query = db\n .selectFrom('durably_runs')\n .selectAll()\n .where('status', '=', 'pending')\n .orderBy('created_at', 'asc')\n .limit(1)\n\n if (excludeConcurrencyKeys.length > 0) {\n query = query.where((eb) =>\n eb.or([\n eb('concurrency_key', 'is', null),\n eb('concurrency_key', 'not in', excludeConcurrencyKeys),\n ]),\n )\n }\n\n const row = await query.executeTakeFirst()\n return row ? rowToRun(row) : null\n },\n\n async createStep(input: CreateStepInput): Promise<Step> {\n const completedAt = new Date().toISOString()\n const id = ulid()\n\n const step: Database['durably_steps'] = {\n id,\n run_id: input.runId,\n name: input.name,\n index: input.index,\n status: input.status,\n output:\n input.output !== undefined ? JSON.stringify(input.output) : null,\n error: input.error ?? null,\n started_at: input.startedAt,\n completed_at: completedAt,\n }\n\n await db.insertInto('durably_steps').values(step).execute()\n\n return rowToStep(step)\n },\n\n async getSteps(runId: string): Promise<Step[]> {\n const rows = await db\n .selectFrom('durably_steps')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('index', 'asc')\n .execute()\n\n return rows.map(rowToStep)\n },\n\n async getCompletedStep(runId: string, name: string): Promise<Step | null> {\n const row = await db\n .selectFrom('durably_steps')\n .selectAll()\n .where('run_id', '=', runId)\n .where('name', '=', name)\n .where('status', '=', 'completed')\n .executeTakeFirst()\n\n return row ? rowToStep(row) : null\n },\n\n async createLog(input: CreateLogInput): Promise<Log> {\n const now = new Date().toISOString()\n const id = ulid()\n\n const log: Database['durably_logs'] = {\n id,\n run_id: input.runId,\n step_name: input.stepName,\n level: input.level,\n message: input.message,\n data: input.data !== undefined ? 
JSON.stringify(input.data) : null,\n created_at: now,\n }\n\n await db.insertInto('durably_logs').values(log).execute()\n\n return rowToLog(log)\n },\n\n async getLogs(runId: string): Promise<Log[]> {\n const rows = await db\n .selectFrom('durably_logs')\n .selectAll()\n .where('run_id', '=', runId)\n .orderBy('created_at', 'asc')\n .execute()\n\n return rows.map(rowToLog)\n },\n }\n}\n","/**\n * Error thrown when a run is cancelled during execution.\n * The worker catches this error and treats it specially - it does not\n * mark the run as failed, as the run status is already 'cancelled'.\n */\nexport class CancelledError extends Error {\n constructor(runId: string) {\n super(`Run was cancelled: ${runId}`)\n this.name = 'CancelledError'\n }\n}\n","import { CancelledError } from './errors'\nimport type { EventEmitter } from './events'\nimport type { StepContext } from './job'\nimport type { Run, Storage } from './storage'\n\n/**\n * Create a step context for executing a run\n */\nexport function createStepContext(\n run: Run,\n jobName: string,\n storage: Storage,\n eventEmitter: EventEmitter,\n): StepContext {\n let stepIndex = run.currentStepIndex\n let currentStepName: string | null = null\n\n return {\n get runId(): string {\n return run.id\n },\n\n async run<T>(name: string, fn: () => T | Promise<T>): Promise<T> {\n // Check if run was cancelled before executing this step\n const currentRun = await storage.getRun(run.id)\n if (currentRun?.status === 'cancelled') {\n throw new CancelledError(run.id)\n }\n\n // Check if step was already completed\n const existingStep = await storage.getCompletedStep(run.id, name)\n if (existingStep) {\n stepIndex++\n return existingStep.output as T\n }\n\n // Track current step for log attribution\n currentStepName = name\n\n // Record step start time\n const startedAt = new Date().toISOString()\n const startTime = Date.now()\n\n // Emit step:start event\n eventEmitter.emit({\n type: 'step:start',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n })\n\n try {\n // Execute the step\n const result = await fn()\n\n // Save step result\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: 'completed',\n output: result,\n startedAt,\n })\n\n // Update run's current step index\n stepIndex++\n await storage.updateRun(run.id, { currentStepIndex: stepIndex })\n\n // Emit step:complete event\n eventEmitter.emit({\n type: 'step:complete',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex: stepIndex - 1,\n output: result,\n duration: Date.now() - startTime,\n })\n\n return result\n } catch (error) {\n // Save failed step\n const errorMessage =\n error instanceof Error ? 
error.message : String(error)\n\n await storage.createStep({\n runId: run.id,\n name,\n index: stepIndex,\n status: 'failed',\n error: errorMessage,\n startedAt,\n })\n\n // Emit step:fail event\n eventEmitter.emit({\n type: 'step:fail',\n runId: run.id,\n jobName,\n stepName: name,\n stepIndex,\n error: errorMessage,\n })\n\n throw error\n } finally {\n // Clear current step after execution\n currentStepName = null\n }\n },\n\n progress(current: number, total?: number, message?: string): void {\n // Fire and forget - don't await\n storage.updateRun(run.id, {\n progress: { current, total, message },\n })\n },\n\n log: {\n info(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n stepName: currentStepName,\n level: 'info',\n message,\n data,\n })\n },\n\n warn(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n stepName: currentStepName,\n level: 'warn',\n message,\n data,\n })\n },\n\n error(message: string, data?: unknown): void {\n eventEmitter.emit({\n type: 'log:write',\n runId: run.id,\n stepName: currentStepName,\n level: 'error',\n message,\n data,\n })\n },\n },\n }\n}\n","import { createStepContext } from './context'\nimport { CancelledError } from './errors'\nimport type { EventEmitter } from './events'\nimport type { JobRegistry } from './job'\nimport type { Storage } from './storage'\n\n/**\n * Worker configuration\n */\nexport interface WorkerConfig {\n pollingInterval: number\n heartbeatInterval: number\n staleThreshold: number\n}\n\n/**\n * Worker state\n */\nexport interface Worker {\n /**\n * Start the worker polling loop\n */\n start(): void\n\n /**\n * Stop the worker after current run completes\n */\n stop(): Promise<void>\n\n /**\n * Check if worker is running\n */\n readonly isRunning: boolean\n}\n\n/**\n * Create a worker instance\n */\nexport function createWorker(\n config: WorkerConfig,\n storage: Storage,\n eventEmitter: EventEmitter,\n jobRegistry: JobRegistry,\n): Worker {\n let running = false\n let currentRunPromise: Promise<void> | null = null\n let pollingTimeout: ReturnType<typeof setTimeout> | null = null\n let stopResolver: (() => void) | null = null\n let heartbeatInterval: ReturnType<typeof setInterval> | null = null\n let currentRunId: string | null = null\n\n /**\n * Recover stale runs by resetting them to pending\n */\n async function recoverStaleRuns(): Promise<void> {\n const staleThreshold = new Date(\n Date.now() - config.staleThreshold,\n ).toISOString()\n const runningRuns = await storage.getRuns({ status: 'running' })\n\n for (const run of runningRuns) {\n if (run.heartbeatAt < staleThreshold) {\n // This run is stale - reset to pending\n await storage.updateRun(run.id, {\n status: 'pending',\n })\n }\n }\n }\n\n /**\n * Update heartbeat for current run\n */\n async function updateHeartbeat(): Promise<void> {\n if (currentRunId) {\n await storage.updateRun(currentRunId, {\n heartbeatAt: new Date().toISOString(),\n })\n }\n }\n\n /**\n * Extract error message from unknown error\n */\n function getErrorMessage(error: unknown): string {\n return error instanceof Error ? 
error.message : String(error)\n }\n\n /**\n * Handle successful run completion\n */\n async function handleRunSuccess(\n runId: string,\n jobName: string,\n output: unknown,\n startTime: number,\n ): Promise<void> {\n // Check if run was cancelled during execution - don't overwrite cancelled status\n const currentRun = await storage.getRun(runId)\n if (currentRun?.status === 'cancelled') {\n return\n }\n\n await storage.updateRun(runId, {\n status: 'completed',\n output,\n })\n\n eventEmitter.emit({\n type: 'run:complete',\n runId,\n jobName,\n output,\n duration: Date.now() - startTime,\n })\n }\n\n /**\n * Handle failed run\n */\n async function handleRunFailure(\n runId: string,\n jobName: string,\n error: unknown,\n ): Promise<void> {\n // If the error is CancelledError, don't treat it as a failure\n // The run status is already 'cancelled'\n if (error instanceof CancelledError) {\n return\n }\n\n // Check if run was cancelled during execution - don't overwrite cancelled status\n const currentRun = await storage.getRun(runId)\n if (currentRun?.status === 'cancelled') {\n return\n }\n\n const errorMessage = getErrorMessage(error)\n\n // Get the failed step name if available\n const steps = await storage.getSteps(runId)\n const failedStep = steps.find((s) => s.status === 'failed')\n\n await storage.updateRun(runId, {\n status: 'failed',\n error: errorMessage,\n })\n\n eventEmitter.emit({\n type: 'run:fail',\n runId,\n jobName,\n error: errorMessage,\n failedStepName: failedStep?.name ?? 'unknown',\n })\n }\n\n /**\n * Execute a run with heartbeat management\n */\n async function executeRun(\n run: Awaited<ReturnType<typeof storage.getRun>> & { id: string },\n job: NonNullable<ReturnType<typeof jobRegistry.get>>,\n ): Promise<void> {\n // Track current run for heartbeat updates\n currentRunId = run.id\n\n // Start heartbeat interval\n // Errors are emitted as events but don't stop execution\n heartbeatInterval = setInterval(() => {\n updateHeartbeat().catch((error) => {\n eventEmitter.emit({\n type: 'worker:error',\n error: error instanceof Error ? 
error.message : String(error),\n context: 'heartbeat',\n runId: run.id,\n })\n })\n }, config.heartbeatInterval)\n\n // Emit run:start event\n eventEmitter.emit({\n type: 'run:start',\n runId: run.id,\n jobName: run.jobName,\n payload: run.payload,\n })\n\n const startTime = Date.now()\n\n try {\n // Create step context and execute job\n const step = createStepContext(run, run.jobName, storage, eventEmitter)\n const output = await job.fn(step, run.payload)\n\n // Validate output if schema exists\n if (job.outputSchema) {\n const parseResult = job.outputSchema.safeParse(output)\n if (!parseResult.success) {\n throw new Error(`Invalid output: ${parseResult.error.message}`)\n }\n }\n\n await handleRunSuccess(run.id, run.jobName, output, startTime)\n } catch (error) {\n await handleRunFailure(run.id, run.jobName, error)\n } finally {\n // Stop heartbeat interval\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n currentRunId = null\n }\n }\n\n async function processNextRun(): Promise<boolean> {\n // Get running runs to exclude their concurrency keys\n const runningRuns = await storage.getRuns({ status: 'running' })\n const excludeConcurrencyKeys = runningRuns\n .filter(\n (r): r is typeof r & { concurrencyKey: string } =>\n r.concurrencyKey !== null,\n )\n .map((r) => r.concurrencyKey)\n\n // Get next pending run\n const run = await storage.getNextPendingRun(excludeConcurrencyKeys)\n if (!run) {\n return false\n }\n\n // Get the job definition\n const job = jobRegistry.get(run.jobName)\n if (!job) {\n // Unknown job - mark as failed\n await storage.updateRun(run.id, {\n status: 'failed',\n error: `Unknown job: ${run.jobName}`,\n })\n return true\n }\n\n // Transition to running\n await storage.updateRun(run.id, {\n status: 'running',\n heartbeatAt: new Date().toISOString(),\n })\n\n await executeRun(run, job)\n\n return true\n }\n\n async function poll(): Promise<void> {\n if (!running) {\n return\n }\n\n const doWork = async () => {\n // Recover stale runs before processing\n await recoverStaleRuns()\n await processNextRun()\n }\n\n try {\n currentRunPromise = doWork()\n await currentRunPromise\n } finally {\n currentRunPromise = null\n }\n\n if (running) {\n pollingTimeout = setTimeout(() => poll(), config.pollingInterval)\n } else if (stopResolver) {\n stopResolver()\n stopResolver = null\n }\n }\n\n return {\n get isRunning(): boolean {\n return running\n },\n\n start(): void {\n if (running) {\n return\n }\n running = true\n poll()\n },\n\n async stop(): Promise<void> {\n if (!running) {\n return\n }\n\n running = false\n\n if (pollingTimeout) {\n clearTimeout(pollingTimeout)\n pollingTimeout = null\n }\n\n if (heartbeatInterval) {\n clearInterval(heartbeatInterval)\n heartbeatInterval = null\n }\n\n if (currentRunPromise) {\n // Wait for current run to complete\n return new Promise<void>((resolve) => {\n stopResolver = resolve\n })\n }\n },\n }\n}\n","import type { DurablyPlugin } from '../durably'\n\n/**\n * Plugin that persists log events to the database\n */\nexport function withLogPersistence(): DurablyPlugin {\n return {\n name: 'log-persistence',\n install(durably) {\n durably.on('log:write', async (event) => {\n await durably.storage.createLog({\n runId: event.runId,\n stepName: event.stepName,\n level: event.level,\n message: event.message,\n data: event.data,\n })\n })\n },\n 
}\n}\n"],"mappings":";AACA,SAAS,cAAc;;;ACwLhB,SAAS,qBAAmC;AACjD,QAAM,YAAY,oBAAI,IAA8C;AACpE,MAAI,WAAW;AACf,MAAI,eAAoC;AAExC,SAAO;AAAA,IACL,GAAwB,MAAS,UAAyC;AACxE,UAAI,CAAC,UAAU,IAAI,IAAI,GAAG;AACxB,kBAAU,IAAI,MAAM,oBAAI,IAAI,CAAC;AAAA,MAC/B;AAEA,YAAM,gBAAgB,UAAU,IAAI,IAAI;AACxC,qBAAe,IAAI,QAA+C;AAElE,aAAO,MAAM;AACX,uBAAe,OAAO,QAA+C;AAAA,MACvE;AAAA,IACF;AAAA,IAEA,QAAQ,SAA6B;AACnC,qBAAe;AAAA,IACjB;AAAA,IAEA,KAAK,OAA4B;AAC/B;AACA,YAAM,YAAY;AAAA,QAChB,GAAG;AAAA,QACH,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC;AAAA,MACF;AAEA,YAAM,gBAAgB,UAAU,IAAI,MAAM,IAAI;AAC9C,UAAI,CAAC,eAAe;AAClB;AAAA,MACF;AAEA,iBAAW,YAAY,eAAe;AACpC,YAAI;AACF,mBAAS,SAAS;AAAA,QACpB,SAAS,OAAO;AACd,cAAI,cAAc;AAChB;AAAA,cACE,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AAAA,cACxD;AAAA,YACF;AAAA,UACF;AAAA,QAEF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACtEO,SAAS,oBAAiC;AAC/C,QAAM,OAAO,oBAAI,IAA6C;AAE9D,SAAO;AAAA,IACL,SAA0B,KAA2C;AACnE,UAAI,KAAK,IAAI,IAAI,IAAI,GAAG;AACtB,cAAM,IAAI,MAAM,QAAQ,IAAI,IAAI,yBAAyB;AAAA,MAC3D;AACA,WAAK,IAAI,IAAI,MAAM,GAAsC;AAAA,IAC3D;AAAA,IAEA,IAAI,MAA2D;AAC7D,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,IAEA,IAAI,MAAuB;AACzB,aAAO,KAAK,IAAI,IAAI;AAAA,IACtB;AAAA,EACF;AACF;AAKO,SAAS,gBAKd,YACA,IAIA,SACA,eACA,UAKA;AAOA,WAAS,SAAS;AAAA,IAChB,MAAM,WAAW;AAAA,IACjB,aAAa,WAAW;AAAA,IACxB,cAAc,WAAW;AAAA,IACzB;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,MAAM,WAAW;AAAA,IAEjB,MAAM,QACJ,OACA,SAC4B;AAE5B,YAAM,cAAc,WAAW,MAAM,UAAU,KAAK;AACpD,UAAI,CAAC,YAAY,SAAS;AACxB,cAAM,IAAI,MAAM,kBAAkB,YAAY,MAAM,OAAO,EAAE;AAAA,MAC/D;AAGA,YAAM,MAAM,MAAM,QAAQ,UAAU;AAAA,QAClC,SAAS,WAAW;AAAA,QACpB,SAAS,YAAY;AAAA,QACrB,gBAAgB,SAAS;AAAA,QACzB,gBAAgB,SAAS;AAAA,MAC3B,CAAC;AAED,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,eACJ,OACA,SACwC;AAExC,YAAM,MAAM,MAAM,KAAK,QAAQ,OAAO,OAAO;AAG7C,aAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,YAAI;AACJ,YAAI,WAAW;AAEf,cAAM,UAAU,MAAM;AACpB,cAAI,SAAU;AACd,qBAAW;AACX,8BAAoB;AACpB,0BAAgB;AAChB,cAAI,WAAW;AACb,yBAAa,SAAS;AAAA,UACxB;AAAA,QACF;AAEA,cAAM,sBAAsB,cAAc;AAAA,UACxC;AAAA,UACA,CAAC,UAAU;AACT,gBAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,sBAAQ;AACR,sBAAQ;AAAA,gBACN,IAAI,IAAI;AAAA,gBACR,QAAQ,MAAM;AAAA,cAChB,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAEA,cAAM,kBAAkB,cAAc,GAAG,YAAY,CAAC,UAAU;AAC9D,cAAI,MAAM,UAAU,IAAI,MAAM,CAAC,UAAU;AACvC,oBAAQ;AACR,mBAAO,IAAI,MAAM,MAAM,KAAK,CAAC;AAAA,UAC/B;AAAA,QACF,CAAC;AAID,gBAAQ,OAAO,IAAI,EAAE,EAAE,KAAK,CAAC,eAAe;AAC1C,cAAI,YAAY,CAAC,WAAY;AAC7B,cAAI,WAAW,WAAW,aAAa;AACrC,oBAAQ;AACR,oBAAQ;AAAA,cACN,IAAI,IAAI;AAAA,cACR,QAAQ,WAAW;AAAA,YACrB,CAAC;AAAA,UACH,WAAW,WAAW,WAAW,UAAU;AACzC,oBAAQ;AACR,mBAAO,IAAI,MAAM,WAAW,SAAS,YAAY,CAAC;AAAA,UACpD;AAAA,QACF,CAAC;AAGD,YAAI,SAAS,YAAY,QAAW;AAClC,sBAAY,WAAW,MAAM;AAC3B,gBAAI,CAAC,UAAU;AACb,sBAAQ;AACR;AAAA,gBACE,IAAI,MAAM,gCAAgC,QAAQ,OAAO,IAAI;AAAA,cAC/D;AAAA,YACF;AAAA,UACF,GAAG,QAAQ,OAAO;AAAA,QACpB;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,aACJ,QAC8B;AAC9B,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,YAAM,aAAa,OAAO,IAAI,CAAC,SAAS;AACtC,YAAI,QAAQ,OAAO,SAAS,YAAY,WAAW,MAAM;AACvD,iBAAO;AAAA,QACT;AACA,eAAO,EAAE,OAAO,MAAgB,SAAS,OAAU;AAAA,MACrD,CAAC;AAGD,YAAM,YAA8D,CAAC;AACrE,eAAS,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;AAC1C,cAAM,cAAc,WAAW,MAAM,UAAU,WAAW,CAAC,EAAE,KAAK;AAClE,YAAI,CAAC,YAAY,SAAS;AACxB,gBAAM,IAAI;AAAA,YACR,0BAA0B,CAAC,KAAK,YAAY,MAAM,OAAO;AAAA,UAC3D;AAAA,QACF;AACA,kBAAU,KAAK;AAAA,UACb,SAAS,YAAY;AAAA,UACrB,SAAS,WAAW,CAAC,EAAE;AAAA,QACzB,CAAC;AAAA,MACH;AAGA,YAAM,OAAO,MAAM,QAAQ;AAAA,QACzB,UAAU,IAAI,CAAC,OAAO;AAAA,UACpB,SAAS,WAAW;AAAA,UACpB,SAAS,EAAE;AAAA,UACX,gBAAgB,EAAE,SAAS;AAAA,UAC3B,gBAAgB,EAAE,SAAS;AAAA,QAC7B,EAAE;AAAA,MACJ;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,OAAO,IAA+C;AAC1D,YAAM,MAAM,MAAM,QAAQ,OAAO,EAAE;AACnC
,UAAI,CAAC,OAAO,IAAI,YAAY,WAAW,MAAM;AAC3C,eAAO;AAAA,MACT;AACA,aAAO;AAAA,IACT;AAAA,IAEA,MAAM,QACJ,QAC8B;AAC9B,YAAM,OAAO,MAAM,QAAQ,QAAQ;AAAA,QACjC,GAAG;AAAA,QACH,SAAS,WAAW;AAAA,MACtB,CAAC;AACD,aAAO;AAAA,IACT;AAAA,EACF;AACF;;;AC/WA,IAAM,aAA0B;AAAA,EAC9B;AAAA,IACE,SAAS;AAAA,IACT,IAAI,OAAO,OAAO;AAEhB,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,YAAY,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACnD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,mBAAmB,MAAM,EACnC,UAAU,mBAAmB,MAAM,EACnC;AAAA,QAAU;AAAA,QAAsB;AAAA,QAAW,CAAC,QAC3C,IAAI,QAAQ,EAAE,UAAU,CAAC;AAAA,MAC3B,EACC,UAAU,YAAY,MAAM,EAC5B,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,gBAAgB,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACxD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,kCAAkC,EAC9C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,YAAY,iBAAiB,CAAC,EACvC,OAAO,EACP,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,qCAAqC,EACjD,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,iBAAiB,CAAC,EACrC,QAAQ;AAEX,YAAM,GAAG,OACN,YAAY,iCAAiC,EAC7C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,eAAe,EAC3B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,QAAQ,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAChD,UAAU,SAAS,WAAW,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACpD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,UAAU,MAAM,EAC1B,UAAU,SAAS,MAAM,EACzB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,UAAU,gBAAgB,MAAM,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,6BAA6B,EACzC,YAAY,EACZ,GAAG,eAAe,EAClB,QAAQ,CAAC,UAAU,OAAO,CAAC,EAC3B,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,cAAc,EAC1B,YAAY,EACZ,UAAU,MAAM,QAAQ,CAAC,QAAQ,IAAI,WAAW,CAAC,EACjD,UAAU,UAAU,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EAClD,UAAU,aAAa,MAAM,EAC7B,UAAU,SAAS,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACjD,UAAU,WAAW,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACnD,UAAU,QAAQ,MAAM,EACxB,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,8BAA8B,EAC1C,YAAY,EACZ,GAAG,cAAc,EACjB,QAAQ,CAAC,UAAU,YAAY,CAAC,EAChC,QAAQ;AAGX,YAAM,GAAG,OACN,YAAY,yBAAyB,EACrC,YAAY,EACZ,UAAU,WAAW,WAAW,CAAC,QAAQ,IAAI,WAAW,CAAC,EACzD,UAAU,cAAc,QAAQ,CAAC,QAAQ,IAAI,QAAQ,CAAC,EACtD,QAAQ;AAAA,IACb;AAAA,EACF;AACF;AAKA,eAAe,kBAAkB,IAAuC;AACtE,MAAI;AACF,UAAM,SAAS,MAAM,GAClB,WAAW,yBAAyB,EACpC,OAAO,SAAS,EAChB,QAAQ,WAAW,MAAM,EACzB,MAAM,CAAC,EACP,iBAAiB;AAEpB,WAAO,QAAQ,WAAW;AAAA,EAC5B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,cAAc,IAAqC;AACvE,QAAM,iBAAiB,MAAM,kBAAkB,EAAE;AAEjD,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,UAAU,gBAAgB;AACtC,YAAM,UAAU,GAAG,EAAE;AAErB,YAAM,GACH,WAAW,yBAAyB,EACpC,OAAO;AAAA,QACN,SAAS,UAAU;AAAA,QACnB,aAAY,oBAAI,KAAK,GAAE,YAAY;AAAA,MACrC,CAAC,EACA,QAAQ;AAAA,IACb;AAAA,EACF;AACF;;;ACvJA,SAAS,YAAY;AAsIrB,SAAS,SAAS,KAAoC;AACpD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,SAAS,IAAI;AAAA,IACb,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,IAC/B,QAAQ,IAAI;AAAA,IACZ,gBAAgB,IAAI;AAAA,IACpB,gBAAgB,IAAI;AAAA,IACpB,kBAAkB,IAAI;AAAA,IACtB,UAAU,IAAI,WAAW,KAAK,MAAM,IAAI,QAAQ,IAAI;AAAA,IACpD,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,WAAW,IAAI;AAAA,EACjB;AACF;AAKA,SAAS,UAAU,KAAsC;AACvD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,MAAM,IAAI;AAAA,IACV,OAAO,IAAI;AAAA,IACX,QAAQ,IAAI;AAAA,IACZ,QAAQ,IAAI,SAAS,KAAK,MAAM,IAAI,MAAM,IAAI;AAAA,IAC9C,OAAO,IAAI;AAAA,IACX,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,EACnB;AACF;AAKA,SAAS,SAAS,KAAoC;AACpD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,OAAO,IAAI;AAAA,IACX,UAAU,IAAI;AAAA,IACd,OAAO,IAAI;AAAA,IACX,SAAS,IAAI;AAAA,IACb,MA
AM,IAAI,OAAO,KAAK,MAAM,IAAI,IAAI,IAAI;AAAA,IACxC,WAAW,IAAI;AAAA,EACjB;AACF;AAKO,SAAS,oBAAoB,IAA+B;AACjE,SAAO;AAAA,IACL,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAGnC,UAAI,MAAM,gBAAgB;AACxB,cAAM,WAAW,MAAM,GACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,YAAI,UAAU;AACZ,iBAAO,SAAS,QAAQ;AAAA,QAC1B;AAAA,MACF;AAEA,YAAM,KAAK,KAAK;AAChB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,UAAU,MAAM;AAAA,QAChB,SAAS,KAAK,UAAU,MAAM,OAAO;AAAA,QACrC,QAAQ;AAAA,QACR,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,iBAAiB,MAAM,kBAAkB;AAAA,QACzC,oBAAoB;AAAA,QACpB,UAAU;AAAA,QACV,QAAQ;AAAA,QACR,OAAO;AAAA,QACP,cAAc;AAAA,QACd,YAAY;AAAA,QACZ,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,gBAAgB,QAA0C;AAC9D,UAAI,OAAO,WAAW,GAAG;AACvB,eAAO,CAAC;AAAA,MACV;AAGA,aAAO,MAAM,GAAG,YAAY,EAAE,QAAQ,OAAO,QAAQ;AACnD,cAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,cAAM,OAAmC,CAAC;AAG1C,mBAAW,SAAS,QAAQ;AAE1B,cAAI,MAAM,gBAAgB;AACxB,kBAAM,WAAW,MAAM,IACpB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,YAAY,KAAK,MAAM,OAAO,EACpC,MAAM,mBAAmB,KAAK,MAAM,cAAc,EAClD,iBAAiB;AAEpB,gBAAI,UAAU;AACZ,mBAAK,KAAK,QAAQ;AAClB;AAAA,YACF;AAAA,UACF;AAEA,gBAAM,KAAK,KAAK;AAChB,eAAK,KAAK;AAAA,YACR;AAAA,YACA,UAAU,MAAM;AAAA,YAChB,SAAS,KAAK,UAAU,MAAM,OAAO;AAAA,YACrC,QAAQ;AAAA,YACR,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,iBAAiB,MAAM,kBAAkB;AAAA,YACzC,oBAAoB;AAAA,YACpB,UAAU;AAAA,YACV,QAAQ;AAAA,YACR,OAAO;AAAA,YACP,cAAc;AAAA,YACd,YAAY;AAAA,YACZ,YAAY;AAAA,UACd,CAAC;AAAA,QACH;AAGA,cAAM,UAAU,KAAK,OAAO,CAAC,MAAM,EAAE,eAAe,GAAG;AACvD,YAAI,QAAQ,SAAS,GAAG;AACtB,gBAAM,IAAI,WAAW,cAAc,EAAE,OAAO,OAAO,EAAE,QAAQ;AAAA,QAC/D;AAEA,eAAO,KAAK,IAAI,QAAQ;AAAA,MAC1B,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAAe,MAAqC;AAClE,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,UAA6C;AAAA,QACjD,YAAY;AAAA,MACd;AAEA,UAAI,KAAK,WAAW,OAAW,SAAQ,SAAS,KAAK;AACrD,UAAI,KAAK,qBAAqB;AAC5B,gBAAQ,qBAAqB,KAAK;AACpC,UAAI,KAAK,aAAa;AACpB,gBAAQ,WAAW,KAAK,WAAW,KAAK,UAAU,KAAK,QAAQ,IAAI;AACrE,UAAI,KAAK,WAAW;AAClB,gBAAQ,SAAS,KAAK,UAAU,KAAK,MAAM;AAC7C,UAAI,KAAK,UAAU,OAAW,SAAQ,QAAQ,KAAK;AACnD,UAAI,KAAK,gBAAgB;AACvB,gBAAQ,eAAe,KAAK;AAE9B,YAAM,GACH,YAAY,cAAc,EAC1B,IAAI,OAAO,EACX,MAAM,MAAM,KAAK,KAAK,EACtB,QAAQ;AAAA,IACb;AAAA,IAEA,MAAM,UAAU,OAA8B;AAE5C,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACxE,YAAM,GAAG,WAAW,eAAe,EAAE,MAAM,UAAU,KAAK,KAAK,EAAE,QAAQ;AACzE,YAAM,GAAG,WAAW,cAAc,EAAE,MAAM,MAAM,KAAK,KAAK,EAAE,QAAQ;AAAA,IACtE;AAAA,IAEA,MAAM,OAAO,OAAoC;AAC/C,YAAM,MAAM,MAAM,GACf,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,MAAM,KAAK,KAAK,EACtB,iBAAiB;AAEpB,aAAO,MAAM,SAAS,GAAG,IAAI;AAAA,IAC/B;AAAA,IAEA,MAAM,QAAQ,QAAoC;AAChD,UAAI,QAAQ,GAAG,WAAW,cAAc,EAAE,UAAU;AAEpD,UAAI,QAAQ,QAAQ;AAClB,gBAAQ,MAAM,MAAM,UAAU,KAAK,OAAO,MAAM;AAAA,MAClD;AACA,UAAI,QAAQ,SAAS;AACnB,gBAAQ,MAAM,MAAM,YAAY,KAAK,OAAO,OAAO;AAAA,MACrD;AAEA,cAAQ,MAAM,QAAQ,cAAc,MAAM;AAE1C,UAAI,QAAQ,UAAU,QAAW;AAC/B,gBAAQ,MAAM,MAAM,OAAO,KAAK;AAAA,MAClC;AACA,UAAI,QAAQ,WAAW,QAAW;AAEhC,YAAI,OAAO,UAAU,QAAW;AAC9B,kBAAQ,MAAM,MAAM,EAAE;AAAA,QACxB;AACA,gBAAQ,MAAM,OAAO,OAAO,MAAM;AAAA,MACpC;AAEA,YAAM,OAAO,MAAM,MAAM,QAAQ;AACjC,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,IAEA,MAAM,kBACJ,wBACqB;AACrB,UAAI,QAAQ,GACT,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,UAAU,KAAK,SAAS,EAC9B,QAAQ,cAAc,KAAK,EAC3B,MAAM,CAAC;AAEV,UAAI,uBAAuB,SAAS,GAAG;AACrC,gBAAQ,MAAM;AAAA,UAAM,CAAC,OACnB,GAAG,GAAG;AAAA,YACJ,GAAG,mBAAmB,MAAM,IAAI;AAAA,YAChC,GAAG,mBAAmB,UAAU,sBAAsB;AAAA,UACxD,CAAC;AAAA,QACH;AAAA,MACF;AAEA,YAAM,MAAM,MAAM,MAAM,iBAAiB;AACzC,aAAO,MAAM,SAAS,GAAG,IAAI;AAAA,IAC/B;AAAA,IAEA,MAAM,WAAW,OAAuC;AACtD,YAAM,eAAc,oBAAI,KAAK,GAAE,YAAY;AAC3C,YAAM,KAAK,KAAK;AAEhB,YAAM,OAAkC;AAAA,Q
ACtC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,MAAM,MAAM;AAAA,QACZ,OAAO,MAAM;AAAA,QACb,QAAQ,MAAM;AAAA,QACd,QACE,MAAM,WAAW,SAAY,KAAK,UAAU,MAAM,MAAM,IAAI;AAAA,QAC9D,OAAO,MAAM,SAAS;AAAA,QACtB,YAAY,MAAM;AAAA,QAClB,cAAc;AAAA,MAChB;AAEA,YAAM,GAAG,WAAW,eAAe,EAAE,OAAO,IAAI,EAAE,QAAQ;AAE1D,aAAO,UAAU,IAAI;AAAA,IACvB;AAAA,IAEA,MAAM,SAAS,OAAgC;AAC7C,YAAM,OAAO,MAAM,GAChB,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,SAAS,KAAK,EACtB,QAAQ;AAEX,aAAO,KAAK,IAAI,SAAS;AAAA,IAC3B;AAAA,IAEA,MAAM,iBAAiB,OAAe,MAAoC;AACxE,YAAM,MAAM,MAAM,GACf,WAAW,eAAe,EAC1B,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,MAAM,QAAQ,KAAK,IAAI,EACvB,MAAM,UAAU,KAAK,WAAW,EAChC,iBAAiB;AAEpB,aAAO,MAAM,UAAU,GAAG,IAAI;AAAA,IAChC;AAAA,IAEA,MAAM,UAAU,OAAqC;AACnD,YAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,YAAM,KAAK,KAAK;AAEhB,YAAM,MAAgC;AAAA,QACpC;AAAA,QACA,QAAQ,MAAM;AAAA,QACd,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,QACb,SAAS,MAAM;AAAA,QACf,MAAM,MAAM,SAAS,SAAY,KAAK,UAAU,MAAM,IAAI,IAAI;AAAA,QAC9D,YAAY;AAAA,MACd;AAEA,YAAM,GAAG,WAAW,cAAc,EAAE,OAAO,GAAG,EAAE,QAAQ;AAExD,aAAO,SAAS,GAAG;AAAA,IACrB;AAAA,IAEA,MAAM,QAAQ,OAA+B;AAC3C,YAAM,OAAO,MAAM,GAChB,WAAW,cAAc,EACzB,UAAU,EACV,MAAM,UAAU,KAAK,KAAK,EAC1B,QAAQ,cAAc,KAAK,EAC3B,QAAQ;AAEX,aAAO,KAAK,IAAI,QAAQ;AAAA,IAC1B;AAAA,EACF;AACF;;;AC7bO,IAAM,iBAAN,cAA6B,MAAM;AAAA,EACxC,YAAY,OAAe;AACzB,UAAM,sBAAsB,KAAK,EAAE;AACnC,SAAK,OAAO;AAAA,EACd;AACF;;;ACFO,SAAS,kBACd,KACA,SACA,SACA,cACa;AACb,MAAI,YAAY,IAAI;AACpB,MAAI,kBAAiC;AAErC,SAAO;AAAA,IACL,IAAI,QAAgB;AAClB,aAAO,IAAI;AAAA,IACb;AAAA,IAEA,MAAM,IAAO,MAAc,IAAsC;AAE/D,YAAM,aAAa,MAAM,QAAQ,OAAO,IAAI,EAAE;AAC9C,UAAI,YAAY,WAAW,aAAa;AACtC,cAAM,IAAI,eAAe,IAAI,EAAE;AAAA,MACjC;AAGA,YAAM,eAAe,MAAM,QAAQ,iBAAiB,IAAI,IAAI,IAAI;AAChE,UAAI,cAAc;AAChB;AACA,eAAO,aAAa;AAAA,MACtB;AAGA,wBAAkB;AAGlB,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY,KAAK,IAAI;AAG3B,mBAAa,KAAK;AAAA,QAChB,MAAM;AAAA,QACN,OAAO,IAAI;AAAA,QACX;AAAA,QACA,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAED,UAAI;AAEF,cAAM,SAAS,MAAM,GAAG;AAGxB,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,QAAQ;AAAA,UACR;AAAA,QACF,CAAC;AAGD;AACA,cAAM,QAAQ,UAAU,IAAI,IAAI,EAAE,kBAAkB,UAAU,CAAC;AAG/D,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV,WAAW,YAAY;AAAA,UACvB,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,QACzB,CAAC;AAED,eAAO;AAAA,MACT,SAAS,OAAO;AAEd,cAAM,eACJ,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAEvD,cAAM,QAAQ,WAAW;AAAA,UACvB,OAAO,IAAI;AAAA,UACX;AAAA,UACA,OAAO;AAAA,UACP,QAAQ;AAAA,UACR,OAAO;AAAA,UACP;AAAA,QACF,CAAC;AAGD,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX;AAAA,UACA,UAAU;AAAA,UACV;AAAA,UACA,OAAO;AAAA,QACT,CAAC;AAED,cAAM;AAAA,MACR,UAAE;AAEA,0BAAkB;AAAA,MACpB;AAAA,IACF;AAAA,IAEA,SAAS,SAAiB,OAAgB,SAAwB;AAEhE,cAAQ,UAAU,IAAI,IAAI;AAAA,QACxB,UAAU,EAAE,SAAS,OAAO,QAAQ;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,IAEA,KAAK;AAAA,MACH,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,KAAK,SAAiB,MAAsB;AAC1C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,MAEA,MAAM,SAAiB,MAAsB;AAC3C,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,UACX,UAAU;AAAA,UACV,OAAO;AAAA,UACP;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;ACrHO,SAAS,aACd,QACA,SACA,cACA,aACQ;AACR,MAAI,UAAU;AACd,MAAI,oBAA0C;AAC9C,MAAI,iBAAuD;AAC3D,MAAI,eAAoC;AACxC,MAAI,oBAA2D;AAC/D,MAAI,eAA8B;AAKlC,iBAAe,mBAAkC;AAC/C,UAAM,iBAAiB,IAAI;AAAA,MACzB,KAAK,IAAI,IAAI,OAAO;AAAA,IACtB,EAAE,YAAY;AACd,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAE/D,eAAW,OAAO,aAAa;AAC7B,
UAAI,IAAI,cAAc,gBAAgB;AAEpC,cAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,UAC9B,QAAQ;AAAA,QACV,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAKA,iBAAe,kBAAiC;AAC9C,QAAI,cAAc;AAChB,YAAM,QAAQ,UAAU,cAAc;AAAA,QACpC,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,MACtC,CAAC;AAAA,IACH;AAAA,EACF;AAKA,WAAS,gBAAgB,OAAwB;AAC/C,WAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,EAC9D;AAKA,iBAAe,iBACb,OACA,SACA,QACA,WACe;AAEf,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,YAAY,WAAW,aAAa;AACtC;AAAA,IACF;AAEA,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR;AAAA,IACF,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,KAAK,IAAI,IAAI;AAAA,IACzB,CAAC;AAAA,EACH;AAKA,iBAAe,iBACb,OACA,SACA,OACe;AAGf,QAAI,iBAAiB,gBAAgB;AACnC;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,QAAQ,OAAO,KAAK;AAC7C,QAAI,YAAY,WAAW,aAAa;AACtC;AAAA,IACF;AAEA,UAAM,eAAe,gBAAgB,KAAK;AAG1C,UAAM,QAAQ,MAAM,QAAQ,SAAS,KAAK;AAC1C,UAAM,aAAa,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,QAAQ;AAE1D,UAAM,QAAQ,UAAU,OAAO;AAAA,MAC7B,QAAQ;AAAA,MACR,OAAO;AAAA,IACT,CAAC;AAED,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN;AAAA,MACA;AAAA,MACA,OAAO;AAAA,MACP,gBAAgB,YAAY,QAAQ;AAAA,IACtC,CAAC;AAAA,EACH;AAKA,iBAAe,WACb,KACA,KACe;AAEf,mBAAe,IAAI;AAInB,wBAAoB,YAAY,MAAM;AACpC,sBAAgB,EAAE,MAAM,CAAC,UAAU;AACjC,qBAAa,KAAK;AAAA,UAChB,MAAM;AAAA,UACN,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,UAC5D,SAAS;AAAA,UACT,OAAO,IAAI;AAAA,QACb,CAAC;AAAA,MACH,CAAC;AAAA,IACH,GAAG,OAAO,iBAAiB;AAG3B,iBAAa,KAAK;AAAA,MAChB,MAAM;AAAA,MACN,OAAO,IAAI;AAAA,MACX,SAAS,IAAI;AAAA,MACb,SAAS,IAAI;AAAA,IACf,CAAC;AAED,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEF,YAAM,OAAO,kBAAkB,KAAK,IAAI,SAAS,SAAS,YAAY;AACtE,YAAM,SAAS,MAAM,IAAI,GAAG,MAAM,IAAI,OAAO;AAG7C,UAAI,IAAI,cAAc;AACpB,cAAM,cAAc,IAAI,aAAa,UAAU,MAAM;AACrD,YAAI,CAAC,YAAY,SAAS;AACxB,gBAAM,IAAI,MAAM,mBAAmB,YAAY,MAAM,OAAO,EAAE;AAAA,QAChE;AAAA,MACF;AAEA,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,QAAQ,SAAS;AAAA,IAC/D,SAAS,OAAO;AACd,YAAM,iBAAiB,IAAI,IAAI,IAAI,SAAS,KAAK;AAAA,IACnD,UAAE;AAEA,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AACA,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,iBAAe,iBAAmC;AAEhD,UAAM,cAAc,MAAM,QAAQ,QAAQ,EAAE,QAAQ,UAAU,CAAC;AAC/D,UAAM,yBAAyB,YAC5B;AAAA,MACC,CAAC,MACC,EAAE,mBAAmB;AAAA,IACzB,EACC,IAAI,CAAC,MAAM,EAAE,cAAc;AAG9B,UAAM,MAAM,MAAM,QAAQ,kBAAkB,sBAAsB;AAClE,QAAI,CAAC,KAAK;AACR,aAAO;AAAA,IACT;AAGA,UAAM,MAAM,YAAY,IAAI,IAAI,OAAO;AACvC,QAAI,CAAC,KAAK;AAER,YAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,QAC9B,QAAQ;AAAA,QACR,OAAO,gBAAgB,IAAI,OAAO;AAAA,MACpC,CAAC;AACD,aAAO;AAAA,IACT;AAGA,UAAM,QAAQ,UAAU,IAAI,IAAI;AAAA,MAC9B,QAAQ;AAAA,MACR,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,IACtC,CAAC;AAED,UAAM,WAAW,KAAK,GAAG;AAEzB,WAAO;AAAA,EACT;AAEA,iBAAe,OAAsB;AACnC,QAAI,CAAC,SAAS;AACZ;AAAA,IACF;AAEA,UAAM,SAAS,YAAY;AAEzB,YAAM,iBAAiB;AACvB,YAAM,eAAe;AAAA,IACvB;AAEA,QAAI;AACF,0BAAoB,OAAO;AAC3B,YAAM;AAAA,IACR,UAAE;AACA,0BAAoB;AAAA,IACtB;AAEA,QAAI,SAAS;AACX,uBAAiB,WAAW,MAAM,KAAK,GAAG,OAAO,eAAe;AAAA,IAClE,WAAW,cAAc;AACvB,mBAAa;AACb,qBAAe;AAAA,IACjB;AAAA,EACF;AAEA,SAAO;AAAA,IACL,IAAI,YAAqB;AACvB,aAAO;AAAA,IACT;AAAA,IAEA,QAAc;AACZ,UAAI,SAAS;AACX;AAAA,MACF;AACA,gBAAU;AACV,WAAK;AAAA,IACP;AAAA,IAEA,MAAM,OAAsB;AAC1B,UAAI,CAAC,SAAS;AACZ;AAAA,MACF;AAEA,gBAAU;AAEV,UAAI,gBAAgB;AAClB,qBAAa,cAAc;AAC3B,yBAAiB;AAAA,MACnB;AAEA,UAAI,mBAAmB;AACrB,sBAAc,iBAAiB;AAC/B,4BAAoB;AAAA,MACtB;AAEA,UAAI,mBAAmB;AAErB,eAAO,IAAI,QAAc,CAAC,YAAY;AACpC,yBAAe;AAAA,QACjB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;;;APrRA,IAAM,WAAW;AAAA,EACf,iBAAiB;AAAA,EACjB,mBAAmB;AAAA,EACnB,gBAAgB;AAClB;AAiHO,SAAS,cAAc,SAAkC;AAC9D,QAAM,SAAS;AAAA,IACb,iBAAiB,QAAQ,mBAAmB,SAAS;AAAA,IACrD,mBAAmB,QAAQ,qBAAqB,SAAS;AAAA,IACzD,gBAAgB,QAAQ,kBAAkB,SAAS;AAAA,EACrD;AAEA,QAAM,KAAK,IAAI,OAAiB,EAAE,SAAS,QAAQ,QAAQ,CAAC;AAC5D,QAAM,UAAU,
oBAAoB,EAAE;AACtC,QAAM,eAAe,mBAAmB;AACxC,QAAM,cAAc,kBAAkB;AACtC,QAAM,SAAS,aAAa,QAAQ,SAAS,cAAc,WAAW;AAGtE,MAAI,YAAkC;AACtC,MAAI,WAAW;AAEf,QAAM,UAAmB;AAAA,IACvB;AAAA,IACA;AAAA,IACA,IAAI,aAAa;AAAA,IACjB,MAAM,aAAa;AAAA,IACnB,SAAS,aAAa;AAAA,IACtB,OAAO,OAAO;AAAA,IACd,MAAM,OAAO;AAAA,IAEb,UAKE,YACA,IAQA;AACA,aAAO,gBAAgB,YAAY,IAAI,SAAS,cAAc,WAAW;AAAA,IAC3E;AAAA,IAEA,QAAQ,QAAQ;AAAA,IAChB,SAAS,QAAQ;AAAA,IAEjB,IAAI,QAA6B;AAC/B,aAAO,QAAQ,OAAO;AAAA,IACxB;AAAA,IAEA,MAAM,MAAM,OAA8B;AACxC,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,+BAA+B,KAAK,EAAE;AAAA,MACxD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AAEA,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,QACR,OAAO;AAAA,MACT,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,OAAO,OAA8B;AACzC,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,gCAAgC,KAAK,EAAE;AAAA,MACzD;AACA,UAAI,IAAI,WAAW,UAAU;AAC3B,cAAM,IAAI,MAAM,6BAA6B,KAAK,EAAE;AAAA,MACtD;AACA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,wCAAwC,KAAK,EAAE;AAAA,MACjE;AAEA,YAAM,QAAQ,UAAU,OAAO;AAAA,QAC7B,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAAA,IAEA,MAAM,UAAU,OAA8B;AAC5C,YAAM,MAAM,MAAM,QAAQ,OAAO,KAAK;AACtC,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AACA,UAAI,IAAI,WAAW,WAAW;AAC5B,cAAM,IAAI,MAAM,8BAA8B,KAAK,EAAE;AAAA,MACvD;AAEA,YAAM,QAAQ,UAAU,KAAK;AAAA,IAC/B;AAAA,IAEA,MAAM,UAAyB;AAE7B,UAAI,UAAU;AACZ;AAAA,MACF;AAGA,UAAI,WAAW;AACb,eAAO;AAAA,MACT;AAGA,kBAAY,cAAc,EAAE,EACzB,KAAK,MAAM;AACV,mBAAW;AAAA,MACb,CAAC,EACA,QAAQ,MAAM;AACb,oBAAY;AAAA,MACd,CAAC;AAEH,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;;;AQ7RO,SAAS,qBAAoC;AAClD,SAAO;AAAA,IACL,MAAM;AAAA,IACN,QAAQ,SAAS;AACf,cAAQ,GAAG,aAAa,OAAO,UAAU;AACvC,cAAM,QAAQ,QAAQ,UAAU;AAAA,UAC9B,OAAO,MAAM;AAAA,UACb,UAAU,MAAM;AAAA,UAChB,OAAO,MAAM;AAAA,UACb,SAAS,MAAM;AAAA,UACf,MAAM,MAAM;AAAA,QACd,CAAC;AAAA,MACH,CAAC;AAAA,IACH;AAAA,EACF;AACF;","names":[]}
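The new `withLogPersistence` plugin (its source is embedded in the bundle map above) listens for `log:write` events and writes each entry through `storage.createLog`. A minimal wiring sketch, with the assumption that the plugin's `install()` hook is called by hand, since this diff does not show how `createDurably` registers plugins:

```ts
import { createDurably, withLogPersistence } from '@coji/durably'
import SQLite from 'better-sqlite3'
import { SqliteDialect } from 'kysely'

const durably = createDurably({
  dialect: new SqliteDialect({ database: new SQLite('durably.db') }),
})

// Assumption: apply the plugin by invoking install() directly; per the
// embedded source, it subscribes to 'log:write' and persists each entry
// via durably.storage.createLog.
withLogPersistence().install(durably)
```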
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@coji/durably",
3
- "version": "0.1.0",
3
+ "version": "0.2.0",
4
4
  "description": "Step-oriented resumable batch execution for Node.js and browsers using SQLite",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -16,7 +16,8 @@
16
16
  }
17
17
  },
18
18
  "files": [
19
- "dist"
19
+ "dist",
20
+ "README.md"
20
21
  ],
21
22
  "keywords": [
22
23
  "batch",