veryfront 0.0.51 → 0.0.53

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
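The changed file is a bundled source map whose `sourcesContent` array embeds the TypeScript sources for veryfront's workflow DSL (`workflow`, `step`, `parallel`, `branch`, `waitForApproval`, and related builders). For orientation, the sketch below is condensed from the `@example` block of `src/ai/workflow/dsl/workflow.ts` as shipped in these versions; the agent and tool IDs (`researcher`, `writer`, `imageGenerator`, `publisher`) are illustrative names from that example, not identifiers defined by the package.

```typescript
import { workflow, step, parallel, branch, waitForApproval } from 'veryfront/ai/workflow';
import { z } from 'zod';

export default workflow({
  id: 'content-pipeline',
  description: 'Generate and publish content with human review',

  // Input is validated against this Zod schema before the run starts
  inputSchema: z.object({
    topic: z.string(),
    requiresApproval: z.boolean().default(true),
  }),

  timeout: '2h',

  // Steps can be computed from the validated input
  steps: ({ input }) => [
    // Research phase
    step('research', {
      agent: 'researcher',
      input: `Research: ${input.topic}`,
    }),

    // Generate content in parallel
    parallel('generate', [
      step('write-article', { agent: 'writer' }),
      step('create-images', { tool: 'imageGenerator' }),
    ]),

    // Optional human approval gate
    branch('approval-gate', {
      condition: () => input.requiresApproval,
      then: [
        waitForApproval('human-review', {
          timeout: '24h',
          message: 'Please review the content',
        }),
      ],
    }),

    // Publish
    step('publish', { agent: 'publisher' }),
  ],
});
```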
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/ai/workflow/types.ts", "../../../src/ai/workflow/dsl/workflow.ts", "../../../src/ai/workflow/dsl/step.ts", "../../../src/ai/workflow/dsl/parallel.ts", "../../../src/ai/workflow/dsl/map.ts", "../../../src/ai/workflow/dsl/sub-workflow.ts", "../../../src/ai/workflow/dsl/branch.ts", "../../../src/ai/workflow/dsl/wait.ts", "../../../src/platform/compat/path-helper.ts", "../../../src/core/errors/veryfront-error.ts", "../../../src/platform/compat/runtime.ts", "../../../src/platform/compat/fs.ts", "../../../src/core/utils/runtime-guards.ts", "../../../src/core/utils/logger/env.ts", "../../../src/core/utils/logger/logger.ts", "../../../src/core/utils/constants/cache.ts", "../../../deno.json", "../../../src/platform/compat/process.ts", "../../../src/core/utils/version.ts", "../../../src/core/utils/constants/http.ts", "../../../src/core/utils/constants/hmr.ts", "../../../src/core/utils/constants/network.ts", "../../../src/core/utils/constants/server.ts", "../../../src/core/utils/paths.ts", "../../../src/core/utils/bundle-manifest.ts", "../../../src/ai/workflow/blob/local-storage.ts", "../../../src/ai/workflow/blob/s3-storage.ts", "../../../src/ai/workflow/blob/gcs-storage.ts", "../../../src/ai/workflow/backends/types.ts", "../../../src/ai/workflow/backends/memory.ts", "../../../src/ai/workflow/backends/redis.ts", "../../../src/ai/workflow/executor/dag-executor.ts", "../../../src/ai/workflow/executor/checkpoint-manager.ts", "../../../src/ai/workflow/executor/step-executor.ts", "../../../src/ai/workflow/executor/workflow-executor.ts", "../../../src/ai/workflow/runtime/approval-manager.ts", "../../../src/ai/workflow/runtime/agent-registry.ts", "../../../src/ai/workflow/api/workflow-client.ts", "../../../src/ai/workflow/backends/temporal.ts", "../../../src/ai/workflow/backends/inngest.ts", "../../../src/ai/workflow/backends/cloudflare.ts", "../../../src/ai/workflow/react/use-workflow.ts", "../../../src/ai/workflow/react/use-approval.ts", "../../../src/ai/workflow/react/use-workflow-list.ts", "../../../src/ai/workflow/react/use-workflow-start.ts"],
- "sourcesContent": ["/**\n * Veryfront Workflow Types\n *\n * Core type definitions for durable, DAG-based agentic workflows\n */\n\nimport type { z } from \"zod\";\nimport type { Agent } from \"../types/agent.ts\";\nimport type { Tool } from \"../types/tool.ts\";\nimport type { BlobRef, BlobStorage } from \"./blob/types.ts\";\n\n// ============================================================================\n// Workflow Status\n// ============================================================================\n\n/**\n * Status of a workflow run\n */\nexport type WorkflowStatus =\n | \"pending\" // Created but not started\n | \"running\" // Currently executing\n | \"waiting\" // Paused, waiting for approval/event\n | \"completed\" // Successfully finished\n | \"failed\" // Failed with error\n | \"cancelled\"; // Cancelled by user\n\n/**\n * Status of a single node in the workflow\n */\nexport type NodeStatus =\n | \"pending\" // Not yet executed\n | \"running\" // Currently executing\n | \"completed\" // Successfully finished\n | \"failed\" // Failed with error\n | \"skipped\"; // Skipped due to condition\n\n// ============================================================================\n// Workflow Node Types\n// ============================================================================\n\n/**\n * Types of nodes in a workflow DAG\n */\nexport type WorkflowNodeType =\n | \"step\" // Single agent or tool execution\n | \"parallel\" // Parallel execution of multiple nodes\n | \"map\" // Dynamic fan-out/map-reduce\n | \"branch\" // Conditional branching\n | \"wait\" // Wait for approval or event\n | \"subWorkflow\"; // Nested workflow execution\n\n/**\n * Retry configuration for a step\n */\nexport interface RetryConfig {\n /** Maximum number of retry attempts */\n maxAttempts?: number;\n /** Backoff strategy */\n backoff?: \"fixed\" | \"linear\" | \"exponential\";\n /** Initial delay in milliseconds */\n initialDelay?: number;\n /** Maximum delay between retries */\n maxDelay?: number;\n}\n\n/**\n * Base configuration for all workflow nodes\n */\nexport interface BaseNodeConfig {\n /** Whether to checkpoint after this node */\n checkpoint?: boolean;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Timeout for this node */\n timeout?: string | number;\n /** Condition to skip this node */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Step node configuration (agent or tool execution)\n */\nexport interface StepNodeConfig extends BaseNodeConfig {\n type: \"step\";\n /** Agent ID or agent instance to execute */\n agent?: string | Agent;\n /** Tool ID or tool instance to execute */\n tool?: string | Tool | undefined;\n /** Input for the agent/tool - can be static or computed from context */\n input?:\n | string\n | Record<string, unknown>\n | ((context: WorkflowContext) => unknown);\n}\n\n/**\n * Parallel node configuration (concurrent execution)\n */\nexport interface ParallelNodeConfig extends BaseNodeConfig {\n type: \"parallel\";\n /** Nodes to execute in parallel */\n nodes: WorkflowNode[];\n /** How to handle parallel completion */\n strategy?: \"all\" | \"race\" | \"allSettled\";\n}\n\n/**\n * Branch node configuration (conditional execution)\n */\nexport interface BranchNodeConfig extends BaseNodeConfig {\n type: \"branch\";\n /** Condition to evaluate */\n condition: (context: WorkflowContext) => boolean | Promise<boolean>;\n /** Nodes to execute if condition is true */\n then: WorkflowNode[];\n /** Nodes to execute if condition is false */\n 
else?: WorkflowNode[];\n}\n\n/**\n * Wait node configuration (approval or event)\n */\nexport interface WaitNodeConfig extends BaseNodeConfig {\n type: \"wait\";\n /** Type of wait */\n waitType: \"approval\" | \"event\";\n /** Message to display for approval */\n message?: string;\n /** Payload to include with approval request */\n payload?: unknown | ((context: WorkflowContext) => unknown);\n /** Allowed approvers (email or user IDs) */\n approvers?: string[];\n /** Event name to wait for (for event type) */\n eventName?: string;\n}\n\n/**\n * Sub-workflow node configuration\n */\nexport interface SubWorkflowNodeConfig extends BaseNodeConfig {\n type: \"subWorkflow\";\n /** Workflow ID or workflow definition to execute */\n workflow: string | WorkflowDefinition;\n /** Input for the sub-workflow */\n input?: unknown | ((context: WorkflowContext) => unknown);\n /** Transform the sub-workflow output */\n output?: (result: unknown) => unknown;\n}\n\n/**\n * Map node configuration (dynamic fan-out)\n */\nexport interface MapNodeConfig extends BaseNodeConfig {\n type: \"map\";\n /** Collection to iterate over (array) */\n items: unknown[] | ((context: WorkflowContext) => unknown[] | Promise<unknown[]>);\n /** Node or workflow to execute for each item */\n processor: WorkflowNode | WorkflowDefinition;\n /** Maximum concurrent executions */\n concurrency?: number;\n}\n\n/**\n * Union of all node configurations\n */\nexport type WorkflowNodeConfig =\n | StepNodeConfig\n | ParallelNodeConfig\n | MapNodeConfig\n | BranchNodeConfig\n | WaitNodeConfig\n | SubWorkflowNodeConfig;\n\n/**\n * A node in the workflow DAG\n */\nexport interface WorkflowNode {\n /** Unique node ID within the workflow */\n id: string;\n /** Node configuration */\n config: WorkflowNodeConfig;\n /** Dependencies (node IDs that must complete before this node) */\n dependsOn?: string[];\n}\n\n// ============================================================================\n// Workflow Definition\n// ============================================================================\n\n/**\n * Workflow context - accumulated data during execution\n */\nexport interface WorkflowContext {\n /** Input provided when workflow was started */\n input: unknown;\n /** Results from each completed node, keyed by node ID */\n [nodeId: string]: unknown;\n}\n\n/**\n * Helper to resolve BlobRefs into actual content.\n */\nexport interface BlobResolver {\n /** Get blob content as text. */\n getText(ref: BlobRef): Promise<string | null>;\n /** Get blob content as Uint8Array. */\n getBytes(ref: BlobRef): Promise<Uint8Array | null>;\n /** Get blob content as ReadableStream. */\n getStream(ref: BlobRef): Promise<ReadableStream | null>;\n /** Get blob metadata. */\n stat(ref: BlobRef): Promise<BlobRef | null>;\n /** Delete blob data. 
*/\n delete(ref: BlobRef): Promise<void>;\n}\n\n/**\n * Step builder function context\n */\nexport interface StepBuilderContext<TInput = unknown> {\n /** Original workflow input */\n input: TInput;\n /** Accumulated context from previous steps */\n context: WorkflowContext;\n /** Blob storage access (if configured) */\n blobStorage?: BlobStorage;\n /** Helper to resolve BlobRefs to content */\n blob?: BlobResolver;\n}\n\n/**\n * Workflow definition\n */\nexport interface WorkflowDefinition<\n TInput = unknown,\n TOutput = unknown,\n> {\n /** Unique workflow identifier */\n id: string;\n /** Optional description */\n description?: string;\n /** Optional version */\n version?: string;\n /** Input validation schema */\n inputSchema?: z.ZodSchema<TInput>;\n /** Output validation schema */\n outputSchema?: z.ZodSchema<TOutput>;\n /** Default retry configuration for all steps */\n retry?: RetryConfig;\n /** Default timeout for the entire workflow */\n timeout?: string | number;\n /** Workflow steps - can be static or dynamic based on input */\n steps:\n | WorkflowNode[]\n | ((context: StepBuilderContext<TInput>) => WorkflowNode[]);\n /** Error handler */\n onError?: (error: Error, context: WorkflowContext) => void | Promise<void>;\n /** Completion handler */\n onComplete?: (\n result: TOutput,\n context: WorkflowContext,\n ) => void | Promise<void>;\n}\n\n// ============================================================================\n// Workflow Run State\n// ============================================================================\n\n/**\n * State of a single node during execution\n */\nexport interface NodeState {\n /** Node ID */\n nodeId: string;\n /** Current status */\n status: NodeStatus;\n /** Input provided to the node */\n input?: unknown;\n /** Output produced by the node */\n output?: unknown;\n /** Error message if failed */\n error?: string;\n /** Current attempt number (for retries) */\n attempt: number;\n /** When execution started */\n startedAt?: Date;\n /** When execution completed */\n completedAt?: Date;\n}\n\n/**\n * Checkpoint for workflow resume\n */\nexport interface Checkpoint {\n /** Unique checkpoint ID */\n id: string;\n /** Node ID where checkpoint was created */\n nodeId: string;\n /** When checkpoint was created */\n timestamp: Date;\n /** Context at checkpoint time */\n context: WorkflowContext;\n /** Node states at checkpoint time */\n nodeStates: Record<string, NodeState>;\n}\n\n/**\n * Pending approval request\n */\nexport interface PendingApproval {\n /** Unique approval ID */\n id: string;\n /** Node ID that requested approval */\n nodeId: string;\n /** Message for the approver */\n message: string;\n /** Payload with context for the approver */\n payload: unknown;\n /** Allowed approvers (if restricted) */\n approvers?: string[];\n /** When approval was requested */\n requestedAt: Date;\n /** When approval expires */\n expiresAt?: Date;\n /** Current approval status */\n status: \"pending\" | \"approved\" | \"rejected\" | \"expired\";\n /** Who approved/rejected */\n decidedBy?: string;\n /** When decision was made */\n decidedAt?: Date;\n /** Optional comment from approver */\n comment?: string;\n}\n\n/**\n * Workflow run - tracks execution of a workflow instance\n */\nexport interface WorkflowRun<TInput = unknown, TOutput = unknown> {\n /** Unique run ID */\n id: string;\n /** Workflow definition ID */\n workflowId: string;\n /** Workflow version */\n version?: string;\n /** Current status */\n status: WorkflowStatus;\n /** Input provided when started 
*/\n input: TInput;\n /** Final output (when completed) */\n output?: TOutput;\n\n // Execution state\n /** State of each node in the workflow */\n nodeStates: Record<string, NodeState>;\n /** Currently executing node IDs */\n currentNodes: string[];\n /** Accumulated context */\n context: WorkflowContext;\n\n // Durability\n /** Checkpoints for resume */\n checkpoints: Checkpoint[];\n /** Pending approvals */\n pendingApprovals: PendingApproval[];\n\n // Error state\n /** Error information if failed */\n error?: {\n message: string;\n stack?: string;\n nodeId?: string;\n };\n\n // Timing\n /** When run was created */\n createdAt: Date;\n /** When execution started */\n startedAt?: Date;\n /** When execution completed */\n completedAt?: Date;\n}\n\n// ============================================================================\n// Approval Decision\n// ============================================================================\n\n/**\n * Decision on a pending approval\n */\nexport interface ApprovalDecision {\n /** Whether the approval was granted */\n approved: boolean;\n /** Who made the decision */\n approver: string;\n /** Optional comment */\n comment?: string;\n}\n\n// ============================================================================\n// Workflow Job (for queue-based execution)\n// ============================================================================\n\n/**\n * Job for queue-based workflow execution\n */\nexport interface WorkflowJob {\n /** Run ID */\n runId: string;\n /** Workflow ID */\n workflowId: string;\n /** Input data */\n input: unknown;\n /** Priority (higher = more urgent) */\n priority?: number;\n /** When job was created */\n createdAt: Date;\n}\n\n// ============================================================================\n// Run Filter (for querying runs)\n// ============================================================================\n\n/**\n * Filter options for listing workflow runs\n */\nexport interface RunFilter {\n /** Filter by workflow ID */\n workflowId?: string;\n /** Filter by status */\n status?: WorkflowStatus | WorkflowStatus[];\n /** Filter by creation date (after) */\n createdAfter?: Date;\n /** Filter by creation date (before) */\n createdBefore?: Date;\n /** Maximum number of results */\n limit?: number;\n /** Offset for pagination */\n offset?: number;\n}\n\n// ============================================================================\n// Duration parsing utility type\n// ============================================================================\n\n/**\n * Duration string format: \"1h\", \"30m\", \"2d\", etc.\n */\nexport type DurationString = string;\n\n/**\n * Parse duration string to milliseconds\n *\n * @throws Error if duration is invalid, zero, or negative\n */\nexport function parseDuration(duration: string | number): number {\n if (typeof duration === \"number\") {\n if (duration < 0) {\n throw new Error(`Duration cannot be negative: ${duration}`);\n }\n return duration;\n }\n\n const match = duration.match(/^(\\d+(?:\\.\\d+)?)\\s*(ms|s|m|h|d)$/);\n if (!match) {\n throw new Error(`Invalid duration format: ${duration}`);\n }\n\n const value = match[1];\n const unit = match[2];\n\n if (!value || !unit) {\n throw new Error(`Invalid duration format: ${duration}`);\n }\n\n const num = parseFloat(value);\n\n // Reject zero and negative values\n if (num <= 0) {\n throw new Error(`Duration must be positive: ${duration}`);\n }\n\n switch (unit) {\n case \"ms\":\n return num;\n case \"s\":\n return num * 1000;\n case 
\"m\":\n return num * 60 * 1000;\n case \"h\":\n return num * 60 * 60 * 1000;\n case \"d\":\n return num * 24 * 60 * 60 * 1000;\n default:\n throw new Error(`Unknown duration unit: ${unit}`);\n }\n}\n\n/**\n * Validate retry configuration\n *\n * @throws Error if retry config has invalid values\n */\nexport function validateRetryConfig(config: RetryConfig): void {\n if (config.maxAttempts !== undefined) {\n if (!Number.isInteger(config.maxAttempts) || config.maxAttempts < 1) {\n throw new Error(`maxAttempts must be a positive integer, got: ${config.maxAttempts}`);\n }\n }\n\n if (config.initialDelay !== undefined) {\n if (config.initialDelay < 0) {\n throw new Error(`initialDelay cannot be negative: ${config.initialDelay}`);\n }\n }\n\n if (config.maxDelay !== undefined) {\n if (config.maxDelay < 0) {\n throw new Error(`maxDelay cannot be negative: ${config.maxDelay}`);\n }\n }\n\n if (config.initialDelay !== undefined && config.maxDelay !== undefined) {\n if (config.initialDelay > config.maxDelay) {\n throw new Error(\n `initialDelay (${config.initialDelay}) cannot be greater than maxDelay (${config.maxDelay})`,\n );\n }\n }\n\n if (config.backoff !== undefined) {\n const validBackoffs = [\"fixed\", \"linear\", \"exponential\"];\n if (!validBackoffs.includes(config.backoff)) {\n throw new Error(\n `Invalid backoff strategy: ${config.backoff}. Must be one of: ${validBackoffs.join(\", \")}`,\n );\n }\n }\n}\n\n/**\n * Generate a unique ID for workflow runs, nodes, etc.\n */\nexport function generateId(prefix: string = \"wf\"): string {\n const randomPart = crypto.randomUUID().slice(0, 12);\n return `${prefix}_${randomPart}`;\n}\n", "/**\n * Workflow DSL Builder\n *\n * Main factory function for creating durable workflows\n */\n\nimport type { z } from \"zod\";\nimport type {\n RetryConfig,\n StepBuilderContext,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a workflow\n */\nexport interface WorkflowOptions<TInput = unknown, TOutput = unknown> {\n /** Unique workflow identifier */\n id: string;\n /** Optional description */\n description?: string;\n /** Optional version */\n version?: string;\n /** Input validation schema (Zod) */\n inputSchema?: z.ZodSchema<TInput>;\n /** Output validation schema (Zod) */\n outputSchema?: z.ZodSchema<TOutput>;\n /** Default retry configuration for all steps */\n retry?: RetryConfig;\n /** Default timeout for the entire workflow */\n timeout?: string | number;\n /**\n * Workflow steps - can be:\n * - An array of WorkflowNode\n * - A function that returns an array based on input\n */\n steps:\n | WorkflowNode[]\n | ((context: StepBuilderContext<TInput>) => WorkflowNode[]);\n /** Error handler called when workflow fails */\n onError?: (error: Error, context: WorkflowContext) => void | Promise<void>;\n /** Completion handler called when workflow succeeds */\n onComplete?: (\n result: TOutput,\n context: WorkflowContext,\n ) => void | Promise<void>;\n}\n\n/**\n * Created workflow with execution methods\n */\nexport interface Workflow<TInput = unknown, TOutput = unknown> {\n /** Workflow definition */\n definition: WorkflowDefinition<TInput, TOutput>;\n /** Workflow ID */\n id: string;\n /** Workflow version */\n version?: string;\n}\n\n/**\n * Create a durable workflow definition\n *\n * @example\n * ```typescript\n * import { workflow, step, parallel, branch, waitForApproval } from 'veryfront/ai/workflow';\n * import { z } from 'zod';\n *\n * export default workflow({\n * id: 
'content-pipeline',\n * description: 'Generate and publish content with human review',\n *\n * inputSchema: z.object({\n * topic: z.string(),\n * requiresApproval: z.boolean().default(true),\n * }),\n *\n * timeout: '2h',\n *\n * steps: ({ input }) => [\n * // Research phase\n * step('research', {\n * agent: 'researcher',\n * input: `Research: ${input.topic}`,\n * }),\n *\n * // Generate content in parallel\n * parallel('generate', [\n * step('write-article', { agent: 'writer' }),\n * step('create-images', { tool: 'imageGenerator' }),\n * ]),\n *\n * // Optional approval gate\n * branch('approval-gate', {\n * condition: () => input.requiresApproval,\n * then: [\n * waitForApproval('human-review', {\n * timeout: '24h',\n * message: 'Please review the content',\n * }),\n * ],\n * }),\n *\n * // Publish\n * step('publish', { agent: 'publisher' }),\n * ],\n *\n * onComplete: async (result, context) => {\n * console.log('Workflow completed:', result);\n * },\n *\n * onError: async (error, context) => {\n * console.error('Workflow failed:', error);\n * },\n * });\n * ```\n */\nexport function workflow<TInput = unknown, TOutput = unknown>(\n options: WorkflowOptions<TInput, TOutput>,\n): Workflow<TInput, TOutput> {\n // Validate required fields\n if (!options.id) {\n throw new Error(\"Workflow must have an 'id'\");\n }\n\n if (!options.steps) {\n throw new Error(`Workflow \"${options.id}\" must have 'steps'`);\n }\n\n // Create the workflow definition\n const definition: WorkflowDefinition<TInput, TOutput> = {\n id: options.id,\n description: options.description,\n version: options.version,\n inputSchema: options.inputSchema,\n outputSchema: options.outputSchema,\n retry: options.retry,\n timeout: options.timeout,\n steps: options.steps,\n onError: options.onError,\n onComplete: options.onComplete,\n };\n\n return {\n definition,\n id: options.id,\n version: options.version,\n };\n}\n\n/**\n * Helper to build linear dependencies between nodes\n *\n * Takes an array of nodes and returns them with dependsOn set\n * so each node depends on the previous one.\n */\nexport function sequence(...nodes: WorkflowNode[]): WorkflowNode[] {\n return nodes.map((node, index) => {\n if (index === 0) {\n return node;\n }\n const prevNode = nodes[index - 1];\n return {\n ...node,\n dependsOn: prevNode ? 
[prevNode.id] : undefined,\n };\n });\n}\n\n/**\n * Create a DAG-based workflow with explicit dependencies\n *\n * @example\n * ```typescript\n * import { dag, workflow } from 'veryfront/ai/workflow';\n *\n * export default workflow({\n * id: 'data-pipeline',\n * steps: dag({\n * 'fetch': step('fetch', { tool: 'dataFetcher' }),\n * 'validate': step('validate', { agent: 'validator' }).dependsOn('fetch'),\n * 'transform-a': step('transform-a', { tool: 'transformerA' }).dependsOn('validate'),\n * 'transform-b': step('transform-b', { tool: 'transformerB' }).dependsOn('validate'),\n * 'aggregate': step('aggregate', { agent: 'aggregator' }).dependsOn('transform-a', 'transform-b'),\n * }),\n * });\n * ```\n */\nexport function dag(\n nodes: Record<string, WorkflowNode | { node: WorkflowNode; dependsOn: string[] }>,\n): WorkflowNode[] {\n const result: WorkflowNode[] = [];\n const seenIds = new Set<string>();\n\n for (const [id, value] of Object.entries(nodes)) {\n let nodeId: string;\n let node: WorkflowNode;\n\n if (\"node\" in value && \"dependsOn\" in value) {\n // Object with explicit dependencies\n nodeId = value.node.id || id;\n node = {\n ...value.node,\n id: nodeId,\n dependsOn: value.dependsOn,\n };\n } else {\n // Plain WorkflowNode\n const workflowNode = value as WorkflowNode;\n nodeId = workflowNode.id || id;\n node = {\n ...workflowNode,\n id: nodeId,\n };\n }\n\n // Check for duplicate IDs\n if (seenIds.has(nodeId)) {\n throw new Error(`Duplicate node ID detected in dag: \"${nodeId}\"`);\n }\n seenIds.add(nodeId);\n\n result.push(node);\n }\n\n return result;\n}\n\n/**\n * Helper to add dependencies to a node\n */\nexport function dependsOn(\n node: WorkflowNode,\n ...dependencies: string[]\n): WorkflowNode {\n return {\n ...node,\n dependsOn: [...(node.dependsOn || []), ...dependencies],\n };\n}\n", "/**\n * Step DSL Builder\n *\n * Creates step nodes for agent or tool execution\n */\n\nimport type { Agent } from \"../../types/agent.ts\";\nimport type { Tool } from \"../../types/tool.ts\";\nimport type {\n BaseNodeConfig,\n RetryConfig,\n StepNodeConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a step node\n */\nexport interface StepOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Agent ID or agent instance to execute */\n agent?: string | Agent;\n /** Tool ID or tool instance to execute */\n tool?: string | Tool | undefined;\n /** Input for the agent/tool */\n input?:\n | string\n | Record<string, unknown>\n | ((context: WorkflowContext) => unknown);\n /** Whether to checkpoint after this step (default: true for agents) */\n checkpoint?: boolean;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Timeout for this step */\n timeout?: string | number;\n /** Condition to skip this step */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a step node for agent or tool execution\n *\n * @example\n * ```typescript\n * // Agent step\n * step('research', {\n * agent: 'researcher',\n * input: 'Research AI safety',\n * checkpoint: true,\n * })\n *\n * // Tool step\n * step('fetch-data', {\n * tool: 'dataFetcher',\n * input: { url: 'https://api.example.com/data' },\n * })\n *\n * // Dynamic input from context\n * step('write', {\n * agent: 'writer',\n * input: (ctx) => ctx['research'].output,\n * })\n * ```\n */\nexport function step(id: string, options: StepOptions): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new 
Error(\"Node ID must be a non-empty string\");\n }\n\n // Validate that either agent or tool is specified\n if (!options.agent && !options.tool) {\n throw new Error(`Step \"${id}\" must specify either 'agent' or 'tool'`);\n }\n\n if (options.agent && options.tool) {\n throw new Error(`Step \"${id}\" cannot specify both 'agent' and 'tool'`);\n }\n\n // Default checkpoint to true for agent steps\n const shouldCheckpoint = options.checkpoint ?? !!options.agent;\n\n const config: StepNodeConfig = {\n type: \"step\",\n agent: options.agent,\n tool: options.tool,\n input: options.input,\n checkpoint: shouldCheckpoint,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Create a step that executes an agent\n * Convenience wrapper around step()\n */\nexport function agentStep(\n id: string,\n agent: string | Agent,\n options?: Omit<StepOptions, \"agent\" | \"tool\">,\n): WorkflowNode {\n return step(id, { ...options, agent });\n}\n\n/**\n * Create a step that executes a tool\n * Convenience wrapper around step()\n */\nexport function toolStep(\n id: string,\n tool: string | Tool,\n options?: Omit<StepOptions, \"agent\" | \"tool\">,\n): WorkflowNode {\n return step(id, { ...options, tool });\n}\n", "/**\n * Parallel DSL Builder\n *\n * Creates parallel nodes for concurrent execution\n */\n\nimport type {\n BaseNodeConfig,\n ParallelNodeConfig,\n RetryConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a parallel node\n */\nexport interface ParallelOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** How to handle parallel completion */\n strategy?: \"all\" | \"race\" | \"allSettled\";\n /** Whether to checkpoint after all parallel steps complete */\n checkpoint?: boolean;\n /** Retry configuration for the parallel group */\n retry?: RetryConfig;\n /** Timeout for all parallel steps */\n timeout?: string | number;\n /** Condition to skip this parallel group */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a parallel node for concurrent execution of multiple steps\n *\n * @example\n * ```typescript\n * // Execute multiple agents in parallel\n * parallel('analyze', [\n * step('security-scan', { agent: 'securityAgent' }),\n * step('code-quality', { agent: 'codeReviewAgent' }),\n * step('test-coverage', { tool: 'coverageAnalyzer' }),\n * ])\n *\n * // Race condition - first to complete wins\n * parallel('fast-response', [\n * step('gpt4', { agent: 'gpt4Agent' }),\n * step('claude', { agent: 'claudeAgent' }),\n * ], { strategy: 'race' })\n *\n * // Continue even if some fail\n * parallel('optional-checks', [\n * step('lint', { tool: 'linter' }),\n * step('typecheck', { tool: 'typechecker' }),\n * ], { strategy: 'allSettled' })\n * ```\n */\nexport function parallel(\n id: string,\n nodes: WorkflowNode[],\n options: ParallelOptions = {},\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!nodes || nodes.length === 0) {\n throw new Error(`Parallel node \"${id}\" must have at least one child node`);\n }\n\n // Generate unique IDs for child nodes if they're nested under this parallel\n // Also validate child node IDs\n const prefixedNodes = nodes.map((node, index) => {\n if (!node.id || typeof node.id !== \"string\") {\n throw new Error(`Child node at index ${index} in parallel \"${id}\" has invalid ID`);\n }\n 
return {\n ...node,\n id: node.id.startsWith(`${id}/`) ? node.id : `${id}/${node.id}`,\n };\n });\n\n const config: ParallelNodeConfig = {\n type: \"parallel\",\n nodes: prefixedNodes,\n strategy: options.strategy ?? \"all\",\n checkpoint: options.checkpoint ?? true,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n", "/**\n * Map DSL Builder\n *\n * Creates map nodes for dynamic fan-out execution\n */\n\nimport type {\n BaseNodeConfig,\n MapNodeConfig,\n RetryConfig,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a map node\n */\nexport interface MapOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Items to iterate over */\n items: unknown[] | ((context: WorkflowContext) => unknown[] | Promise<unknown[]>);\n /** Node or workflow to execute for each item */\n processor: WorkflowNode | WorkflowDefinition;\n /** Maximum concurrent executions */\n concurrency?: number;\n /** Whether to checkpoint after all items complete */\n checkpoint?: boolean;\n /** Retry configuration for the map group */\n retry?: RetryConfig;\n /** Timeout for all map items */\n timeout?: string | number;\n /** Condition to skip this map group */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a map node for dynamic fan-out execution\n *\n * @example\n * ```typescript\n * // Process a list of URLs dynamically\n * map('process-urls', {\n * items: (ctx) => ctx.input.urls,\n * processor: step('scrape', { tool: 'webScraper' }),\n * concurrency: 5\n * })\n * ```\n */\nexport function map(\n id: string,\n options: MapOptions,\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.items) {\n throw new Error(`Map node \"${id}\" must have 'items' configured`);\n }\n\n if (!options.processor) {\n throw new Error(`Map node \"${id}\" must have a 'processor' configured`);\n }\n\n const config: MapNodeConfig = {\n type: \"map\",\n items: options.items,\n processor: options.processor,\n concurrency: options.concurrency,\n checkpoint: options.checkpoint ?? 
true,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n", "/**\n * SubWorkflow DSL Builder\n *\n * Creates sub-workflow nodes for nested workflow execution\n */\n\nimport type {\n BaseNodeConfig,\n SubWorkflowNodeConfig,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a sub-workflow node\n */\nexport interface SubWorkflowOptions extends BaseNodeConfig {\n /** The workflow definition to execute */\n workflow: WorkflowDefinition;\n /** Input for the sub-workflow */\n input?: unknown | ((context: WorkflowContext) => unknown);\n /** Transform the sub-workflow output */\n output?: (result: unknown) => unknown;\n}\n\n/**\n * Create a sub-workflow node for nested execution\n *\n * @example\n * ```typescript\n * import mySubWorkflow from './my-sub-workflow';\n *\n * // Execute a sub-workflow\n * subWorkflow('nested-process', {\n * workflow: mySubWorkflow.definition,\n * input: (ctx) => ({ data: ctx.prevStep.result })\n * })\n * ```\n */\nexport function subWorkflow(\n id: string,\n options: SubWorkflowOptions,\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.workflow) {\n throw new Error(`SubWorkflow node \"${id}\" must have a 'workflow' configured`);\n }\n\n const config: SubWorkflowNodeConfig = {\n type: \"subWorkflow\",\n workflow: options.workflow,\n input: options.input,\n output: options.output,\n checkpoint: options.checkpoint,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n", "/**\n * Branch DSL Builder\n *\n * Creates conditional branch nodes for workflow control flow\n */\n\nimport type {\n BaseNodeConfig,\n BranchNodeConfig,\n RetryConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a branch node\n */\nexport interface BranchOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Condition to evaluate */\n condition: (context: WorkflowContext) => boolean | Promise<boolean>;\n /** Nodes to execute if condition is true */\n then: WorkflowNode[];\n /** Nodes to execute if condition is false (optional) */\n else?: WorkflowNode[];\n /** Whether to checkpoint after branching */\n checkpoint?: boolean;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Timeout for the entire branch */\n timeout?: string | number;\n /** Condition to skip the entire branch */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a conditional branch node\n *\n * @example\n * ```typescript\n * // Simple if-then branch\n * branch('approval-gate', {\n * condition: (ctx) => ctx.input.requiresApproval,\n * then: [\n * waitForApproval('human-review', { timeout: '24h' }),\n * ],\n * })\n *\n * // If-then-else branch\n * branch('quality-check', {\n * condition: async (ctx) => {\n * const score = ctx['analyze'].output.score;\n * return score >= 0.8;\n * },\n * then: [\n * step('publish', { agent: 'publisher' }),\n * ],\n * else: [\n * step('revise', { agent: 'editor' }),\n * step('reanalyze', { agent: 'analyzer' }),\n * ],\n * })\n * ```\n */\nexport function branch(id: string, options: BranchOptions): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.condition) {\n 
throw new Error(`Branch \"${id}\" must specify a condition`);\n }\n\n if (!options.then || options.then.length === 0) {\n throw new Error(`Branch \"${id}\" must have at least one 'then' node`);\n }\n\n // Prefix child node IDs for proper namespacing\n const prefixThenNodes = options.then.map((node) => ({\n ...node,\n id: node.id.startsWith(`${id}/then/`) ? node.id : `${id}/then/${node.id}`,\n }));\n\n const prefixElseNodes = options.else?.map((node) => ({\n ...node,\n id: node.id.startsWith(`${id}/else/`) ? node.id : `${id}/else/${node.id}`,\n }));\n\n const config: BranchNodeConfig = {\n type: \"branch\",\n condition: options.condition,\n then: prefixThenNodes,\n else: prefixElseNodes,\n checkpoint: options.checkpoint ?? false,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Create a branch that only executes if condition is true (no else)\n * Convenience wrapper around branch()\n */\nexport function when(\n id: string,\n condition: (context: WorkflowContext) => boolean | Promise<boolean>,\n nodes: WorkflowNode[],\n): WorkflowNode {\n return branch(id, { condition, then: nodes });\n}\n\n/**\n * Create a branch that only executes if condition is false\n * Convenience wrapper around branch()\n */\nexport function unless(\n id: string,\n condition: (context: WorkflowContext) => boolean | Promise<boolean>,\n nodes: WorkflowNode[],\n): WorkflowNode {\n return branch(id, {\n condition: async (ctx) => !(await condition(ctx)),\n then: nodes,\n });\n}\n", "/**\n * Wait DSL Builder\n *\n * Creates wait nodes for approvals and external events\n */\n\nimport type {\n BaseNodeConfig,\n RetryConfig,\n WaitNodeConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a wait-for-approval node\n */\nexport interface WaitForApprovalOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Message to display to the approver */\n message?: string;\n /** Payload to include with the approval request */\n payload?: unknown | ((context: WorkflowContext) => unknown);\n /** Timeout for the approval (e.g., \"24h\", \"7d\") */\n timeout?: string | number;\n /** Restrict approval to specific users */\n approvers?: string[];\n /** Retry configuration (for timeout/retry scenarios) */\n retry?: RetryConfig;\n /** Condition to skip this approval */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a wait-for-approval node\n *\n * This pauses the workflow until a human approves or rejects.\n * The workflow can be resumed via the approval API.\n *\n * @example\n * ```typescript\n * // Basic approval\n * waitForApproval('content-review', {\n * message: 'Please review the generated content',\n * timeout: '24h',\n * })\n *\n * // Approval with payload for context\n * waitForApproval('deployment-approval', {\n * message: 'Approve deployment to production?',\n * payload: (ctx) => ({\n * changes: ctx['summarize'].output,\n * riskLevel: ctx['analyze'].output.riskLevel,\n * }),\n * approvers: ['ops@company.com', 'lead@company.com'],\n * timeout: '48h',\n * })\n * ```\n */\nexport function waitForApproval(\n id: string,\n options: WaitForApprovalOptions = {},\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n const config: WaitNodeConfig = {\n type: \"wait\",\n waitType: \"approval\",\n message: options.message ?? 
\"Approval required\",\n payload: options.payload,\n approvers: options.approvers,\n timeout: options.timeout,\n // Always checkpoint before waiting\n checkpoint: true,\n retry: options.retry,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Options for creating a wait-for-event node\n */\nexport interface WaitForEventOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Event name to wait for */\n eventName: string;\n /** Timeout for the event (e.g., \"1h\", \"7d\") */\n timeout?: string | number;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Condition to skip this wait */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a wait-for-event node\n *\n * This pauses the workflow until an external event is received.\n * Events can be sent via the workflow event API.\n *\n * @example\n * ```typescript\n * // Wait for external webhook\n * waitForEvent('payment-confirmation', {\n * eventName: 'payment.completed',\n * timeout: '30m',\n * })\n *\n * // Wait for manual trigger\n * waitForEvent('manual-continue', {\n * eventName: 'workflow.continue',\n * })\n * ```\n */\nexport function waitForEvent(\n id: string,\n options: WaitForEventOptions,\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.eventName) {\n throw new Error(`waitForEvent \"${id}\" must specify an eventName`);\n }\n\n const config: WaitNodeConfig = {\n type: \"wait\",\n waitType: \"event\",\n eventName: options.eventName,\n timeout: options.timeout,\n // Always checkpoint before waiting\n checkpoint: true,\n retry: options.retry,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Create a simple delay/sleep node\n *\n * @example\n * ```typescript\n * // Wait for 5 minutes between steps\n * delay('cooldown', '5m')\n * ```\n */\nexport function delay(id: string, duration: string | number): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n const config: WaitNodeConfig = {\n type: \"wait\",\n waitType: \"event\",\n eventName: \"__delay__\",\n timeout: duration,\n checkpoint: false, // No need to checkpoint for simple delays\n };\n\n return {\n id,\n config,\n };\n}\n", "// Conditional imports for path module\nimport nodePath from \"node:path\";\nimport type { PlatformPath } from \"node:path\";\n\n// Use node:path for Node.js or import Deno's std/path for Deno\nlet pathMod: PlatformPath | null = null;\n\n// Initialize path module synchronously for Node.js\n// @ts-ignore - Deno global\nif (typeof Deno === \"undefined\") {\n pathMod = nodePath;\n} else {\n // Deno environment - start loading asynchronously but don't await\n // @ts-ignore - Deno global\n import(\"std/path/mod.ts\").then((mod) => {\n pathMod = mod as unknown as PlatformPath;\n });\n}\n\n// Helper to get path module, ensuring it's loaded\nfunction getPathMod(): PlatformPath {\n if (pathMod) return pathMod;\n // In Deno, if pathMod is not yet loaded, use Node.js path as temporary fallback\n // This should rarely happen as the import is fast\n return nodePath;\n}\n\n// Re-export common path functions with proper types\nexport const basename = (path: string, suffix?: string): string =>\n getPathMod().basename(path, suffix);\nexport const dirname = (path: string): string => getPathMod().dirname(path);\nexport const fromFileUrl = 
(url: string | URL): string => {\n const mod = getPathMod();\n // @ts-ignore - Deno path module has fromFileUrl\n if (mod && typeof (mod as any).fromFileUrl === \"function\") {\n // @ts-ignore - Deno path module has fromFileUrl\n return (mod as any).fromFileUrl(url);\n }\n // Fallback for Node.js where fromFileUrl might not be directly available\n // This uses URL parsing which is generally cross-platform\n const urlObj = typeof url === \"string\" ? new URL(url) : url;\n return urlObj.pathname;\n};\nexport const join = (...paths: string[]): string => getPathMod().join(...paths);\nexport const relative = (from: string, to: string): string => getPathMod().relative(from, to);\nexport const resolve = (...paths: string[]): string => getPathMod().resolve(...paths);\nexport const extname = (path: string): string => getPathMod().extname(path);\nexport const isAbsolute = (path: string): boolean => getPathMod().isAbsolute(path);\n// Export sep - uses getter function to ensure pathMod is resolved\nexport const sep: string = nodePath.sep;\n", "export interface BuildContext {\n file?: string;\n line?: number;\n column?: number;\n moduleId?: string;\n phase?: \"parse\" | \"transform\" | \"bundle\" | \"optimize\";\n}\n\nexport interface APIContext {\n endpoint?: string;\n method?: string;\n statusCode?: number;\n headers?: Record<string, string>;\n}\n\nexport interface RenderContext {\n component?: string;\n route?: string;\n phase?: \"server\" | \"client\" | \"hydration\";\n props?: unknown;\n}\n\nexport interface ConfigContext {\n configFile?: string;\n field?: string;\n value?: unknown;\n expected?: string;\n}\n\nexport interface AgentContext {\n agentId?: string;\n intent?: string;\n timeout?: number;\n}\n\nexport interface FileContext {\n path?: string;\n operation?: \"read\" | \"write\" | \"delete\" | \"mkdir\";\n permissions?: string;\n}\n\nexport interface NetworkContext {\n url?: string;\n timeout?: number;\n retryCount?: number;\n}\n\nexport type VeryfrontError =\n | { type: \"build\"; message: string; context?: BuildContext }\n | { type: \"api\"; message: string; context?: APIContext }\n | { type: \"render\"; message: string; context?: RenderContext }\n | { type: \"config\"; message: string; context?: ConfigContext }\n | { type: \"agent\"; message: string; context?: AgentContext }\n | { type: \"file\"; message: string; context?: FileContext }\n | { type: \"network\"; message: string; context?: NetworkContext }\n | { type: \"permission\"; message: string; context?: FileContext }\n | { type: \"not_supported\"; message: string; feature?: string };\n\nexport function createError(error: VeryfrontError): VeryfrontError {\n return error;\n}\n\nexport function isBuildError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"build\" }> {\n return error.type === \"build\";\n}\n\nexport function isAPIError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"api\" }> {\n return error.type === \"api\";\n}\n\nexport function isRenderError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"render\" }> {\n return error.type === \"render\";\n}\n\nexport function isConfigError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"config\" }> {\n return error.type === \"config\";\n}\n\nexport function isAgentError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"agent\" }> {\n return error.type === \"agent\";\n}\n\nexport function isFileError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { 
type: \"file\" }> {\n return error.type === \"file\";\n}\n\nexport function isNetworkError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"network\" }> {\n return error.type === \"network\";\n}\n\nexport function toError(veryfrontError: VeryfrontError): Error {\n const error = new Error(veryfrontError.message);\n error.name = `VeryfrontError[${veryfrontError.type}]`;\n Object.defineProperty(error, \"context\", {\n value: veryfrontError,\n enumerable: false,\n configurable: true,\n });\n return error;\n}\n\nexport function fromError(error: unknown): VeryfrontError | null {\n if (error && typeof error === \"object\" && \"context\" in error) {\n // Safe access after 'in' check\n const context = (error as Record<string, unknown>).context;\n if (\n context &&\n typeof context === \"object\" &&\n \"type\" in context &&\n \"message\" in context\n ) {\n return context as VeryfrontError;\n }\n }\n return null;\n}\n\nexport function logError(\n error: VeryfrontError,\n logger?: { error: (msg: string, ...args: unknown[]) => void },\n): void {\n const log = logger || console;\n const context = \"context\" in error ? error.context || {} : {};\n log.error(`[${error.type}] ${error.message}`, context);\n}\n", "export const isDeno = typeof Deno !== \"undefined\";\nexport const isNode =\n typeof (globalThis as { process?: { versions?: { node?: string } } }).process !== \"undefined\" &&\n (globalThis as { process?: { versions?: { node?: string } } }).process?.versions?.node !==\n undefined;\nexport const isBun = typeof (globalThis as { Bun?: unknown }).Bun !== \"undefined\";\nexport const isCloudflare = typeof globalThis !== \"undefined\" && \"caches\" in globalThis &&\n \"WebSocketPair\" in globalThis;\n\n/**\n * Detect if running in Node.js (vs Deno)\n * Use this function instead of the constant when runtime detection needs to happen\n * at call time (e.g., when bundled with esbuild's __esm lazy initialization pattern)\n */\nexport function isNodeRuntime(): boolean {\n // deno-lint-ignore no-explicit-any\n const _global = globalThis as any;\n return typeof Deno === \"undefined\" && typeof _global.process !== \"undefined\" &&\n !!_global.process?.versions?.node;\n}\n", "/**\n * Cross-platform filesystem abstraction for CLI commands and standalone utilities.\n *\n * This module provides a synchronous-style API for filesystem operations that works\n * across Deno, Node.js, and Bun runtimes. 
It's designed for CLI commands and scripts\n * where you don't have access to a RuntimeAdapter context.\n *\n * For server/rendering contexts where you have an adapter, prefer using adapter.fs directly:\n * ```ts\n * const adapter = await getAdapter();\n * const content = await adapter.fs.readFile(path);\n * ```\n *\n * For CLI commands and standalone utilities, use createFileSystem():\n * ```ts\n * import { createFileSystem } from \"@veryfront/platform/compat/fs.ts\";\n * const fs = createFileSystem();\n * const content = await fs.readTextFile(path);\n * ```\n *\n * @module\n */\n\nimport type { FileInfo } from \"@veryfront/platform/adapters/base.ts\";\nimport { createError, toError } from \"../../core/errors/veryfront-error.ts\";\nimport { isDeno, isNode } from \"./runtime.ts\";\n\n/**\n * Cross-platform filesystem interface for CLI commands and standalone utilities.\n * Compatible with RuntimeAdapter.fs (FileSystemAdapter) for easy interoperability.\n */\nexport interface FileSystem {\n readTextFile(path: string): Promise<string>;\n readFile(path: string): Promise<Uint8Array>; // Changed to Uint8Array for binary\n writeTextFile(path: string, data: string): Promise<void>;\n writeFile(path: string, data: Uint8Array): Promise<void>; // Changed to Uint8Array for binary\n exists(path: string): Promise<boolean>;\n stat(path: string): Promise<FileInfo>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n readDir(path: string): AsyncIterable<{ name: string; isFile: boolean; isDirectory: boolean }>;\n remove(path: string, options?: { recursive?: boolean }): Promise<void>;\n makeTempDir(options?: { prefix?: string }): Promise<string>; // New for temp dirs\n}\n\n// ============================================================================\n// Node.js Implementation\n// ============================================================================\n\ninterface NodeFsPromises {\n readFile(\n path: string,\n options?: { encoding?: string; flag?: string } | string,\n ): Promise<string | Uint8Array>;\n writeFile(\n path: string,\n data: string | Uint8Array,\n options?: { encoding?: string; flag?: string } | string,\n ): Promise<void>;\n access(path: string, mode?: number): Promise<void>;\n stat(path: string): Promise<{\n isFile(): boolean;\n isDirectory(): boolean;\n isSymbolicLink(): boolean;\n size: number;\n mtime: Date;\n }>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n readdir(path: string, options: { withFileTypes: true }): Promise<\n Array<{\n name: string;\n isFile(): boolean;\n isDirectory(): boolean;\n isSymbolicLink(): boolean;\n }>\n >;\n rm(path: string, options?: { recursive?: boolean; force?: boolean }): Promise<void>;\n}\n\nclass NodeFileSystem implements FileSystem {\n private fs: NodeFsPromises | null = null;\n private os: typeof import(\"node:os\") | null = null;\n private path: typeof import(\"node:path\") | null = null;\n private initialized = false;\n\n private async ensureInitialized(): Promise<void> {\n if (this.initialized) return;\n\n if (!isNode) {\n throw toError(createError({\n type: \"not_supported\",\n message: \"Node.js fs modules not available\",\n feature: \"Node.js\",\n }));\n }\n\n // Use dynamic ESM imports for Node.js modules\n const [fsModule, osModule, pathModule] = await Promise.all([\n import(\"node:fs/promises\"),\n import(\"node:os\"),\n import(\"node:path\"),\n ]);\n\n this.fs = fsModule as unknown as NodeFsPromises;\n this.os = osModule;\n this.path = pathModule;\n this.initialized = true;\n }\n\n async 
readTextFile(path: string): Promise<string> {\n await this.ensureInitialized();\n return await (this.fs!.readFile(path, { encoding: \"utf8\" }) as Promise<string>);\n }\n\n async readFile(path: string): Promise<Uint8Array> {\n await this.ensureInitialized();\n return await (this.fs!.readFile(path) as Promise<Uint8Array>);\n }\n\n async writeTextFile(path: string, data: string): Promise<void> {\n await this.ensureInitialized();\n await this.fs!.writeFile(path, data, { encoding: \"utf8\" });\n }\n\n async writeFile(path: string, data: Uint8Array): Promise<void> {\n await this.ensureInitialized();\n await this.fs!.writeFile(path, data);\n }\n\n async exists(path: string): Promise<boolean> {\n await this.ensureInitialized();\n try {\n await this.fs!.access(path);\n return true;\n } catch (error: any) {\n if (error.code === \"ENOENT\") {\n return false;\n }\n throw error;\n }\n }\n\n async stat(path: string): Promise<FileInfo> {\n await this.ensureInitialized();\n const stat = await this.fs!.stat(path);\n return {\n isFile: stat.isFile(),\n isDirectory: stat.isDirectory(),\n isSymlink: stat.isSymbolicLink(),\n size: stat.size,\n mtime: stat.mtime,\n };\n }\n\n async mkdir(path: string, options?: { recursive?: boolean }): Promise<void> {\n await this.ensureInitialized();\n await this.fs!.mkdir(path, { recursive: options?.recursive ?? false });\n }\n\n async *readDir(\n path: string,\n ): AsyncIterable<{ name: string; isFile: boolean; isDirectory: boolean }> {\n await this.ensureInitialized();\n const entries = await this.fs!.readdir(path, { withFileTypes: true });\n for (const entry of entries) {\n yield {\n name: entry.name,\n isFile: entry.isFile(),\n isDirectory: entry.isDirectory(),\n };\n }\n }\n\n async remove(path: string, options?: { recursive?: boolean }): Promise<void> {\n await this.ensureInitialized();\n // Node.js fs.rm requires force for recursive deletion of non-empty directories\n await this.fs!.rm(path, {\n recursive: options?.recursive ?? false,\n force: options?.recursive ?? false,\n });\n }\n\n async makeTempDir(options?: { prefix?: string }): Promise<string> {\n await this.ensureInitialized();\n const tempDir = this.path!.join(\n this.os!.tmpdir(),\n `${options?.prefix ?? 
\"tmp-\"}${Math.random().toString(36).substring(2, 8)}`,\n );\n await this.fs!.mkdir(tempDir, { recursive: true });\n return tempDir;\n }\n}\n\n// ============================================================================\n// Deno Implementation\n// ============================================================================\n\nclass DenoFileSystem implements FileSystem {\n async readTextFile(path: string): Promise<string> {\n // @ts-ignore - Deno global\n return await Deno.readTextFile(path);\n }\n\n async readFile(path: string): Promise<Uint8Array> {\n // @ts-ignore - Deno global\n return await Deno.readFile(path);\n }\n\n async writeTextFile(path: string, data: string): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.writeTextFile(path, data);\n }\n\n async writeFile(path: string, data: Uint8Array): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.writeFile(path, data);\n }\n\n async exists(path: string): Promise<boolean> {\n try {\n // @ts-ignore - Deno global\n await Deno.stat(path);\n return true;\n } catch (error: any) {\n // @ts-ignore - Deno global\n if (error instanceof Deno.errors.NotFound) {\n return false;\n }\n throw error;\n }\n }\n\n async stat(path: string): Promise<FileInfo> {\n // @ts-ignore - Deno global\n const stat = await Deno.stat(path);\n return {\n isFile: stat.isFile,\n isDirectory: stat.isDirectory,\n isSymlink: stat.isSymlink,\n size: stat.size,\n mtime: stat.mtime,\n };\n }\n\n async mkdir(path: string, options?: { recursive?: boolean }): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.mkdir(path, { recursive: options?.recursive ?? false });\n }\n\n async *readDir(\n path: string,\n ): AsyncIterable<{ name: string; isFile: boolean; isDirectory: boolean }> {\n // @ts-ignore - Deno global\n for await (const entry of Deno.readDir(path)) {\n yield {\n name: entry.name,\n isFile: entry.isFile,\n isDirectory: entry.isDirectory,\n };\n }\n }\n\n async remove(path: string, options?: { recursive?: boolean }): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.remove(path, { recursive: options?.recursive ?? 
false });\n }\n\n async makeTempDir(options?: { prefix?: string }): Promise<string> {\n // @ts-ignore - Deno global\n return await Deno.makeTempDir({ prefix: options?.prefix });\n }\n}\n\n/**\n * Create a cross-platform filesystem instance for CLI commands and standalone utilities.\n *\n * Use this for CLI commands that don't have access to a RuntimeAdapter context:\n * ```ts\n * const fs = createFileSystem();\n * const content = await fs.readTextFile(path);\n * await fs.writeTextFile(outputPath, result);\n * ```\n *\n * For server/rendering contexts, prefer using adapter.fs directly.\n *\n * Note: For npm package, always uses Node.js fs APIs for cross-platform compatibility.\n */\nexport function createFileSystem(): FileSystem {\n if (isDeno) {\n return new DenoFileSystem();\n } else {\n // Node.js or Bun\n return new NodeFileSystem();\n }\n}\n", "export interface GlobalWithDeno {\n Deno?: {\n env: {\n get(key: string): string | undefined;\n };\n };\n}\n\nexport interface GlobalWithProcess {\n process?: {\n env: Record<string, string | undefined>;\n version?: string;\n versions?: Record<string, string>;\n };\n}\n\nexport interface GlobalWithBun {\n Bun?: {\n version: string;\n };\n}\n\nexport function hasDenoRuntime(global: unknown): global is GlobalWithDeno {\n return (\n typeof global === \"object\" &&\n global !== null &&\n \"Deno\" in global &&\n typeof (global as GlobalWithDeno).Deno?.env?.get === \"function\"\n );\n}\n\nexport function hasNodeProcess(global: unknown): global is GlobalWithProcess {\n return (\n typeof global === \"object\" &&\n global !== null &&\n \"process\" in global &&\n typeof (global as GlobalWithProcess).process?.env === \"object\"\n );\n}\n\nexport function hasBunRuntime(global: unknown): global is GlobalWithBun {\n return (\n typeof global === \"object\" &&\n global !== null &&\n \"Bun\" in global &&\n typeof (global as GlobalWithBun).Bun !== \"undefined\"\n );\n}\n", "import type { GlobalWithDeno, GlobalWithProcess } from \"../runtime-guards.ts\";\nimport { hasDenoRuntime, hasNodeProcess } from \"../runtime-guards.ts\";\n\nexport function getEnvironmentVariable(name: string): string | undefined {\n try {\n if (typeof Deno !== \"undefined\" && hasDenoRuntime(globalThis)) {\n const value = (globalThis as GlobalWithDeno).Deno?.env.get(name);\n return value === \"\" ? undefined : value;\n }\n if (hasNodeProcess(globalThis)) {\n const value = (globalThis as GlobalWithProcess).process?.env[name];\n return value === \"\" ? 
undefined : value;\n }\n } catch {\n return undefined;\n }\n return undefined;\n}\n\nexport function isTestEnvironment(): boolean {\n return getEnvironmentVariable(\"NODE_ENV\") === \"test\";\n}\n\nexport function isProductionEnvironment(): boolean {\n return getEnvironmentVariable(\"NODE_ENV\") === \"production\";\n}\n\nexport function isDevelopmentEnvironment(): boolean {\n const env = getEnvironmentVariable(\"NODE_ENV\");\n return env === \"development\" || env === undefined;\n}\n", "import { getEnvironmentVariable } from \"./env.ts\";\n\nexport enum LogLevel {\n DEBUG = 0,\n INFO = 1,\n WARN = 2,\n ERROR = 3,\n}\n\nexport interface Logger {\n debug(message: string, ...args: unknown[]): void;\n info(message: string, ...args: unknown[]): void;\n warn(message: string, ...args: unknown[]): void;\n error(message: string, ...args: unknown[]): void;\n time<T>(label: string, fn: () => Promise<T>): Promise<T>;\n}\n\nconst originalConsole = {\n debug: console.debug,\n log: console.log,\n warn: console.warn,\n error: console.error,\n};\n\nlet cachedLogLevel: LogLevel | undefined;\n\nfunction resolveLogLevel(force = false): LogLevel {\n if (force || cachedLogLevel === undefined) {\n cachedLogLevel = getDefaultLevel();\n }\n return cachedLogLevel;\n}\n\nclass ConsoleLogger implements Logger {\n constructor(\n private prefix: string,\n private level: LogLevel = resolveLogLevel(),\n ) {}\n\n setLevel(level: LogLevel): void {\n this.level = level;\n }\n\n getLevel(): LogLevel {\n return this.level;\n }\n\n debug(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.DEBUG) {\n console.debug(`[${this.prefix}] DEBUG: ${message}`, ...args);\n }\n }\n\n info(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.INFO) {\n console.log(`[${this.prefix}] ${message}`, ...args);\n }\n }\n\n warn(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.WARN) {\n console.warn(`[${this.prefix}] WARN: ${message}`, ...args);\n }\n }\n\n error(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.ERROR) {\n console.error(`[${this.prefix}] ERROR: ${message}`, ...args);\n }\n }\n\n async time<T>(label: string, fn: () => Promise<T>): Promise<T> {\n const start = performance.now();\n try {\n const result = await fn();\n const end = performance.now();\n this.debug(`${label} completed in ${(end - start).toFixed(2)}ms`);\n return result;\n } catch (error) {\n const end = performance.now();\n this.error(`${label} failed after ${(end - start).toFixed(2)}ms`, error);\n throw error;\n }\n }\n}\n\nfunction parseLogLevel(levelString: string | undefined): LogLevel | undefined {\n if (!levelString) return undefined;\n const upper = levelString.toUpperCase();\n switch (upper) {\n case \"DEBUG\":\n return LogLevel.DEBUG;\n case \"WARN\":\n return LogLevel.WARN;\n case \"ERROR\":\n return LogLevel.ERROR;\n case \"INFO\":\n return LogLevel.INFO;\n default:\n return undefined;\n }\n}\n\nconst getDefaultLevel = (): LogLevel => {\n const envLevel = getEnvironmentVariable(\"LOG_LEVEL\");\n const parsedLevel = parseLogLevel(envLevel);\n if (parsedLevel !== undefined) return parsedLevel;\n\n const debugFlag = getEnvironmentVariable(\"VERYFRONT_DEBUG\");\n if (debugFlag === \"1\" || debugFlag === \"true\") return LogLevel.DEBUG;\n\n return LogLevel.INFO;\n};\n\nconst trackedLoggers = new Set<ConsoleLogger>();\n\nfunction createLogger(prefix: string): ConsoleLogger {\n const logger = new ConsoleLogger(prefix);\n trackedLoggers.add(logger);\n return logger;\n}\n\nexport 
const cliLogger = createLogger(\"CLI\");\nexport const serverLogger = createLogger(\"SERVER\");\nexport const rendererLogger = createLogger(\"RENDERER\");\nexport const bundlerLogger = createLogger(\"BUNDLER\");\nexport const agentLogger = createLogger(\"AGENT\");\n\nexport const logger = createLogger(\"VERYFRONT\");\n\ntype LoggerResetOptions = {\n restoreConsole?: boolean;\n};\n\nexport function __loggerResetForTests(options: LoggerResetOptions = {}): void {\n const updatedLevel = resolveLogLevel(true);\n for (const instance of trackedLoggers) {\n instance.setLevel(updatedLevel);\n }\n\n if (options.restoreConsole) {\n console.debug = originalConsole.debug;\n console.log = originalConsole.log;\n console.warn = originalConsole.warn;\n console.error = originalConsole.error;\n }\n}\n", "export const SECONDS_PER_MINUTE = 60;\n\nexport const MINUTES_PER_HOUR = 60;\n\nexport const HOURS_PER_DAY = 24;\n\nexport const MS_PER_SECOND = 1000;\n\nexport const DEFAULT_LRU_MAX_ENTRIES = 100;\n\nexport const COMPONENT_LOADER_MAX_ENTRIES = 100;\nexport const COMPONENT_LOADER_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const MDX_RENDERER_MAX_ENTRIES = 200;\nexport const MDX_RENDERER_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const RENDERER_CORE_MAX_ENTRIES = 100;\nexport const RENDERER_CORE_TTL_MS = 5 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const TSX_LAYOUT_MAX_ENTRIES = 50;\nexport const TSX_LAYOUT_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const DATA_FETCHING_MAX_ENTRIES = 200;\nexport const DATA_FETCHING_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const MDX_CACHE_TTL_PRODUCTION_MS = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE *\n MS_PER_SECOND;\nexport const MDX_CACHE_TTL_DEVELOPMENT_MS = 5 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const BUNDLE_CACHE_TTL_PRODUCTION_MS = HOURS_PER_DAY * MINUTES_PER_HOUR *\n SECONDS_PER_MINUTE * MS_PER_SECOND;\nexport const BUNDLE_CACHE_TTL_DEVELOPMENT_MS = 5 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const BUNDLE_MANIFEST_PROD_TTL_MS = 7 * HOURS_PER_DAY * MINUTES_PER_HOUR *\n SECONDS_PER_MINUTE * MS_PER_SECOND;\nexport const BUNDLE_MANIFEST_DEV_TTL_MS = MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const RSC_MANIFEST_CACHE_TTL_MS = 5000;\n\nexport const SERVER_ACTION_DEFAULT_TTL_SEC = MINUTES_PER_HOUR * SECONDS_PER_MINUTE;\n\nexport const DENO_KV_SAFE_SIZE_LIMIT_BYTES = 64_000;\n\nexport const HTTP_CACHE_SHORT_MAX_AGE_SEC = 60;\nexport const HTTP_CACHE_MEDIUM_MAX_AGE_SEC = 3600;\nexport const HTTP_CACHE_LONG_MAX_AGE_SEC = 31536000;\n\nexport const ONE_DAY_MS = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const CACHE_CLEANUP_INTERVAL_MS = 60000;\n\nexport const LRU_DEFAULT_MAX_ENTRIES = 1000;\n\nexport const LRU_DEFAULT_MAX_SIZE_BYTES = 50 * 1024 * 1024;\n\nexport const CLEANUP_INTERVAL_MULTIPLIER = 2;\n", "{\n \"name\": \"veryfront\",\n \"version\": \"0.0.51\",\n \"exclude\": [\n \"npm/\",\n \"dist/\",\n \"coverage/\",\n \"scripts/\",\n \"examples/\",\n \"tests/\",\n \"src/cli/templates/files/\",\n \"src/cli/templates/integrations/\"\n ],\n \"exports\": {\n \".\": \"./src/index.ts\",\n \"./cli\": \"./src/cli/main.ts\",\n \"./server\": \"./src/server/index.ts\",\n \"./middleware\": \"./src/middleware/index.ts\",\n \"./components\": \"./src/react/components/index.ts\",\n \"./data\": \"./src/data/index.ts\",\n \"./config\": \"./src/core/config/index.ts\",\n \"./platform\": \"./src/platform/index.ts\",\n \"./ai\": 
\"./src/ai/index.ts\",\n \"./ai/client\": \"./src/ai/client.ts\",\n \"./ai/react\": \"./src/ai/react/index.ts\",\n \"./ai/primitives\": \"./src/ai/react/primitives/index.ts\",\n \"./ai/components\": \"./src/ai/react/components/index.ts\",\n \"./ai/production\": \"./src/ai/production/index.ts\",\n \"./ai/dev\": \"./src/ai/dev/index.ts\",\n \"./ai/workflow\": \"./src/ai/workflow/index.ts\",\n \"./ai/workflow/react\": \"./src/ai/workflow/react/index.ts\",\n \"./oauth\": \"./src/core/oauth/index.ts\",\n \"./oauth/providers\": \"./src/core/oauth/providers/index.ts\",\n \"./oauth/handlers\": \"./src/core/oauth/handlers/index.ts\",\n \"./oauth/token-store\": \"./src/core/oauth/token-store/index.ts\"\n },\n \"imports\": {\n \"@veryfront\": \"./src/index.ts\",\n \"@veryfront/\": \"./src/\",\n \"@veryfront/ai\": \"./src/ai/index.ts\",\n \"@veryfront/ai/\": \"./src/ai/\",\n \"@veryfront/platform\": \"./src/platform/index.ts\",\n \"@veryfront/platform/\": \"./src/platform/\",\n \"@veryfront/types\": \"./src/core/types/index.ts\",\n \"@veryfront/types/\": \"./src/core/types/\",\n \"@veryfront/utils\": \"./src/core/utils/index.ts\",\n \"@veryfront/utils/\": \"./src/core/utils/\",\n \"@veryfront/middleware\": \"./src/middleware/index.ts\",\n \"@veryfront/middleware/\": \"./src/middleware/\",\n \"@veryfront/errors\": \"./src/core/errors/index.ts\",\n \"@veryfront/errors/\": \"./src/core/errors/\",\n \"@veryfront/config\": \"./src/core/config/index.ts\",\n \"@veryfront/config/\": \"./src/core/config/\",\n \"@veryfront/observability\": \"./src/observability/index.ts\",\n \"@veryfront/observability/\": \"./src/observability/\",\n \"@veryfront/routing\": \"./src/routing/index.ts\",\n \"@veryfront/routing/\": \"./src/routing/\",\n \"@veryfront/transforms\": \"./src/build/transforms/index.ts\",\n \"@veryfront/transforms/\": \"./src/build/transforms/\",\n \"@veryfront/data\": \"./src/data/index.ts\",\n \"@veryfront/data/\": \"./src/data/\",\n \"@veryfront/security\": \"./src/security/index.ts\",\n \"@veryfront/security/\": \"./src/security/\",\n \"@veryfront/components\": \"./src/react/components/index.ts\",\n \"@veryfront/react\": \"./src/react/index.ts\",\n \"@veryfront/react/\": \"./src/react/\",\n \"@veryfront/html\": \"./src/html/index.ts\",\n \"@veryfront/html/\": \"./src/html/\",\n \"@veryfront/rendering\": \"./src/rendering/index.ts\",\n \"@veryfront/rendering/\": \"./src/rendering/\",\n \"@veryfront/build\": \"./src/build/index.ts\",\n \"@veryfront/build/\": \"./src/build/\",\n \"@veryfront/server\": \"./src/server/index.ts\",\n \"@veryfront/server/\": \"./src/server/\",\n \"@veryfront/modules\": \"./src/module-system/index.ts\",\n \"@veryfront/modules/\": \"./src/module-system/\",\n \"@veryfront/compat/console\": \"./src/platform/compat/console/index.ts\",\n \"@veryfront/compat/\": \"./src/platform/compat/\",\n \"@veryfront/oauth\": \"./src/core/oauth/index.ts\",\n \"@veryfront/oauth/\": \"./src/core/oauth/\",\n \"std/\": \"https://deno.land/std@0.220.0/\",\n \"@std/path\": \"https://deno.land/std@0.220.0/path/mod.ts\",\n \"@std/testing/bdd.ts\": \"https://deno.land/std@0.220.0/testing/bdd.ts\",\n \"@std/expect\": \"https://deno.land/std@0.220.0/expect/mod.ts\",\n \"csstype\": \"https://esm.sh/csstype@3.2.3\",\n \"@types/react\": \"https://esm.sh/@types/react@18.3.27?deps=csstype@3.2.3\",\n \"@types/react-dom\": \"https://esm.sh/@types/react-dom@18.3.7?deps=csstype@3.2.3\",\n \"react\": \"https://esm.sh/react@18.3.1\",\n \"react-dom\": \"https://esm.sh/react-dom@18.3.1\",\n \"react-dom/server\": 
\"https://esm.sh/react-dom@18.3.1/server\",\n \"react-dom/client\": \"https://esm.sh/react-dom@18.3.1/client\",\n \"react/jsx-runtime\": \"https://esm.sh/react@18.3.1/jsx-runtime\",\n \"react/jsx-dev-runtime\": \"https://esm.sh/react@18.3.1/jsx-dev-runtime\",\n \"@mdx-js/mdx\": \"https://esm.sh/@mdx-js/mdx@3.0.0?deps=react@18.3.1,react-dom@18.3.1\",\n \"@mdx-js/react\": \"https://esm.sh/@mdx-js/react@3.0.0?deps=react@18.3.1,react-dom@18.3.1\",\n \"unist-util-visit\": \"https://esm.sh/unist-util-visit@5.0.0\",\n \"mdast-util-to-string\": \"https://esm.sh/mdast-util-to-string@4.0.0\",\n \"github-slugger\": \"https://esm.sh/github-slugger@2.0.0\",\n \"remark-gfm\": \"https://esm.sh/remark-gfm@4.0.1\",\n \"remark-frontmatter\": \"https://esm.sh/remark-frontmatter@5.0.0\",\n \"rehype-highlight\": \"https://esm.sh/rehype-highlight@7.0.2\",\n \"rehype-slug\": \"https://esm.sh/rehype-slug@6.0.0\",\n \"esbuild\": \"https://deno.land/x/esbuild@v0.20.1/wasm.js\",\n \"esbuild/mod.js\": \"https://deno.land/x/esbuild@v0.20.1/mod.js\",\n \"es-module-lexer\": \"https://esm.sh/es-module-lexer@1.5.0\",\n \"zod\": \"https://esm.sh/zod@3.22.0\",\n \"mime-types\": \"https://esm.sh/mime-types@2.1.35\",\n \"mdast\": \"https://esm.sh/@types/mdast@4.0.3\",\n \"hast\": \"https://esm.sh/@types/hast@3.0.3\",\n \"unist\": \"https://esm.sh/@types/unist@3.0.2\",\n \"unified\": \"https://esm.sh/unified@11.0.5?dts\",\n \"ai\": \"https://esm.sh/ai@5.0.76?deps=react@18.3.1,react-dom@18.3.1\",\n \"ai/react\": \"https://esm.sh/@ai-sdk/react@2.0.59?deps=react@18.3.1,react-dom@18.3.1\",\n \"@ai-sdk/react\": \"https://esm.sh/@ai-sdk/react@2.0.59?deps=react@18.3.1,react-dom@18.3.1\",\n \"@ai-sdk/openai\": \"https://esm.sh/@ai-sdk/openai@2.0.1\",\n \"@ai-sdk/anthropic\": \"https://esm.sh/@ai-sdk/anthropic@2.0.4\",\n \"unocss\": \"https://esm.sh/unocss@0.59.0\",\n \"@unocss/core\": \"https://esm.sh/@unocss/core@0.59.0\",\n \"@unocss/preset-wind\": \"https://esm.sh/@unocss/preset-wind@0.59.0\",\n \"redis\": \"npm:redis\",\n \"pg\": \"npm:pg\"\n },\n \"compilerOptions\": {\n \"jsx\": \"react-jsx\",\n \"jsxImportSource\": \"react\",\n \"strict\": true,\n \"noImplicitAny\": true,\n \"noUncheckedIndexedAccess\": true,\n \"types\": [],\n \"lib\": [\n \"deno.window\",\n \"dom\",\n \"dom.iterable\",\n \"dom.asynciterable\",\n \"deno.ns\"\n ]\n },\n \"tasks\": {\n \"setup\": \"deno run --allow-all scripts/setup.ts\",\n \"dev\": \"deno run --allow-all --no-lock --unstable-net --unstable-worker-options src/cli/main.ts dev\",\n \"build\": \"deno compile --allow-all --output ../../bin/veryfront src/cli/main.ts\",\n \"build:npm\": \"deno run -A scripts/build-npm.ts\",\n \"release\": \"deno run -A scripts/release.ts\",\n \"test\": \"DENO_JOBS=1 deno test --parallel --fail-fast --allow-all --unstable-worker-options --unstable-net\",\n \"test:unit\": \"DENO_JOBS=1 deno test --parallel --allow-all --v8-flags=--max-old-space-size=8192 --ignore=tests --unstable-worker-options --unstable-net\",\n \"test:integration\": \"DENO_JOBS=1 deno test --parallel --fail-fast --allow-all tests --unstable-worker-options --unstable-net\",\n \"test:coverage\": \"rm -rf coverage && DENO_JOBS=1 deno test --parallel --fail-fast --allow-all --coverage=coverage --unstable-worker-options --unstable-net || exit 1\",\n \"test:coverage:unit\": \"rm -rf coverage && DENO_JOBS=1 deno test --parallel --fail-fast --allow-all --coverage=coverage --ignore=tests --unstable-worker-options --unstable-net || exit 1\",\n \"test:coverage:integration\": \"rm -rf coverage && DENO_JOBS=1 
deno test --parallel --fail-fast --allow-all --coverage=coverage tests --unstable-worker-options --unstable-net || exit 1\",\n \"coverage:report\": \"deno coverage coverage --include=src/ --exclude=tests --exclude=src/**/*_test.ts --exclude=src/**/*_test.tsx --exclude=src/**/*.test.ts --exclude=src/**/*.test.tsx --lcov > coverage/lcov.info && deno run --allow-read scripts/check-coverage.ts 80\",\n \"coverage:html\": \"deno coverage coverage --include=src/ --exclude=tests --exclude=src/**/*_test.ts --exclude=src/**/*_test.tsx --exclude=src/**/*.test.ts --exclude=src/**/*.test.tsx --html\",\n \"lint\": \"DENO_NO_PACKAGE_JSON=1 deno lint src/\",\n \"fmt\": \"deno fmt src/\",\n \"typecheck\": \"deno check src/index.ts src/cli/main.ts src/server/index.ts src/routing/api/index.ts src/rendering/index.ts src/platform/index.ts src/platform/adapters/index.ts src/build/index.ts src/build/production-build/index.ts src/build/transforms/index.ts src/core/config/index.ts src/core/utils/index.ts src/data/index.ts src/security/index.ts src/middleware/index.ts src/server/handlers/dev/index.ts src/server/handlers/request/api/index.ts src/rendering/cache/index.ts src/rendering/cache/stores/index.ts src/rendering/rsc/actions/index.ts src/html/index.ts src/module-system/index.ts\",\n \"docs:check-links\": \"deno run -A scripts/check-doc-links.ts\",\n \"lint:ban-console\": \"deno run --allow-read scripts/ban-console.ts\",\n \"lint:ban-deep-imports\": \"deno run --allow-read scripts/ban-deep-imports.ts\",\n \"lint:ban-internal-root-imports\": \"deno run --allow-read scripts/ban-internal-root-imports.ts\",\n \"lint:check-awaits\": \"deno run --allow-read scripts/check-unawaited-promises.ts\",\n \"lint:platform\": \"deno run --allow-read scripts/lint-platform-agnostic.ts\",\n \"check:circular\": \"deno run -A jsr:@cunarist/deno-circular-deps src/index.ts\"\n },\n \"lint\": {\n \"include\": [\n \"src/**/*.ts\",\n \"src/**/*.tsx\"\n ],\n \"exclude\": [\n \"dist/\",\n \"coverage/\"\n ],\n \"rules\": {\n \"tags\": [\n \"recommended\"\n ],\n \"include\": [\n \"ban-untagged-todo\"\n ],\n \"exclude\": [\n \"no-explicit-any\",\n \"no-process-global\",\n \"no-console\"\n ]\n }\n },\n \"fmt\": {\n \"include\": [\n \"src/**/*.ts\",\n \"src/**/*.tsx\"\n ],\n \"exclude\": [\n \"dist/\",\n \"coverage/\"\n ],\n \"options\": {\n \"useTabs\": false,\n \"lineWidth\": 100,\n \"indentWidth\": 2,\n \"semiColons\": true,\n \"singleQuote\": false,\n \"proseWrap\": \"preserve\"\n }\n }\n}\n", "import { isDeno as IS_DENO } from \"./runtime.ts\";\n\nconst nodeProcess = (globalThis as { process?: typeof import(\"node:process\") }).process;\nconst hasNodeProcess = !!nodeProcess?.versions?.node;\n\nexport function getArgs(): string[] {\n if (IS_DENO) {\n return Deno.args;\n }\n if (hasNodeProcess) {\n return nodeProcess!.argv.slice(2);\n }\n return [];\n}\n\nexport function exit(code?: number): never {\n if (IS_DENO) {\n Deno.exit(code);\n }\n if (hasNodeProcess) {\n nodeProcess!.exit(code);\n }\n throw new Error(\"exit() is not supported in this runtime\");\n}\n\nexport function cwd(): string {\n if (IS_DENO) {\n return Deno.cwd();\n }\n if (hasNodeProcess) {\n return nodeProcess!.cwd();\n }\n throw new Error(\"cwd() is not supported in this runtime\");\n}\n\nexport function chdir(directory: string): void {\n if (IS_DENO) {\n Deno.chdir(directory);\n } else {\n if (hasNodeProcess) {\n nodeProcess!.chdir(directory);\n return;\n }\n throw new Error(\"chdir() is not supported in this runtime\");\n }\n}\n\nexport function env(): Record<string, 
string> {\n if (IS_DENO) {\n return Deno.env.toObject();\n }\n if (hasNodeProcess) {\n return nodeProcess!.env as Record<string, string>;\n }\n return {};\n}\n\nexport function getEnv(key: string): string | undefined {\n if (IS_DENO) {\n return Deno.env.get(key);\n }\n if (hasNodeProcess) {\n return nodeProcess!.env[key];\n }\n return undefined;\n}\n\n/**\n * Get an environment variable or throw if not set\n * @throws Error if the environment variable is not set\n */\nexport function requireEnv(key: string): string {\n const value = getEnv(key);\n if (value === undefined) {\n throw new Error(`Required environment variable \"${key}\" is not set`);\n }\n return value;\n}\n\nexport function setEnv(key: string, value: string): void {\n if (IS_DENO) {\n Deno.env.set(key, value);\n } else {\n if (hasNodeProcess) {\n nodeProcess!.env[key] = value;\n return;\n }\n throw new Error(\"setEnv() is not supported in this runtime\");\n }\n}\n\nexport function deleteEnv(key: string): void {\n if (IS_DENO) {\n Deno.env.delete(key);\n } else {\n if (hasNodeProcess) {\n delete nodeProcess!.env[key];\n return;\n }\n throw new Error(\"deleteEnv() is not supported in this runtime\");\n }\n}\n\nexport function pid(): number {\n if (IS_DENO) {\n return Deno.pid;\n }\n if (hasNodeProcess) {\n return nodeProcess!.pid;\n }\n return 0;\n}\n\nexport function ppid(): number {\n if (IS_DENO && \"ppid\" in Deno) {\n return Deno.ppid || 0;\n }\n if (hasNodeProcess) {\n return nodeProcess!.ppid || 0;\n }\n return 0;\n}\n\nexport function memoryUsage(): {\n rss: number;\n heapTotal: number;\n heapUsed: number;\n external: number;\n} {\n if (IS_DENO) {\n const usage = Deno.memoryUsage();\n return {\n rss: usage.rss,\n heapTotal: usage.heapTotal,\n heapUsed: usage.heapUsed,\n external: usage.external,\n };\n }\n\n if (!hasNodeProcess) {\n throw new Error(\"memoryUsage() is not supported in this runtime\");\n }\n\n const usage = nodeProcess!.memoryUsage();\n return {\n rss: usage.rss,\n heapTotal: usage.heapTotal,\n heapUsed: usage.heapUsed,\n external: usage.external || 0,\n };\n}\n\n/**\n * Check if stdin is a TTY (terminal)\n */\nexport function isInteractive(): boolean {\n if (IS_DENO) {\n return Deno.stdin.isTerminal();\n }\n if (hasNodeProcess) {\n return nodeProcess!.stdin.isTTY ?? 
false;\n }\n return false;\n}\n\n/**\n * Get network interfaces\n */\nexport async function getNetworkInterfaces(): Promise<\n Array<{ name: string; address: string; family: \"IPv4\" | \"IPv6\" }>\n> {\n if (IS_DENO) {\n const interfaces = Deno.networkInterfaces();\n return interfaces.map((iface) => ({\n name: iface.name,\n address: iface.address,\n family: iface.family as \"IPv4\" | \"IPv6\",\n }));\n }\n\n if (!hasNodeProcess) {\n throw new Error(\"networkInterfaces() is not supported in this runtime\");\n }\n\n const os = await import(\"node:os\");\n const interfaces = os.networkInterfaces();\n const result: Array<{ name: string; address: string; family: \"IPv4\" | \"IPv6\" }> = [];\n\n for (const [name, addrs] of Object.entries(interfaces)) {\n if (!addrs) continue;\n for (const addr of addrs) {\n result.push({\n name,\n address: addr.address,\n family: addr.family as \"IPv4\" | \"IPv6\",\n });\n }\n }\n\n return result;\n}\n\n/**\n * Get runtime version string\n */\nexport function getRuntimeVersion(): string {\n if (IS_DENO) {\n return `Deno ${Deno.version.deno}`;\n }\n if (\"Bun\" in globalThis) {\n return `Bun ${(globalThis as unknown as { Bun: { version: string } }).Bun.version}`;\n }\n if (hasNodeProcess) {\n return `Node.js ${nodeProcess!.version}`;\n }\n return \"unknown\";\n}\n\n/**\n * Register a signal handler (SIGINT, SIGTERM) for graceful shutdown\n */\nexport function onSignal(signal: \"SIGINT\" | \"SIGTERM\", handler: () => void): void {\n if (IS_DENO) {\n Deno.addSignalListener(signal, handler);\n } else if (hasNodeProcess) {\n nodeProcess!.on(signal, handler);\n }\n}\n\n/**\n * Unreference a timer to prevent it from keeping the process alive\n */\nexport function unrefTimer(timerId: ReturnType<typeof setInterval>): void {\n if (IS_DENO) {\n Deno.unrefTimer(timerId as number);\n } else if (timerId && typeof timerId === \"object\" && \"unref\" in timerId) {\n (timerId as { unref: () => void }).unref();\n }\n}\n\n/**\n * Get the executable path of the current runtime\n */\nexport function execPath(): string {\n if (IS_DENO) {\n return Deno.execPath();\n }\n if (hasNodeProcess) {\n return nodeProcess!.execPath;\n }\n return \"\";\n}\n\n/**\n * Get process uptime in seconds\n * Returns OS uptime on Deno, process uptime on Node.js\n */\nexport function uptime(): number {\n if (IS_DENO) {\n // Deno.osUptime() returns system uptime in seconds\n return Deno.osUptime?.() ?? 0;\n }\n if (hasNodeProcess) {\n // process.uptime() returns process uptime in seconds\n return nodeProcess!.uptime?.() ?? 
0;\n }\n return 0;\n}\n\n/**\n * Get stdout stream for writing\n * Returns null if not available (e.g., in browser/workers)\n */\nexport function getStdout(): { write: (data: string) => void } | null {\n if (IS_DENO) {\n const encoder = new TextEncoder();\n return {\n write: (data: string) => {\n Deno.stdout.writeSync(encoder.encode(data));\n },\n };\n }\n if (hasNodeProcess && nodeProcess!.stdout) {\n return {\n write: (data: string) => {\n nodeProcess!.stdout.write(data);\n },\n };\n }\n return null;\n}\n\n// Cached Node.js modules for synchronous prompt\nlet cachedNodeFs: typeof import(\"node:fs\") | null = null;\n\n/**\n * Synchronous prompt function that works across Deno and Node.js\n * Displays a message and reads user input from stdin\n */\nexport function promptSync(message?: string): string | null {\n if (IS_DENO) {\n // Deno has a built-in prompt() function\n return prompt(message);\n }\n\n if (hasNodeProcess) {\n // Print the message\n if (message) {\n nodeProcess!.stdout.write(message + \" \");\n }\n\n // Lazy load fs module\n if (!cachedNodeFs) {\n // Dynamic import converted to sync require for bundling\n // @ts-ignore - dynamic require for Node.js\n cachedNodeFs = globalThis.require?.(\"node:fs\") || null;\n if (!cachedNodeFs) {\n // Try alternative approach\n try {\n // @ts-ignore: __require is injected by bundlers for Node.js require\n cachedNodeFs = __require(\"node:fs\");\n } catch {\n return null;\n }\n }\n }\n\n if (!cachedNodeFs) {\n return null;\n }\n\n // Read synchronously using fs\n // This works by reading from file descriptor 0 (stdin)\n // Use Uint8Array for cross-platform compatibility\n const bufferSize = 1024;\n const uint8Array = new Uint8Array(bufferSize);\n let input = \"\";\n\n try {\n // Read from stdin (fd 0) synchronously\n const bytesRead = cachedNodeFs.readSync(0, uint8Array, 0, bufferSize, null);\n if (bytesRead > 0) {\n const decoder = new TextDecoder(\"utf-8\");\n input = decoder.decode(uint8Array.subarray(0, bytesRead)).trim();\n }\n } catch {\n // If stdin is not available or EOF, return null\n return null;\n }\n\n return input || null;\n }\n\n return null;\n}\n", "import denoConfig from \"../../../deno.json\" with { type: \"json\" };\nimport { getEnv } from \"../../platform/compat/process.ts\";\n\nexport const VERSION: string = getEnv(\"VERYFRONT_VERSION\") ||\n (typeof denoConfig.version === \"string\" ? 
denoConfig.version : \"0.0.0\");\n", "export const KB_IN_BYTES = 1024;\n\nexport const HTTP_MODULE_FETCH_TIMEOUT_MS = 2500;\n\nexport const HMR_RECONNECT_DELAY_MS = 1000;\n\nexport const HMR_RELOAD_DELAY_MS = 1000;\n\nexport const HMR_FILE_WATCHER_DEBOUNCE_MS = 100;\n\nexport const HMR_KEEP_ALIVE_INTERVAL_MS = 30000;\n\nexport const DASHBOARD_RECONNECT_DELAY_MS = 3000;\n\nexport const SERVER_FUNCTION_DEFAULT_TIMEOUT_MS = 30000;\n\nexport const PREFETCH_MAX_SIZE_BYTES = 200 * KB_IN_BYTES;\n\nexport const PREFETCH_DEFAULT_TIMEOUT_MS = 10000;\n\nexport const PREFETCH_DEFAULT_DELAY_MS = 200;\n\nexport const HTTP_OK = 200;\n\nexport const HTTP_NO_CONTENT = 204;\n\nexport const HTTP_CREATED = 201;\n\nexport const HTTP_REDIRECT_FOUND = 302;\n\nexport const HTTP_NOT_MODIFIED = 304;\n\nexport const HTTP_BAD_REQUEST = 400;\n\nexport const HTTP_UNAUTHORIZED = 401;\n\nexport const HTTP_FORBIDDEN = 403;\n\nexport const HTTP_NOT_FOUND = 404;\n\nexport const HTTP_METHOD_NOT_ALLOWED = 405;\n\nexport const HTTP_GONE = 410;\n\nexport const HTTP_PAYLOAD_TOO_LARGE = 413;\n\nexport const HTTP_URI_TOO_LONG = 414;\n\nexport const HTTP_TOO_MANY_REQUESTS = 429;\n\nexport const HTTP_REQUEST_HEADER_FIELDS_TOO_LARGE = 431;\n\nexport const HTTP_SERVER_ERROR = 500;\n\nexport const HTTP_INTERNAL_SERVER_ERROR = 500;\n\nexport const HTTP_BAD_GATEWAY = 502;\n\nexport const HTTP_NOT_IMPLEMENTED = 501;\n\nexport const HTTP_UNAVAILABLE = 503;\n\nexport const HTTP_NETWORK_CONNECT_TIMEOUT = 599;\n\nexport const HTTP_STATUS_SUCCESS_MIN = 200;\n\nexport const HTTP_STATUS_REDIRECT_MIN = 300;\n\nexport const HTTP_STATUS_CLIENT_ERROR_MIN = 400;\n\nexport const HTTP_STATUS_SERVER_ERROR_MIN = 500;\n\nexport const HTTP_CONTENT_TYPES = {\n JS: \"application/javascript; charset=utf-8\",\n JSON: \"application/json; charset=utf-8\",\n HTML: \"text/html; charset=utf-8\",\n CSS: \"text/css; charset=utf-8\",\n TEXT: \"text/plain; charset=utf-8\",\n} as const;\n\nimport { MS_PER_SECOND, SECONDS_PER_MINUTE } from \"./cache.ts\";\n\nexport const MS_PER_MINUTE = 60000;\n\nexport { MS_PER_SECOND, SECONDS_PER_MINUTE };\n\nexport const HTTP_CONTENT_TYPE_IMAGE_PNG = \"image/png\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_JPEG = \"image/jpeg\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_WEBP = \"image/webp\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_AVIF = \"image/avif\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_SVG = \"image/svg+xml\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_GIF = \"image/gif\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_ICO = \"image/x-icon\";\n", "import { KB_IN_BYTES } from \"./http.ts\";\n\nexport const HMR_MAX_MESSAGE_SIZE_BYTES = 1024 * KB_IN_BYTES;\n\nexport const HMR_MAX_MESSAGES_PER_MINUTE = 100;\n\nexport const HMR_CLIENT_RELOAD_DELAY_MS = 3000;\n\nexport const HMR_PORT_OFFSET = 1;\n\nexport const HMR_RATE_LIMIT_WINDOW_MS = 60000;\n\nexport const HMR_CLOSE_NORMAL = 1000;\n\nexport const HMR_CLOSE_RATE_LIMIT = 1008;\n\nexport const HMR_CLOSE_MESSAGE_TOO_LARGE = 1009;\n\nexport const HMR_MESSAGE_TYPES = {\n CONNECTED: \"connected\",\n UPDATE: \"update\",\n RELOAD: \"reload\",\n PING: \"ping\",\n PONG: \"pong\",\n} as const;\n\nexport function isValidHMRMessageType(type: string): type is keyof typeof HMR_MESSAGE_TYPES {\n return Object.values(HMR_MESSAGE_TYPES).includes(\n type as typeof HMR_MESSAGE_TYPES[keyof typeof HMR_MESSAGE_TYPES],\n );\n}\n", "export const DEFAULT_DEV_SERVER_PORT = 3000;\nexport const DEFAULT_REDIS_PORT = 6379;\nexport const DEFAULT_API_SERVER_PORT = 8080;\nexport const DEFAULT_PREVIEW_SERVER_PORT = 
5000;\nexport const DEFAULT_METRICS_PORT = 9000;\n\nexport const BYTES_PER_KB = 1024;\nexport const BYTES_PER_MB = 1024 * 1024;\n\nexport const DEFAULT_IMAGE_THUMBNAIL_SIZE = 256;\nexport const DEFAULT_IMAGE_SMALL_SIZE = 512;\nexport const DEFAULT_IMAGE_LARGE_SIZE = 2048;\n\nexport const RESPONSIVE_IMAGE_WIDTH_XS = 320;\nexport const RESPONSIVE_IMAGE_WIDTH_SM = 640;\nexport const RESPONSIVE_IMAGE_WIDTH_MD = 1024;\nexport const RESPONSIVE_IMAGE_WIDTH_LG = 1920;\n\nexport const RESPONSIVE_IMAGE_WIDTHS = [\n RESPONSIVE_IMAGE_WIDTH_XS,\n RESPONSIVE_IMAGE_WIDTH_SM,\n RESPONSIVE_IMAGE_WIDTH_MD,\n RESPONSIVE_IMAGE_WIDTH_LG,\n] as const;\n\nexport const MAX_CHUNK_SIZE_KB = 4096;\n\nexport const MIN_PORT = 1;\n\nexport const MAX_PORT = 65535;\n\nexport const DEFAULT_SERVER_PORT = 8000;\n", "/**\n * Centralized server endpoints and paths registry\n *\n * All internal veryfront URLs should be defined here as the single source of truth.\n * This prevents hardcoding URLs across the codebase and makes refactoring easier.\n */\n\n/** Default port for development dashboard */\nexport const DEFAULT_DASHBOARD_PORT = 3002;\n\n/** Default port for veryfront server */\nexport const DEFAULT_PORT = 3000;\n\n/** Internal URL prefix for all veryfront endpoints */\nexport const INTERNAL_PREFIX = \"/_veryfront\" as const;\n\n/**\n * All internal veryfront URL path prefixes (directories)\n */\nexport const INTERNAL_PATH_PREFIXES = {\n /** React Server Components endpoints */\n RSC: `${INTERNAL_PREFIX}/rsc/`,\n /** File system access endpoints (base64 encoded paths) */\n FS: `${INTERNAL_PREFIX}/fs/`,\n /** Virtual module system */\n MODULES: `${INTERNAL_PREFIX}/modules/`,\n /** Generated page modules */\n PAGES: `${INTERNAL_PREFIX}/pages/`,\n /** Data JSON endpoints */\n DATA: `${INTERNAL_PREFIX}/data/`,\n /** Library modules (AI SDK, etc.) 
*/\n LIB: `${INTERNAL_PREFIX}/lib/`,\n /** Chunk assets */\n CHUNKS: `${INTERNAL_PREFIX}/chunks/`,\n /** Client component modules */\n CLIENT: `${INTERNAL_PREFIX}/client/`,\n} as const;\n\n/**\n * Specific internal endpoint URLs\n */\nexport const INTERNAL_ENDPOINTS = {\n // Development endpoints\n HMR_RUNTIME: `${INTERNAL_PREFIX}/hmr-runtime.js`,\n HMR: `${INTERNAL_PREFIX}/hmr.js`,\n HYDRATE: `${INTERNAL_PREFIX}/hydrate.js`,\n ERROR_OVERLAY: `${INTERNAL_PREFIX}/error-overlay.js`,\n DEV_LOADER: `${INTERNAL_PREFIX}/dev-loader.js`,\n CLIENT_LOG: `${INTERNAL_PREFIX}/log`,\n\n // Production endpoints\n CLIENT_JS: `${INTERNAL_PREFIX}/client.js`,\n ROUTER_JS: `${INTERNAL_PREFIX}/router.js`,\n PREFETCH_JS: `${INTERNAL_PREFIX}/prefetch.js`,\n MANIFEST_JSON: `${INTERNAL_PREFIX}/manifest.json`,\n APP_JS: `${INTERNAL_PREFIX}/app.js`,\n\n // RSC endpoints\n RSC_CLIENT: `${INTERNAL_PREFIX}/rsc/client.js`,\n RSC_MANIFEST: `${INTERNAL_PREFIX}/rsc/manifest`,\n RSC_STREAM: `${INTERNAL_PREFIX}/rsc/stream`,\n RSC_PAYLOAD: `${INTERNAL_PREFIX}/rsc/payload`,\n RSC_RENDER: `${INTERNAL_PREFIX}/rsc/render`,\n RSC_PAGE: `${INTERNAL_PREFIX}/rsc/page`,\n RSC_MODULE: `${INTERNAL_PREFIX}/rsc/module`,\n RSC_DOM: `${INTERNAL_PREFIX}/rsc/dom.js`,\n RSC_HYDRATOR: `${INTERNAL_PREFIX}/rsc/hydrator.js`,\n RSC_HYDRATE_CLIENT: `${INTERNAL_PREFIX}/rsc/hydrate-client.js`,\n\n // Library module endpoints\n LIB_AI_REACT: `${INTERNAL_PREFIX}/lib/ai/react.js`,\n LIB_AI_COMPONENTS: `${INTERNAL_PREFIX}/lib/ai/components.js`,\n LIB_AI_PRIMITIVES: `${INTERNAL_PREFIX}/lib/ai/primitives.js`,\n} as const;\n\n/**\n * Build output directory paths (relative)\n */\nexport const BUILD_DIRS = {\n /** Main build output directory */\n ROOT: \"_veryfront\",\n /** Chunks directory */\n CHUNKS: \"_veryfront/chunks\",\n /** Data directory */\n DATA: \"_veryfront/data\",\n /** Assets directory */\n ASSETS: \"_veryfront/assets\",\n} as const;\n\n/**\n * Local project directory paths (relative to project root)\n * These are .gitignore'd directories for caching and temporary files\n */\nexport const PROJECT_DIRS = {\n /** Base veryfront internal directory */\n ROOT: \".veryfront\",\n /** Cache directory for build artifacts, transforms, etc. 
*/\n CACHE: \".veryfront/cache\",\n /** KV store directory */\n KV: \".veryfront/kv\",\n /** Log files directory */\n LOGS: \".veryfront/logs\",\n /** Temporary files directory */\n TMP: \".veryfront/tmp\",\n} as const;\n\n/** Default cache directory path */\nexport const DEFAULT_CACHE_DIR = PROJECT_DIRS.CACHE;\n\n/**\n * Helper to check if a pathname is an internal veryfront endpoint\n */\nexport function isInternalEndpoint(pathname: string): boolean {\n return pathname.startsWith(INTERNAL_PREFIX + \"/\");\n}\n\n/**\n * Helper to check if a pathname is a static asset (has extension or is internal)\n */\nexport function isStaticAsset(pathname: string): boolean {\n return pathname.includes(\".\") || isInternalEndpoint(pathname);\n}\n\n/**\n * Normalize a chunk path to include the base prefix\n */\nexport function normalizeChunkPath(\n filename: string,\n basePath: string = INTERNAL_PATH_PREFIXES.CHUNKS,\n): string {\n if (filename.startsWith(\"/\")) {\n return filename;\n }\n return `${basePath.replace(/\\/$/, \"\")}/${filename}`;\n}\n\n// Re-export for backward compatibility\nexport const DEV_SERVER_ENDPOINTS = {\n HMR_RUNTIME: INTERNAL_ENDPOINTS.HMR_RUNTIME,\n ERROR_OVERLAY: INTERNAL_ENDPOINTS.ERROR_OVERLAY,\n} as const;\n", "/**\n * Project directory paths and file extensions\n *\n * For internal veryfront URL endpoints, see ./constants/server.ts\n */\n\nimport {\n BUILD_DIRS,\n INTERNAL_ENDPOINTS,\n INTERNAL_PATH_PREFIXES,\n INTERNAL_PREFIX,\n} from \"./constants/server.ts\";\n\nexport const PATHS = {\n PAGES_DIR: \"pages\",\n COMPONENTS_DIR: \"components\",\n PUBLIC_DIR: \"public\",\n STYLES_DIR: \"styles\",\n DIST_DIR: \"dist\",\n CONFIG_FILE: \"veryfront.config.js\",\n} as const;\n\n/**\n * @deprecated Use INTERNAL_PREFIX, INTERNAL_ENDPOINTS, INTERNAL_PATH_PREFIXES from ./constants/server.ts\n */\nexport const VERYFRONT_PATHS = {\n INTERNAL_PREFIX: INTERNAL_PREFIX,\n BUILD_DIR: BUILD_DIRS.ROOT,\n CHUNKS_DIR: BUILD_DIRS.CHUNKS,\n DATA_DIR: BUILD_DIRS.DATA,\n ASSETS_DIR: BUILD_DIRS.ASSETS,\n HMR_RUNTIME: INTERNAL_ENDPOINTS.HMR_RUNTIME,\n CLIENT_JS: INTERNAL_ENDPOINTS.CLIENT_JS,\n ROUTER_JS: INTERNAL_ENDPOINTS.ROUTER_JS,\n ERROR_OVERLAY: INTERNAL_ENDPOINTS.ERROR_OVERLAY,\n} as const;\n\nexport const FILE_EXTENSIONS = {\n MDX: [\".mdx\", \".md\"],\n SCRIPT: [\".tsx\", \".ts\", \".jsx\", \".js\"],\n STYLE: [\".css\", \".scss\", \".sass\"],\n ALL: [\".mdx\", \".md\", \".tsx\", \".ts\", \".jsx\", \".js\", \".css\"],\n} as const;\n\n// Re-export for convenience\nexport { BUILD_DIRS, INTERNAL_ENDPOINTS, INTERNAL_PATH_PREFIXES, INTERNAL_PREFIX };\n", "import { serverLogger as logger } from \"./logger/index.ts\";\n\nexport interface BundleMetadata {\n hash: string;\n codeHash: string;\n size: number;\n compiledAt: number;\n source: string;\n mode: \"development\" | \"production\";\n meta?: {\n type?: \"mdx\" | \"component\" | \"layout\" | \"provider\";\n depsHash?: string;\n reactVersion?: string;\n };\n}\n\nexport interface BundleCode {\n code: string;\n sourceMap?: string;\n css?: string;\n}\n\nexport interface BundleManifestStore {\n getBundleMetadata(key: string): Promise<BundleMetadata | undefined>;\n\n setBundleMetadata(key: string, metadata: BundleMetadata, ttlMs?: number): Promise<void>;\n\n getBundleCode(hash: string): Promise<BundleCode | undefined>;\n\n setBundleCode(hash: string, code: BundleCode, ttlMs?: number): Promise<void>;\n\n deleteBundle(key: string): Promise<void>;\n\n invalidateSource(source: string): Promise<number>;\n\n clear(): Promise<void>;\n\n isAvailable(): 
Promise<boolean>;\n\n getStats(): Promise<{\n totalBundles: number;\n totalSize: number;\n oldestBundle?: number;\n newestBundle?: number;\n }>;\n}\n\nexport class InMemoryBundleManifestStore implements BundleManifestStore {\n private metadata = new Map<string, { value: BundleMetadata; expiry?: number }>();\n private code = new Map<string, { value: BundleCode; expiry?: number }>();\n private sourceIndex = new Map<string, Set<string>>();\n\n getBundleMetadata(key: string): Promise<BundleMetadata | undefined> {\n const entry = this.metadata.get(key);\n if (!entry) return Promise.resolve(undefined);\n if (entry.expiry && Date.now() > entry.expiry) {\n this.metadata.delete(key);\n return Promise.resolve(undefined);\n }\n return Promise.resolve(entry.value);\n }\n\n setBundleMetadata(key: string, metadata: BundleMetadata, ttlMs?: number): Promise<void> {\n const expiry = ttlMs ? Date.now() + ttlMs : undefined;\n this.metadata.set(key, { value: metadata, expiry });\n\n if (!this.sourceIndex.has(metadata.source)) {\n this.sourceIndex.set(metadata.source, new Set());\n }\n this.sourceIndex.get(metadata.source)!.add(key);\n return Promise.resolve();\n }\n\n getBundleCode(hash: string): Promise<BundleCode | undefined> {\n const entry = this.code.get(hash);\n if (!entry) return Promise.resolve(undefined);\n if (entry.expiry && Date.now() > entry.expiry) {\n this.code.delete(hash);\n return Promise.resolve(undefined);\n }\n return Promise.resolve(entry.value);\n }\n\n setBundleCode(hash: string, code: BundleCode, ttlMs?: number): Promise<void> {\n const expiry = ttlMs ? Date.now() + ttlMs : undefined;\n this.code.set(hash, { value: code, expiry });\n return Promise.resolve();\n }\n\n async deleteBundle(key: string): Promise<void> {\n const metadata = await this.getBundleMetadata(key);\n this.metadata.delete(key);\n if (metadata) {\n this.code.delete(metadata.codeHash);\n const sourceKeys = this.sourceIndex.get(metadata.source);\n if (sourceKeys) {\n sourceKeys.delete(key);\n if (sourceKeys.size === 0) {\n this.sourceIndex.delete(metadata.source);\n }\n }\n }\n }\n\n async invalidateSource(source: string): Promise<number> {\n const keys = this.sourceIndex.get(source);\n if (!keys) return 0;\n\n let count = 0;\n for (const key of Array.from(keys)) {\n await this.deleteBundle(key);\n count++;\n }\n this.sourceIndex.delete(source);\n return count;\n }\n\n clear(): Promise<void> {\n this.metadata.clear();\n this.code.clear();\n this.sourceIndex.clear();\n return Promise.resolve();\n }\n\n isAvailable(): Promise<boolean> {\n return Promise.resolve(true);\n }\n\n getStats(): Promise<{\n totalBundles: number;\n totalSize: number;\n oldestBundle?: number;\n newestBundle?: number;\n }> {\n let totalSize = 0;\n let oldest: number | undefined;\n let newest: number | undefined;\n\n for (const { value } of this.metadata.values()) {\n totalSize += value.size;\n if (!oldest || value.compiledAt < oldest) oldest = value.compiledAt;\n if (!newest || value.compiledAt > newest) newest = value.compiledAt;\n }\n\n return Promise.resolve({\n totalBundles: this.metadata.size,\n totalSize,\n oldestBundle: oldest,\n newestBundle: newest,\n });\n }\n}\n\nlet manifestStore: BundleManifestStore = new InMemoryBundleManifestStore();\n\nexport function setBundleManifestStore(store: BundleManifestStore): void {\n manifestStore = store;\n logger.info(\"[bundle-manifest] Bundle manifest store configured\", {\n type: store.constructor.name,\n });\n}\n\nexport function getBundleManifestStore(): BundleManifestStore {\n return 
manifestStore;\n}\n\nexport { computeCodeHash, computeContentHash } from \"./hash-utils.ts\";\n", "/**\n * Local File System Blob Storage\n *\n * Stores blobs as files on the local disk\n */\n\nimport { dirname, join } from \"../../../platform/compat/path-helper.ts\";\nimport { createFileSystem, FileSystem } from \"../../../platform/compat/fs.ts\";\nimport type { BlobRef, BlobStorage, StoreBlobOptions } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\n\nexport class LocalBlobStorage implements BlobStorage {\n private rootDir: string;\n private baseUrl?: string;\n private fs: FileSystem;\n\n constructor(rootDir: string, baseUrl?: string) {\n this.rootDir = rootDir;\n this.baseUrl = baseUrl;\n this.fs = createFileSystem();\n }\n\n private getPath(id: string): string {\n // Partition by first 2 chars to avoid too many files in one dir\n const prefix = id.slice(0, 2);\n return join(this.rootDir, prefix, id);\n }\n\n private getMetadataPath(id: string): string {\n return this.getPath(id) + \".meta.json\";\n }\n\n async put(\n data: string | Uint8Array | Blob | ReadableStream,\n options: StoreBlobOptions = {},\n ): Promise<BlobRef> {\n const id = options.id || crypto.randomUUID();\n const filePath = this.getPath(id);\n const metaPath = this.getMetadataPath(id);\n\n await this.fs.mkdir(dirname(filePath), { recursive: true });\n\n let size = 0;\n\n if (typeof data === \"string\") {\n await this.fs.writeTextFile(filePath, data);\n size = new TextEncoder().encode(data).length;\n } else if (data instanceof Uint8Array) {\n await this.fs.writeFile(filePath, data);\n size = data.length;\n } else if (data instanceof Blob) {\n const arr = new Uint8Array(await data.arrayBuffer());\n await this.fs.writeFile(filePath, arr);\n size = data.size;\n } else if (data instanceof ReadableStream) {\n // Normalize stream to bytes for cross-runtime compatibility\n const buffer = new Uint8Array(await new Response(data).arrayBuffer());\n await this.fs.writeFile(filePath, buffer);\n size = buffer.length;\n } else {\n throw new Error(\"Unsupported data type for LocalBlobStorage\");\n }\n\n const ref: BlobRef = {\n __kind: \"blob\",\n id,\n size,\n mimeType: options.mimeType || \"application/octet-stream\",\n createdAt: new Date(),\n expiresAt: options.ttl ? new Date(Date.now() + options.ttl * 1000) : undefined,\n metadata: options.metadata,\n url: this.baseUrl ? 
`${this.baseUrl}/${id}` : undefined,\n };\n\n await this.fs.writeTextFile(metaPath, JSON.stringify(ref));\n\n return ref;\n }\n\n async getStream(id: string): Promise<ReadableStream | null> {\n try {\n const bytes = await this.getBytes(id);\n if (!bytes) return null;\n // Create a minimal cross-runtime ReadableStream from the bytes\n return new ReadableStream({\n start(controller) {\n controller.enqueue(bytes);\n controller.close();\n },\n });\n } catch {\n return null;\n }\n }\n\n async getText(id: string): Promise<string | null> {\n const filePath = this.getPath(id);\n try {\n return await this.fs.readTextFile(filePath);\n } catch {\n return null;\n }\n }\n\n async getBytes(id: string): Promise<Uint8Array | null> {\n const filePath = this.getPath(id);\n try {\n return await this.fs.readFile(filePath);\n } catch {\n return null;\n }\n }\n\n async delete(id: string): Promise<void> {\n const filePath = this.getPath(id);\n const metaPath = this.getMetadataPath(id);\n try {\n await this.fs.remove(filePath);\n await this.fs.remove(metaPath);\n } catch {\n // Ignore if not found\n }\n }\n\n async exists(id: string): Promise<boolean> {\n const filePath = this.getPath(id);\n return await this.fs.exists(filePath);\n }\n\n async stat(id: string): Promise<BlobRef | null> {\n const metaPath = this.getMetadataPath(id);\n try {\n const json = await this.fs.readTextFile(metaPath);\n const data = JSON.parse(json);\n return {\n ...data,\n createdAt: new Date(data.createdAt),\n expiresAt: data.expiresAt ? new Date(data.expiresAt) : undefined,\n };\n } catch {\n return null;\n }\n }\n\n /**\n * Cleans up all expired blobs from storage.\n * This method should typically be run periodically by an external process.\n */\n async cleanupExpiredBlobs(): Promise<void> {\n // Iterate over prefixes (00-ff)\n for (let i = 0; i < 256; i++) {\n const prefix = i.toString(16).padStart(2, \"0\");\n const prefixDir = join(this.rootDir, prefix);\n try {\n for await (const entry of this.fs.readDir(prefixDir)) {\n if (entry.isFile && entry.name.endsWith(\".meta.json\")) {\n const id = entry.name.replace(\".meta.json\", \"\");\n const blobRef = await this.stat(id);\n if (blobRef && blobRef.expiresAt && blobRef.expiresAt < new Date()) {\n logger.debug(`[LocalBlobStorage] Deleting expired blob: ${id}`);\n await this.delete(id);\n }\n }\n }\n } catch (_e) {\n // Directory not found is fine, skip\n continue;\n }\n }\n }\n}\n", "/**\n * S3 Blob Storage\n *\n * Stores blobs in AWS S3.\n *\n * NOTE: This module uses dynamic imports for @aws-sdk/client-s3 to avoid\n * requiring the AWS SDK as a mandatory dependency. 
The SDK is only loaded\n * when S3BlobStorage is instantiated.\n */\n\nimport type { BlobRef, BlobStorage, StoreBlobOptions } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\nimport { isDeno } from \"@veryfront/platform/compat/runtime.ts\";\n\n// Type definitions for AWS SDK (to avoid top-level import)\ntype S3ClientType = import(\"@aws-sdk/client-s3\").S3Client;\ntype PutObjectCommandType = import(\"@aws-sdk/client-s3\").PutObjectCommand;\ntype GetObjectCommandType = import(\"@aws-sdk/client-s3\").GetObjectCommand;\ntype DeleteObjectCommandType = import(\"@aws-sdk/client-s3\").DeleteObjectCommand;\ntype HeadObjectCommandType = import(\"@aws-sdk/client-s3\").HeadObjectCommand;\ntype CreateBucketCommandType = import(\"@aws-sdk/client-s3\").CreateBucketCommand;\n\n// Cached module reference for lazy loading\nlet s3Module: typeof import(\"@aws-sdk/client-s3\") | null = null;\n\n/**\n * Dynamically import the AWS SDK (lazy loading)\n * This allows the module to be loaded without requiring @aws-sdk/client-s3 to be installed\n * unless S3BlobStorage is actually used.\n */\nasync function getS3Module(): Promise<typeof import(\"@aws-sdk/client-s3\")> {\n if (s3Module) {\n return s3Module;\n }\n\n try {\n // Try Deno's esm.sh import first (for Deno runtime)\n if (isDeno) {\n s3Module = await import(\"https://esm.sh/@aws-sdk/client-s3@3.490.0\");\n } else {\n // For Node.js runtime, use bare specifier\n s3Module = await import(\"@aws-sdk/client-s3\");\n }\n return s3Module;\n } catch (error) {\n throw new Error(\n `Failed to load @aws-sdk/client-s3. Please install it: npm install @aws-sdk/client-s3\\n` +\n `Original error: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n}\n\nexport interface S3BlobStorageConfig {\n /** AWS Region */\n region: string;\n /** S3 Bucket name */\n bucket: string;\n /** AWS Access Key ID */\n accessKeyId: string;\n /** AWS Secret Access Key */\n secretAccessKey: string;\n /** Optional S3 endpoint (for localstack or compatible storage) */\n endpoint?: string;\n /** Force path style URLs (required for MinIO/Localstack) */\n forcePathStyle?: boolean;\n /** Key prefix for namespacing blobs */\n prefix?: string;\n /** Base URL for constructing public URLs (if bucket is public) */\n baseUrl?: string;\n /** Default TTL for blobs in seconds */\n defaultTtl?: number;\n /** Automatically create the bucket if it does not exist (useful for local development) */\n autoCreateBucket?: boolean;\n}\n\nexport class S3BlobStorage implements BlobStorage {\n private client: S3ClientType | null = null;\n private config: S3BlobStorageConfig;\n private initPromise: Promise<void> | null = null;\n\n constructor(config: S3BlobStorageConfig) {\n this.config = config;\n // Trigger initialization (but don't await in constructor)\n this.initPromise = this.initialize();\n }\n\n /**\n * Initialize the S3 client asynchronously\n */\n private async initialize(): Promise<void> {\n const { S3Client } = await getS3Module();\n this.client = new S3Client({\n region: this.config.region,\n credentials: {\n accessKeyId: this.config.accessKeyId,\n secretAccessKey: this.config.secretAccessKey,\n },\n endpoint: this.config.endpoint,\n forcePathStyle: this.config.forcePathStyle,\n });\n }\n\n /**\n * Ensure the S3 client is initialized before use\n */\n private async ensureInitialized(): Promise<S3ClientType> {\n if (this.initPromise) {\n await this.initPromise;\n this.initPromise = null;\n }\n if (!this.client) {\n throw new Error(\"S3BlobStorage: Client failed 
to initialize\");\n }\n return this.client;\n }\n\n private getKey(id: string): string {\n return this.config.prefix ? `${this.config.prefix}${id}` : id;\n }\n\n async put(\n data: string | Uint8Array | Blob | ReadableStream,\n options: StoreBlobOptions = {},\n ): Promise<BlobRef> {\n const client = await this.ensureInitialized();\n const { PutObjectCommand, CreateBucketCommand, HeadObjectCommand } = await getS3Module();\n\n const id = options.id || crypto.randomUUID();\n const key = this.getKey(id);\n const mimeType = options.mimeType || \"application/octet-stream\";\n const createdAt = new Date();\n const ttl = options.ttl ?? this.config.defaultTtl;\n const expiresAt = ttl ? new Date(createdAt.getTime() + ttl * 1000) : undefined;\n\n let body: string | Uint8Array | Blob | ReadableStream;\n let contentLength: number | undefined;\n\n if (typeof data === \"string\") {\n body = new TextEncoder().encode(data);\n contentLength = body.byteLength;\n } else if (data instanceof Uint8Array) {\n body = data;\n contentLength = data.byteLength;\n } else if (data instanceof Blob) {\n body = data;\n contentLength = data.size;\n } else if (data instanceof ReadableStream) {\n // For ReadableStream, S3 PutObjectCommand can directly accept it.\n // Content-Length is often required for streams, but sometimes S3 can infer it.\n // If it consistently fails, we might need to buffer the stream or require content-length in options.\n body = data;\n // Cannot determine contentLength easily from ReadableStream without consuming it.\n // If backend requires, user must provide via options.\n } else {\n throw new Error(\"Unsupported data type for S3BlobStorage\");\n }\n\n const putCommand = new PutObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n Body: body,\n ContentType: mimeType,\n ContentLength: contentLength, // Pass if known\n Expires: expiresAt, // S3 uses Expires header for HTTP caches, not lifecycle rules directly\n Metadata: options.metadata, // Custom metadata\n });\n\n try {\n await client.send(putCommand);\n } catch (e: any) {\n if (e.name === \"NoSuchBucket\" && this.config.autoCreateBucket) {\n // Bucket doesn't exist, try to create it\n try {\n await client.send(new CreateBucketCommand({ Bucket: this.config.bucket }));\n // Retry the put operation\n await client.send(putCommand);\n } catch (createError) {\n // If creation fails (e.g., race condition), throw the original error or the new one\n logger.error(\"Failed to auto-create bucket:\", createError);\n throw e;\n }\n } else {\n throw e;\n }\n }\n\n // S3 does not return size directly on PutObject. We can do a HeadObject to get it.\n // Or, for simplicity, use the contentLength we determined or was passed.\n let size = contentLength || 0; // Fallback if stream length is unknown\n if (size === 0) {\n try {\n const headCommand = new HeadObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n });\n const headResult = await client.send(headCommand);\n size = headResult.ContentLength || 0;\n } catch (e) {\n logger.warn(`Could not get size for S3 blob ${key} after put:`, e);\n }\n }\n\n return {\n __kind: \"blob\",\n id,\n size,\n mimeType,\n createdAt,\n expiresAt,\n metadata: options.metadata,\n url: this.config.baseUrl ? 
`${this.config.baseUrl}/${key}` : undefined,\n };\n }\n\n async getStream(id: string): Promise<ReadableStream | null> {\n const client = await this.ensureInitialized();\n const { GetObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n try {\n const getCommand = new GetObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n });\n const response = await client.send(getCommand);\n if (response.Body) {\n // The S3 SDK returns an AsyncIterable (which is also a ReadableStream in Deno)\n return response.Body as ReadableStream;\n }\n return null;\n } catch (e) {\n if (e instanceof Error && e.name === \"NoSuchKey\") {\n return null;\n }\n throw e;\n }\n }\n\n async getText(id: string): Promise<string | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n // @ts-ignore - Deno's ReadableStream vs Web ReadableStream type mismatch\n const response = new Response(stream);\n return await response.text();\n }\n\n async getBytes(id: string): Promise<Uint8Array | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n // @ts-ignore - Deno's ReadableStream vs Web ReadableStream type mismatch\n const response = new Response(stream);\n const buffer = await response.arrayBuffer();\n return new Uint8Array(buffer);\n }\n\n async delete(id: string): Promise<void> {\n const client = await this.ensureInitialized();\n const { DeleteObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n const deleteCommand = new DeleteObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n });\n try {\n await client.send(deleteCommand);\n } catch (e) {\n if (e instanceof Error && e.name === \"NoSuchKey\") {\n // Ignore if trying to delete a non-existent key\n return;\n }\n throw e;\n }\n }\n\n async exists(id: string): Promise<boolean> {\n const client = await this.ensureInitialized();\n const { HeadObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n try {\n await client.send(\n new HeadObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n }),\n );\n return true;\n } catch (e) {\n if (e instanceof Error && e.name === \"NotFound\") {\n return false;\n }\n throw e;\n }\n }\n\n async stat(id: string): Promise<BlobRef | null> {\n const client = await this.ensureInitialized();\n const { HeadObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n try {\n const headResult = await client.send(\n new HeadObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n }),\n );\n\n if (!headResult.LastModified) return null; // Should always be present for existing objects\n\n // Custom metadata is returned as all lowercase keys by S3\n const metadata: Record<string, string> = {};\n const rawMetadata = headResult.Metadata as Record<string, string> | undefined;\n for (const [k, v] of Object.entries(rawMetadata || {})) {\n if (v != null) {\n metadata[k] = v;\n }\n }\n\n let expiresAt: Date | undefined;\n if (headResult.Expires) {\n expiresAt = new Date(headResult.Expires);\n } else if (headResult.Metadata && headResult.Metadata[\"expiresat\"]) {\n // Check for custom expiresAt if stored in metadata\n expiresAt = new Date(headResult.Metadata[\"expiresat\"]!);\n }\n\n // S3 Lifecycle rules or object TTLs are not exposed directly via HeadObject.\n // If `options.ttl` was used in `put`, that TTL is not natively handled by S3 `Expires` header\n // for object lifecycle management (it's for caching).\n // To support TTL, user must configure S3 bucket lifecycle rules separately based on object tags/prefix\n // OR we 
store expiresAt in metadata and rely on cleanup logic (if any) or user to manage.\n // For now, we only populate expiresAt if S3 provides an Expires header (HTTP caching).\n\n return {\n __kind: \"blob\",\n id,\n size: headResult.ContentLength || 0,\n mimeType: headResult.ContentType || \"application/octet-stream\",\n createdAt: headResult.LastModified,\n expiresAt: expiresAt,\n metadata: metadata,\n url: this.config.baseUrl ? `${this.config.baseUrl}/${key}` : undefined,\n };\n } catch (e) {\n if (e instanceof Error && e.name === \"NotFound\") {\n return null;\n }\n throw e;\n }\n }\n}\n", "/**\n * Google Cloud Storage Blob Storage\n *\n * Stores blobs in Google Cloud Storage.\n */\n\nimport type { BlobRef, BlobStorage, StoreBlobOptions } from \"./types.ts\";\n\nexport interface GCSBlobStorageConfig {\n /** Google Cloud Project ID */\n projectId: string;\n /** GCS Bucket name */\n bucket: string;\n /** Google Cloud Service Account Key (JSON string) */\n serviceAccountKey: string;\n /** Key prefix for namespacing blobs */\n prefix?: string;\n /** Base URL for constructing public URLs (if bucket is public) */\n baseUrl?: string;\n /** Default TTL for blobs in seconds */\n defaultTtl?: number;\n}\n\nexport class GCSBlobStorage implements BlobStorage {\n private config: GCSBlobStorageConfig;\n private tokenCache: { accessToken: string; expiresAt: Date } | null = null;\n\n constructor(config: GCSBlobStorageConfig) {\n this.config = config;\n try {\n JSON.parse(this.config.serviceAccountKey);\n } catch {\n throw new Error(\"GCSBlobStorage: serviceAccountKey must be a valid JSON string.\");\n }\n }\n\n private getKey(id: string): string {\n return this.config.prefix ? `${this.config.prefix}${id}` : id;\n }\n\n private async getAccessToken(): Promise<string> {\n if (this.tokenCache && this.tokenCache.expiresAt > new Date()) {\n return this.tokenCache.accessToken;\n }\n\n const sa = JSON.parse(this.config.serviceAccountKey);\n const tokenEndpoint = \"https://oauth2.googleapis.com/token\";\n const scope = \"https://www.googleapis.com/auth/devstorage.full_control\";\n\n const now = Date.now();\n const jwtHeader = btoa(JSON.stringify({ alg: \"RS256\", typ: \"JWT\" }));\n const jwtClaimSet = btoa(JSON.stringify({\n iss: sa.client_email,\n scope: scope,\n aud: tokenEndpoint,\n exp: Math.floor(now / 1000) + 3600, // 1 hour expiration\n iat: Math.floor(now / 1000),\n }));\n\n // This part requires a proper JWT signing library.\n // Deno's native crypto.subtle can sign, but creating the RS256 private key from PKCS8 (PEM)\n // is non-trivial without a dedicated library.\n // For a quick implementation, we will use a placeholder or assume a pre-signed JWT.\n // In a real-world Deno project, you'd use `djwt` or a similar library.\n console.warn(\n \"[GCSBlobStorage] JWT signing for service account requires a library like `djwt`. 
\" +\n \"Proceeding with a placeholder/manual approach, which is not suitable for production.\",\n );\n\n // Placeholder for actual JWT signing\n const signature = \"PLACEHOLDER_SIGNATURE\";\n const jwt = `${jwtHeader}.${jwtClaimSet}.${signature}`;\n\n // This is a simplified approach, a real implementation would correctly sign the JWT\n // and handle key loading from the service account JSON.\n\n const response = await fetch(tokenEndpoint, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n },\n body: new URLSearchParams({\n grant_type: \"urn:ietf:params:oauth:grant-type:jwt-bearer\",\n assertion: jwt,\n }),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`Failed to get GCS access token: ${response.status} - ${error}`);\n }\n\n const data = await response.json();\n const accessToken = data.access_token;\n const expiresIn = data.expires_in; // in seconds\n\n this.tokenCache = {\n accessToken,\n expiresAt: new Date(Date.now() + (expiresIn - 60) * 1000), // Refresh 1 min before actual expiry\n };\n\n return accessToken;\n }\n\n async put(\n data: string | Uint8Array | Blob | ReadableStream,\n options: StoreBlobOptions = {},\n ): Promise<BlobRef> {\n const id = options.id || crypto.randomUUID();\n const key = this.getKey(id);\n const mimeType = options.mimeType || \"application/octet-stream\";\n const createdAt = new Date();\n const ttl = options.ttl ?? this.config.defaultTtl;\n const expiresAt = ttl ? new Date(createdAt.getTime() + ttl * 1000) : undefined;\n\n let body: string | Uint8Array | ReadableStream | Blob;\n let contentLength: number | undefined;\n\n if (typeof data === \"string\") {\n body = new TextEncoder().encode(data);\n contentLength = body.byteLength;\n } else if (data instanceof Uint8Array) {\n body = data;\n contentLength = data.byteLength;\n } else if (data instanceof Blob) {\n body = data;\n contentLength = data.size;\n } else if (data instanceof ReadableStream) {\n body = data;\n // ContentLength cannot be easily determined for ReadableStream without consuming it.\n // GCS can handle chunked uploads without Content-Length, but specifying it is better.\n } else {\n throw new Error(\"Unsupported data type for GCSBlobStorage\");\n }\n\n const token = await this.getAccessToken();\n const uploadUrl =\n `https://storage.googleapis.com/upload/storage/v1/b/${this.config.bucket}/o?uploadType=media&name=${key}`;\n\n const headers: Record<string, string> = {\n \"Authorization\": `Bearer ${token}`,\n \"Content-Type\": mimeType,\n };\n if (contentLength !== undefined) {\n headers[\"Content-Length\"] = String(contentLength);\n }\n\n // Add custom metadata. GCS accepts x-goog-meta- prefix.\n const gcsMetadata: Record<string, string> = {};\n if (options.metadata) {\n for (const [k, v] of Object.entries(options.metadata)) {\n gcsMetadata[`x-goog-meta-${k.toLowerCase()}`] = v;\n }\n }\n if (expiresAt) {\n // Store expiresAt in metadata for stat retrieval, GCS native TTL is via object lifecycle rules\n gcsMetadata[\"x-goog-meta-expiresat\"] = expiresAt.toISOString();\n }\n Object.assign(headers, gcsMetadata);\n\n const response = await fetch(uploadUrl, {\n method: \"POST\",\n headers,\n body: body,\n });\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to upload to GCS: ${response.status} - ${response.statusText}. 
Body: ${errorBody}`,\n );\n }\n\n const gcsObject = await response.json();\n\n return {\n __kind: \"blob\",\n id,\n size: Number(gcsObject.size),\n mimeType: gcsObject.contentType,\n createdAt: new Date(gcsObject.timeCreated),\n expiresAt: expiresAt, // Derived from TTL passed or default\n metadata: options.metadata,\n url: this.config.baseUrl ? `${this.config.baseUrl}/${key}` : gcsObject.mediaLink, // mediaLink is the direct download URL\n };\n }\n\n async getStream(id: string): Promise<ReadableStream | null> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const downloadUrl =\n `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}?alt=media`;\n\n try {\n const response = await fetch(downloadUrl, {\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 404) {\n return null;\n }\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to download from GCS: ${response.status} - ${response.statusText}. Body: ${errorBody}`,\n );\n }\n return response.body; // Deno's fetch body is a ReadableStream\n } catch (e) {\n console.error(\"GCS getStream error:\", e);\n throw e;\n }\n }\n\n async getText(id: string): Promise<string | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n const reader = stream.getReader();\n let text = \"\";\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n text += new TextDecoder().decode(value);\n }\n return text;\n }\n\n async getBytes(id: string): Promise<Uint8Array | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n const chunks: Uint8Array[] = [];\n const reader = stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n }\n const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n }\n\n async delete(id: string): Promise<void> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const deleteUrl = `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}`;\n\n const response = await fetch(deleteUrl, {\n method: \"DELETE\",\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 404) {\n // Object not found, consider it deleted\n return;\n }\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to delete from GCS: ${response.status} - ${response.statusText}. Body: ${errorBody}`,\n );\n }\n }\n\n async exists(id: string): Promise<boolean> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const getUrl =\n `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}?fields=id`;\n\n const response = await fetch(getUrl, {\n method: \"GET\",\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 200) {\n return true;\n }\n if (response.status === 404) {\n return false;\n }\n const errorBody = await response.text();\n throw new Error(\n `Failed to check existence in GCS: ${response.status} - ${response.statusText}. 
Body: ${errorBody}`,\n );\n }\n\n async stat(id: string): Promise<BlobRef | null> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const getUrl = `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}`;\n\n const response = await fetch(getUrl, {\n method: \"GET\",\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 404) {\n return null;\n }\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to get metadata from GCS: ${response.status} - ${response.statusText}. Body: ${errorBody}`,\n );\n }\n\n const gcsObject = await response.json();\n\n // Custom metadata is stored with `x-goog-meta-` prefix and is all lowercase\n const metadata: Record<string, string> = {};\n if (gcsObject.metadata) {\n for (const [k, v] of Object.entries(gcsObject.metadata as Record<string, string>)) {\n if (k.startsWith(\"x-goog-meta-\")) {\n metadata[k.replace(\"x-goog-meta-\", \"\")] = v;\n } else {\n metadata[k] = v;\n }\n }\n }\n\n let expiresAt: Date | undefined;\n if (metadata[\"expiresat\"]) {\n expiresAt = new Date(metadata[\"expiresat\"]!); // Retrieve custom expiresAt from metadata\n }\n\n return {\n __kind: \"blob\",\n id,\n size: Number(gcsObject.size),\n mimeType: gcsObject.contentType,\n createdAt: new Date(gcsObject.timeCreated),\n expiresAt: expiresAt, // Populated from custom metadata if available\n metadata: metadata,\n url: gcsObject.mediaLink, // mediaLink is the direct download URL\n };\n }\n}\n", "/**\n * Workflow Backend Interface\n *\n * Defines the contract for workflow persistence and execution backends.\n * Implementations can be:\n * - MemoryBackend (development)\n * - RedisBackend (production)\n * - TemporalAdapter (enterprise)\n * - InngestAdapter (serverless)\n * - CloudflareAdapter (edge)\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n WorkflowStatus as _WorkflowStatus,\n} from \"../types.ts\";\n\n/**\n * Backend configuration options\n */\nexport interface BackendConfig {\n /** Connection URL (for Redis, Postgres, etc.) */\n url?: string;\n /** Key prefix for namespacing */\n prefix?: string;\n /** Default TTL for runs (in milliseconds) */\n defaultTtl?: number;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Lock information for distributed execution\n */\nexport interface Lock {\n /** Lock identifier */\n lockId: string;\n /** Run ID that owns the lock */\n runId: string;\n /** When lock was acquired */\n acquiredAt: Date;\n /** When lock expires */\n expiresAt: Date;\n}\n\n/**\n * Workflow backend interface\n *\n * All backend implementations must implement this interface.\n * Optional methods (marked with ?) 
can be omitted for simpler backends.\n */\nexport interface WorkflowBackend {\n // =========================================================================\n // Run Management\n // =========================================================================\n\n /**\n * Create a new workflow run\n */\n createRun(run: WorkflowRun): Promise<void>;\n\n /**\n * Get a workflow run by ID\n */\n getRun(runId: string): Promise<WorkflowRun | null>;\n\n /**\n * Update a workflow run\n */\n updateRun(runId: string, patch: Partial<WorkflowRun>): Promise<void>;\n\n /**\n * Delete a workflow run\n */\n deleteRun?(runId: string): Promise<void>;\n\n /**\n * List workflow runs with optional filters\n */\n listRuns(filter: RunFilter): Promise<WorkflowRun[]>;\n\n /**\n * Count workflow runs matching filter\n */\n countRuns?(filter: RunFilter): Promise<number>;\n\n // =========================================================================\n // Checkpointing\n // =========================================================================\n\n /**\n * Save a checkpoint for a workflow run\n */\n saveCheckpoint(runId: string, checkpoint: Checkpoint): Promise<void>;\n\n /**\n * Get the latest checkpoint for a workflow run\n */\n getLatestCheckpoint(runId: string): Promise<Checkpoint | null>;\n\n /**\n * Get all checkpoints for a workflow run\n */\n getCheckpoints?(runId: string): Promise<Checkpoint[]>;\n\n /**\n * Delete a specific checkpoint\n */\n deleteCheckpoint?(runId: string, checkpointId: string): Promise<void>;\n\n /**\n * Delete multiple checkpoints by ID\n */\n deleteCheckpoints?(runId: string, checkpointIds: string[]): Promise<void>;\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n /**\n * Save a pending approval request\n */\n savePendingApproval(\n runId: string,\n approval: PendingApproval,\n ): Promise<void>;\n\n /**\n * Get all pending approvals for a workflow run\n */\n getPendingApprovals(runId: string): Promise<PendingApproval[]>;\n\n /**\n * Get a specific pending approval\n */\n getPendingApproval?(\n runId: string,\n approvalId: string,\n ): Promise<PendingApproval | null>;\n\n /**\n * Update an approval with a decision\n */\n updateApproval(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void>;\n\n /**\n * List all pending approvals across workflows\n */\n listPendingApprovals?(filter?: {\n workflowId?: string;\n approver?: string;\n status?: \"pending\" | \"expired\";\n }): Promise<Array<{ runId: string; approval: PendingApproval }>>;\n\n // =========================================================================\n // Queue Operations (optional - for distributed execution)\n // =========================================================================\n\n /**\n * Enqueue a workflow job for processing\n */\n enqueue?(job: WorkflowJob): Promise<void>;\n\n /**\n * Dequeue the next workflow job\n */\n dequeue?(): Promise<WorkflowJob | null>;\n\n /**\n * Acknowledge job completion\n */\n acknowledge?(runId: string): Promise<void>;\n\n /**\n * Negative acknowledge - return job to queue\n */\n nack?(runId: string): Promise<void>;\n\n // =========================================================================\n // Distributed Locking (optional - for distributed execution)\n // =========================================================================\n\n /**\n * Acquire a lock for exclusive workflow execution\n * Returns true if lock was 
acquired, false if already locked\n */\n acquireLock?(runId: string, duration: number): Promise<boolean>;\n\n /**\n * Release a lock\n */\n releaseLock?(runId: string): Promise<void>;\n\n /**\n * Extend lock duration\n */\n extendLock?(runId: string, duration: number): Promise<boolean>;\n\n /**\n * Check if a lock is held\n */\n isLocked?(runId: string): Promise<boolean>;\n\n // =========================================================================\n // Events (optional - for event-driven workflows)\n // =========================================================================\n\n /**\n * Publish an event that waiting workflows can receive\n */\n publishEvent?(\n eventName: string,\n payload: unknown,\n options?: {\n runId?: string; // Target specific run\n workflowId?: string; // Target specific workflow type\n },\n ): Promise<void>;\n\n /**\n * Subscribe to events for a workflow run\n * Returns an async iterator of events\n */\n subscribeEvents?(runId: string): AsyncIterable<{\n eventName: string;\n payload: unknown;\n timestamp: Date;\n }>;\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n /**\n * Initialize the backend (connect to database, etc.)\n */\n initialize?(): Promise<void>;\n\n /**\n * Check if the backend is healthy\n */\n healthCheck?(): Promise<boolean>;\n\n /**\n * Cleanup and close connections\n */\n destroy(): Promise<void>;\n}\n\n/**\n * Backend with queue capabilities\n * Type guard for checking if backend supports queueing\n */\nexport function hasQueueSupport(\n backend: WorkflowBackend,\n): backend is\n & WorkflowBackend\n & Required<Pick<WorkflowBackend, \"enqueue\" | \"dequeue\" | \"acknowledge\">> {\n return (\n typeof backend.enqueue === \"function\" &&\n typeof backend.dequeue === \"function\" &&\n typeof backend.acknowledge === \"function\"\n );\n}\n\n/**\n * Backend with locking capabilities\n * Type guard for checking if backend supports distributed locking\n */\nexport function hasLockSupport(\n backend: WorkflowBackend,\n): backend is WorkflowBackend & Required<Pick<WorkflowBackend, \"acquireLock\" | \"releaseLock\">> {\n return (\n typeof backend.acquireLock === \"function\" &&\n typeof backend.releaseLock === \"function\"\n );\n}\n\n/**\n * Backend with event capabilities\n * Type guard for checking if backend supports events\n */\nexport function hasEventSupport(\n backend: WorkflowBackend,\n): backend is\n & WorkflowBackend\n & Required<Pick<WorkflowBackend, \"publishEvent\" | \"subscribeEvents\">> {\n return (\n typeof backend.publishEvent === \"function\" &&\n typeof backend.subscribeEvents === \"function\"\n );\n}\n", "/**\n * Memory Workflow Backend\n *\n * In-memory implementation of WorkflowBackend for development and testing.\n * Data is NOT persisted across restarts.\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\n\n/**\n * Memory backend configuration\n */\nexport interface MemoryBackendConfig extends BackendConfig {\n /** Maximum queue size (default: 10000) */\n maxQueueSize?: number;\n}\n\n/** Default max queue size */\nconst DEFAULT_MAX_QUEUE_SIZE = 10000;\n\n/**\n * In-memory workflow backend\n *\n * @example\n * ```typescript\n * import { MemoryBackend } from 'veryfront/ai/workflow/backends/memory';\n *\n * const backend = new MemoryBackend();\n * 
```\n */\nexport class MemoryBackend implements WorkflowBackend {\n private runs = new Map<string, WorkflowRun>();\n private checkpoints = new Map<string, Checkpoint[]>();\n private approvals = new Map<string, PendingApproval[]>();\n private queue: WorkflowJob[] = [];\n private locks = new Map<string, { lockId: string; expiresAt: number }>();\n private config: MemoryBackendConfig;\n\n constructor(config: MemoryBackendConfig = {}) {\n this.config = {\n prefix: \"wf:\",\n debug: false,\n maxQueueSize: DEFAULT_MAX_QUEUE_SIZE,\n ...config,\n };\n }\n\n // =========================================================================\n // Run Management\n // =========================================================================\n\n createRun(run: WorkflowRun): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Creating run: ${run.id}`);\n }\n this.runs.set(run.id, structuredClone(run));\n return Promise.resolve();\n }\n\n getRun(runId: string): Promise<WorkflowRun | null> {\n const run = this.runs.get(runId);\n return Promise.resolve(run ? structuredClone(run) : null);\n }\n\n updateRun(runId: string, patch: Partial<WorkflowRun>): Promise<void> {\n const run = this.runs.get(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Updating run: ${runId}`, patch);\n }\n\n // Deep merge the patch\n const updated = {\n ...run,\n ...patch,\n // Deep merge specific fields\n nodeStates: { ...run.nodeStates, ...patch.nodeStates },\n context: { ...run.context, ...patch.context },\n };\n\n this.runs.set(runId, updated);\n return Promise.resolve();\n }\n\n deleteRun(runId: string): Promise<void> {\n this.runs.delete(runId);\n this.checkpoints.delete(runId);\n this.approvals.delete(runId);\n return Promise.resolve();\n }\n\n listRuns(filter: RunFilter): Promise<WorkflowRun[]> {\n let runs = Array.from(this.runs.values());\n\n // Apply filters\n if (filter.workflowId) {\n runs = runs.filter((r) => r.workflowId === filter.workflowId);\n }\n\n if (filter.status) {\n const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];\n runs = runs.filter((r) => statuses.includes(r.status));\n }\n\n if (filter.createdAfter) {\n runs = runs.filter((r) => r.createdAt >= filter.createdAfter!);\n }\n\n if (filter.createdBefore) {\n runs = runs.filter((r) => r.createdAt <= filter.createdBefore!);\n }\n\n // Sort by creation date (newest first)\n runs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());\n\n // Apply pagination (offset and limit together)\n const start = filter.offset ?? 0;\n const end = filter.limit ? 
start + filter.limit : undefined;\n runs = runs.slice(start, end);\n\n return Promise.resolve(runs.map((r) => structuredClone(r)));\n }\n\n async countRuns(filter: RunFilter): Promise<number> {\n const runs = await this.listRuns({ ...filter, limit: undefined, offset: undefined });\n return runs.length;\n }\n\n // =========================================================================\n // Checkpointing\n // =========================================================================\n\n saveCheckpoint(runId: string, checkpoint: Checkpoint): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Saving checkpoint: ${checkpoint.id} for run ${runId}`);\n }\n\n const existing = this.checkpoints.get(runId) || [];\n existing.push(structuredClone(checkpoint));\n this.checkpoints.set(runId, existing);\n return Promise.resolve();\n }\n\n getLatestCheckpoint(runId: string): Promise<Checkpoint | null> {\n const checkpoints = this.checkpoints.get(runId);\n if (!checkpoints || checkpoints.length === 0) {\n return Promise.resolve(null);\n }\n\n // Return the most recent checkpoint\n const latest = checkpoints[checkpoints.length - 1];\n return Promise.resolve(latest ? structuredClone(latest) : null);\n }\n\n getCheckpoints(runId: string): Promise<Checkpoint[]> {\n const checkpoints = this.checkpoints.get(runId) || [];\n return Promise.resolve(checkpoints.map((c) => structuredClone(c)));\n }\n\n deleteCheckpoint(runId: string, checkpointId: string): Promise<void> {\n const checkpoints = this.checkpoints.get(runId);\n if (!checkpoints) {\n return Promise.resolve();\n }\n\n const index = checkpoints.findIndex((c) => c.id === checkpointId);\n if (index !== -1) {\n checkpoints.splice(index, 1);\n if (this.config.debug) {\n console.log(`[MemoryBackend] Deleted checkpoint: ${checkpointId}`);\n }\n }\n return Promise.resolve();\n }\n\n deleteCheckpoints(runId: string, checkpointIds: string[]): Promise<void> {\n const checkpoints = this.checkpoints.get(runId);\n if (!checkpoints) {\n return Promise.resolve();\n }\n\n const idsToDelete = new Set(checkpointIds);\n const filtered = checkpoints.filter((c) => !idsToDelete.has(c.id));\n this.checkpoints.set(runId, filtered);\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Deleted ${checkpointIds.length} checkpoints`);\n }\n return Promise.resolve();\n }\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n savePendingApproval(\n runId: string,\n approval: PendingApproval,\n ): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Saving approval: ${approval.id} for run ${runId}`);\n }\n\n const existing = this.approvals.get(runId) || [];\n existing.push(structuredClone(approval));\n this.approvals.set(runId, existing);\n return Promise.resolve();\n }\n\n getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n const approvals = this.approvals.get(runId) || [];\n return Promise.resolve(\n approvals\n .filter((a) => a.status === \"pending\")\n .map((a) => structuredClone(a)),\n );\n }\n\n getPendingApproval(\n runId: string,\n approvalId: string,\n ): Promise<PendingApproval | null> {\n const approvals = this.approvals.get(runId) || [];\n const approval = approvals.find((a) => a.id === approvalId);\n return Promise.resolve(approval ? 
structuredClone(approval) : null);\n }\n\n updateApproval(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void> {\n const approvals = this.approvals.get(runId);\n if (!approvals) {\n throw new Error(`No approvals found for run: ${runId}`);\n }\n\n const approval = approvals.find((a) => a.id === approvalId);\n if (!approval) {\n throw new Error(`Approval not found: ${approvalId}`);\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Updating approval: ${approvalId}`, decision);\n }\n\n approval.status = decision.approved ? \"approved\" : \"rejected\";\n approval.decidedBy = decision.approver;\n approval.decidedAt = new Date();\n approval.comment = decision.comment;\n return Promise.resolve();\n }\n\n listPendingApprovals(filter?: {\n workflowId?: string;\n approver?: string;\n status?: \"pending\" | \"expired\";\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n const result: Array<{ runId: string; approval: PendingApproval }> = [];\n\n for (const [runId, approvals] of this.approvals) {\n const run = this.runs.get(runId);\n if (!run) continue;\n\n if (filter?.workflowId && run.workflowId !== filter.workflowId) {\n continue;\n }\n\n for (const approval of approvals) {\n // Check status\n if (filter?.status === \"pending\" && approval.status !== \"pending\") {\n continue;\n }\n\n if (filter?.status === \"expired\") {\n const isExpired = approval.expiresAt && new Date() > approval.expiresAt;\n if (!isExpired) continue;\n }\n\n // Check approver\n if (\n filter?.approver &&\n approval.approvers &&\n !approval.approvers.includes(filter.approver)\n ) {\n continue;\n }\n\n result.push({ runId, approval: structuredClone(approval) });\n }\n }\n\n return Promise.resolve(result);\n }\n\n // =========================================================================\n // Queue Operations\n // =========================================================================\n\n enqueue(job: WorkflowJob): Promise<void> {\n // Check queue size limit\n const maxSize = this.config.maxQueueSize ?? DEFAULT_MAX_QUEUE_SIZE;\n if (this.queue.length >= maxSize) {\n return Promise.reject(\n new Error(`Queue full (max: ${maxSize}). Cannot enqueue job: ${job.runId}`),\n );\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Enqueueing job: ${job.runId}`);\n }\n\n // Insert based on priority (higher priority first)\n const priority = job.priority ?? 0;\n const insertIndex = this.queue.findIndex((j) => (j.priority ?? 0) < priority);\n\n if (insertIndex === -1) {\n this.queue.push(structuredClone(job));\n } else {\n this.queue.splice(insertIndex, 0, structuredClone(job));\n }\n return Promise.resolve();\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n const job = this.queue.shift();\n return Promise.resolve(job ? 
structuredClone(job) : null);\n }\n\n acknowledge(runId: string): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Acknowledging job: ${runId}`);\n }\n // For memory backend, acknowledgment is a no-op\n // The job is already removed from queue on dequeue\n return Promise.resolve();\n }\n\n async nack(runId: string): Promise<void> {\n // Re-enqueue the job\n const run = await this.getRun(runId);\n if (run) {\n await this.enqueue({\n runId: run.id,\n workflowId: run.workflowId,\n input: run.input,\n createdAt: new Date(),\n });\n }\n }\n\n // =========================================================================\n // Distributed Locking\n // =========================================================================\n\n acquireLock(runId: string, duration: number): Promise<boolean> {\n const existing = this.locks.get(runId);\n const now = Date.now();\n\n // If lock exists and hasn't expired, fail to acquire\n if (existing && existing.expiresAt > now) {\n return Promise.resolve(false);\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Acquiring lock for: ${runId}`);\n }\n\n this.locks.set(runId, {\n lockId: crypto.randomUUID(),\n expiresAt: now + duration,\n });\n\n return Promise.resolve(true);\n }\n\n releaseLock(runId: string): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Releasing lock for: ${runId}`);\n }\n this.locks.delete(runId);\n return Promise.resolve();\n }\n\n extendLock(runId: string, duration: number): Promise<boolean> {\n const existing = this.locks.get(runId);\n const now = Date.now();\n\n if (!existing || existing.expiresAt <= now) {\n return Promise.resolve(false);\n }\n\n existing.expiresAt = now + duration;\n return Promise.resolve(true);\n }\n\n isLocked(runId: string): Promise<boolean> {\n const existing = this.locks.get(runId);\n return Promise.resolve(!!existing && existing.expiresAt > Date.now());\n }\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n initialize(): Promise<void> {\n if (this.config.debug) {\n console.log(\"[MemoryBackend] Initialized\");\n }\n return Promise.resolve();\n }\n\n healthCheck(): Promise<boolean> {\n return Promise.resolve(true);\n }\n\n destroy(): Promise<void> {\n this.runs.clear();\n this.checkpoints.clear();\n this.approvals.clear();\n this.queue = [];\n this.locks.clear();\n\n if (this.config.debug) {\n console.log(\"[MemoryBackend] Destroyed\");\n }\n return Promise.resolve();\n }\n\n // =========================================================================\n // Development Helpers\n // =========================================================================\n\n /**\n * Get statistics about the backend (for debugging)\n */\n getStats(): {\n runs: number;\n checkpoints: number;\n approvals: number;\n queueLength: number;\n locks: number;\n } {\n let totalCheckpoints = 0;\n let totalApprovals = 0;\n\n for (const checkpoints of this.checkpoints.values()) {\n totalCheckpoints += checkpoints.length;\n }\n\n for (const approvals of this.approvals.values()) {\n totalApprovals += approvals.length;\n }\n\n return {\n runs: this.runs.size,\n checkpoints: totalCheckpoints,\n approvals: totalApprovals,\n queueLength: this.queue.length,\n locks: this.locks.size,\n };\n }\n\n /**\n * Clear all data (for testing)\n */\n clear(): Promise<void> {\n this.runs.clear();\n this.checkpoints.clear();\n this.approvals.clear();\n this.queue = [];\n 
this.locks.clear();\n return Promise.resolve();\n }\n}\n", "/**\n * Redis Workflow Backend\n *\n * Production-grade Redis implementation of WorkflowBackend.\n * Uses Redis hashes for state storage and Redis Streams for job queuing.\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n WorkflowStatus,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\nimport { isDeno } from \"@veryfront/platform/compat/runtime.ts\";\n\n// Lazy-loaded Redis client modules (loaded only when Redis backend is used)\n// @ts-ignore - Deno global\nlet DenoRedis: any = null;\nlet NodeRedis: any = null;\n\n/**\n * Lazily load the Redis module for the current runtime.\n * This ensures the redis package is only required when the Redis backend is actually used.\n *\n * NOTE: We construct module names dynamically to prevent Deno's static analyzer\n * from pre-fetching these optional dependencies during lint/check tasks.\n */\nasync function getRedisModule(): Promise<{ DenoRedis: any; NodeRedis: any }> {\n // Return cached modules if already loaded\n if (DenoRedis || NodeRedis) {\n return { DenoRedis, NodeRedis };\n }\n\n if (isDeno) {\n try {\n // Construct URL dynamically to prevent static analysis from pre-fetching\n const denoRedisUrl = [\"https://deno.land/x/redis\", \"@v0.32.1/mod.ts\"].join(\"\");\n // @ts-ignore - Deno global\n DenoRedis = await import(denoRedisUrl);\n } catch (error) {\n throw new Error(\n `Failed to load Deno Redis module. Error: ${\n error instanceof Error ? error.message : String(error)\n }`,\n );\n }\n } else {\n try {\n // Construct module name dynamically to prevent Deno static analyzer\n // from trying to resolve this npm package during lint/check\n const redisModuleName = [\"re\", \"dis\"].join(\"\");\n NodeRedis = await import(redisModuleName);\n } catch (error) {\n throw new Error(\n `Failed to load 'redis' package. Please install it with: npm install redis\\n` +\n `Error: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n return { DenoRedis, NodeRedis };\n}\n\n/**\n * Standardized Redis Adapter Interface\n * Normalizes differences between Deno and Node Redis clients\n */\nexport interface RedisAdapter {\n // Hash operations\n hset(key: string, fields: Record<string, string>): Promise<number | string>;\n hgetall(key: string): Promise<Record<string, string>>;\n hdel(key: string, ...fields: string[]): Promise<number>;\n del(...keys: string[]): Promise<number>;\n\n // Set operations (for indexing)\n sadd(key: string, ...members: string[]): Promise<number>;\n srem(key: string, ...members: string[]): Promise<number>;\n smembers(key: string): Promise<string[]>;\n\n // List operations (for checkpoints)\n rpush(key: string, ...values: string[]): Promise<number>;\n lrange(key: string, start: number, stop: number): Promise<string[]>;\n lindex(key: string, index: number): Promise<string | null>;\n lset(key: string, index: number, value: string): Promise<string | \"OK\">;\n llen(key: string): Promise<number>;\n\n // Stream operations\n xadd(key: string, id: string, fields: Record<string, string>): Promise<string>;\n xgroupCreate(key: string, group: string, id: string, mkstream?: boolean): Promise<string>;\n xreadgroup(\n streams: Array<{ key: string; xid: string }>,\n options: { group: string; consumer: string; block?: number; count?: number },\n ): Promise<Array<{ key: string; messages: Array<{ id: string; data: Record<string, string> }> }>>;\n xack(key: string, group: string, ...ids: string[]): Promise<number>;\n\n // Key operations\n keys(pattern: string): Promise<string[]>;\n exists(...keys: string[]): Promise<number>;\n expire(key: string, seconds: number): Promise<number>;\n\n // Lock operations (using SET with NX and PX)\n set(\n key: string,\n value: string,\n options?: { nx?: boolean; px?: number; ex?: number },\n ): Promise<string | null>;\n get(key: string): Promise<string | null>;\n\n // Connection\n quit(): Promise<void>;\n disconnect(): Promise<void>;\n}\n\n// Helper to convert array [k1, v1, k2, v2] to object\nfunction arrayToObject(arr: string[]): Record<string, string> {\n const obj: Record<string, string> = {};\n for (let i = 0; i < arr.length; i += 2) {\n const key = arr[i];\n const value = arr[i + 1];\n if (key && value !== undefined) {\n obj[key] = value;\n }\n }\n return obj;\n}\n\n/**\n * Adapter for Node.js 'redis' package\n */\nclass NodeRedisAdapter implements RedisAdapter {\n constructor(private client: any) {}\n\n async hset(key: string, fields: Record<string, string>): Promise<number | string> {\n return await this.client.hSet(key, fields);\n }\n\n async hgetall(key: string): Promise<Record<string, string>> {\n return await this.client.hGetAll(key);\n }\n\n async hdel(key: string, ...fields: string[]): Promise<number> {\n return await this.client.hDel(key, fields);\n }\n\n async del(...keys: string[]): Promise<number> {\n return await this.client.del(keys);\n }\n\n async sadd(key: string, ...members: string[]): Promise<number> {\n return await this.client.sAdd(key, members);\n }\n\n async srem(key: string, ...members: string[]): Promise<number> {\n return await this.client.sRem(key, members);\n }\n\n async smembers(key: string): Promise<string[]> {\n return await this.client.sMembers(key);\n }\n\n async rpush(key: string, ...values: string[]): Promise<number> {\n return await this.client.rPush(key, values);\n }\n\n async lrange(key: string, start: number, stop: number): Promise<string[]> {\n return await this.client.lRange(key, start, 
stop);\n }\n\n async lindex(key: string, index: number): Promise<string | null> {\n return await this.client.lIndex(key, index);\n }\n\n async lset(key: string, index: number, value: string): Promise<string | \"OK\"> {\n return await this.client.lSet(key, index, value);\n }\n\n async llen(key: string): Promise<number> {\n return await this.client.lLen(key);\n }\n\n async xadd(key: string, id: string, fields: Record<string, string>): Promise<string> {\n return await this.client.xAdd(key, id, fields);\n }\n\n async xgroupCreate(key: string, group: string, id: string, mkstream?: boolean): Promise<string> {\n return await this.client.xGroupCreate(key, group, id, { MKSTREAM: mkstream });\n }\n\n async xreadgroup(\n streams: Array<{ key: string; xid: string }>,\n options: { group: string; consumer: string; block?: number; count?: number },\n ): Promise<\n Array<{ key: string; messages: Array<{ id: string; data: Record<string, string> }> }>\n > {\n // Node redis format: { key: string, messages: Array<{ id: string, message: Record<string, string> }> }\n // OR if single stream: Array<{ id: string, message: Record<string, string> }> ??\n // The node-redis v4 API is slightly different.\n // Assuming commandOptions style:\n const result = await this.client.xReadGroup(\n options.group,\n options.consumer,\n streams.map((s) => ({ key: s.key, id: s.xid })),\n {\n BLOCK: options.block,\n COUNT: options.count,\n },\n );\n\n if (!result) return [];\n\n // Normalize output\n // node-redis v4 returns: Array<{ name: string, messages: Array<{ id: string, message: Record<string, string> }> }>\n return (result as any[]).map((stream: any) => ({\n key: stream.name,\n messages: stream.messages.map((msg: any) => ({\n id: msg.id,\n data: msg.message,\n })),\n }));\n }\n\n async xack(key: string, group: string, ...ids: string[]): Promise<number> {\n return await this.client.xAck(key, group, ids);\n }\n\n async keys(pattern: string): Promise<string[]> {\n return await this.client.keys(pattern);\n }\n\n async exists(...keys: string[]): Promise<number> {\n return await this.client.exists(keys);\n }\n\n async expire(key: string, seconds: number): Promise<number> {\n return await this.client.expire(key, seconds);\n }\n\n async set(\n key: string,\n value: string,\n options?: { nx?: boolean; px?: number; ex?: number },\n ): Promise<string | null> {\n const opts: any = {};\n if (options?.nx) opts.NX = true;\n if (options?.px) opts.PX = options.px;\n if (options?.ex) opts.EX = options.ex;\n return await this.client.set(key, value, opts);\n }\n\n async get(key: string): Promise<string | null> {\n return await this.client.get(key);\n }\n\n async quit(): Promise<void> {\n await this.client.quit();\n }\n\n async disconnect(): Promise<void> {\n await this.client.disconnect();\n }\n}\n\n/**\n * Adapter for Deno 'redis' module\n */\nclass DenoRedisAdapter implements RedisAdapter {\n constructor(private client: any) {}\n\n async hset(key: string, fields: Record<string, string>): Promise<number | string> {\n return await this.client.hset(key, fields);\n }\n\n async hgetall(key: string): Promise<Record<string, string>> {\n const res = await this.client.hgetall(key);\n // Deno redis returns array [k1, v1, k2, v2]\n return arrayToObject(res);\n }\n\n async hdel(key: string, ...fields: string[]): Promise<number> {\n return await this.client.hdel(key, ...fields);\n }\n\n async del(...keys: string[]): Promise<number> {\n return await this.client.del(...keys);\n }\n\n async sadd(key: string, ...members: string[]): Promise<number> {\n return 
await this.client.sadd(key, ...members);\n }\n\n async srem(key: string, ...members: string[]): Promise<number> {\n return await this.client.srem(key, ...members);\n }\n\n async smembers(key: string): Promise<string[]> {\n return await this.client.smembers(key);\n }\n\n async rpush(key: string, ...values: string[]): Promise<number> {\n return await this.client.rpush(key, ...values);\n }\n\n async lrange(key: string, start: number, stop: number): Promise<string[]> {\n return await this.client.lrange(key, start, stop);\n }\n\n async lindex(key: string, index: number): Promise<string | null> {\n return await this.client.lindex(key, index);\n }\n\n async lset(key: string, index: number, value: string): Promise<string | \"OK\"> {\n return await this.client.lset(key, index, value);\n }\n\n async llen(key: string): Promise<number> {\n return await this.client.llen(key);\n }\n\n async xadd(key: string, id: string, fields: Record<string, string>): Promise<string> {\n return await this.client.xadd(key, id, fields);\n }\n\n async xgroupCreate(key: string, group: string, id: string, mkstream?: boolean): Promise<string> {\n return await this.client.xgroupCreate(key, group, id, mkstream);\n }\n\n async xreadgroup(\n streams: Array<{ key: string; xid: string }>,\n options: { group: string; consumer: string; block?: number; count?: number },\n ): Promise<\n Array<{ key: string; messages: Array<{ id: string; data: Record<string, string> }> }>\n > {\n if (streams.length === 0) return [];\n\n // Deno redis returns: Array<{ key: string, messages: Array<{ id: string, fieldValues: string[] }> }>\n const res = await this.client.xreadgroup(\n streams.map((s) => ({ key: s.key, xid: s.xid })),\n options,\n );\n\n if (!res) return [];\n\n return (res as any[]).map((stream: any) => ({\n key: stream.key,\n messages: stream.messages.map((msg: any) => ({\n id: msg.id,\n data: arrayToObject(msg.fieldValues),\n })),\n }));\n }\n\n async xack(key: string, group: string, ...ids: string[]): Promise<number> {\n return await this.client.xack(key, group, ...ids);\n }\n\n async keys(pattern: string): Promise<string[]> {\n return await this.client.keys(pattern);\n }\n\n async exists(...keys: string[]): Promise<number> {\n return await this.client.exists(...keys);\n }\n\n async expire(key: string, seconds: number): Promise<number> {\n return await this.client.expire(key, seconds);\n }\n\n async set(\n key: string,\n value: string,\n options?: { nx?: boolean; px?: number; ex?: number },\n ): Promise<string | null> {\n return await this.client.set(key, value, options);\n }\n\n async get(key: string): Promise<string | null> {\n return await this.client.get(key);\n }\n\n async quit(): Promise<void> {\n await this.client.close(); // Deno redis uses close\n }\n\n async disconnect(): Promise<void> {\n await this.client.close();\n }\n}\n\n/**\n * Redis backend configuration\n */\nexport interface RedisBackendConfig extends BackendConfig {\n /** Redis connection URL or config */\n url?: string;\n /** Redis hostname */\n hostname?: string;\n /** Redis port */\n port?: number;\n /** Key prefix for namespacing */\n prefix?: string;\n /** Stream name for job queue */\n streamKey?: string;\n /** Consumer group name */\n groupName?: string;\n /** Consumer name (unique per worker) */\n consumerName?: string;\n /** Default TTL for runs (in seconds) */\n runTtl?: number;\n /** Enable debug logging */\n debug?: boolean;\n /** Existing Redis client (optional) */\n client?: RedisAdapter;\n}\n\n/**\n * Redis Workflow Backend\n */\nexport class 
RedisBackend implements WorkflowBackend {\n private client: RedisAdapter | null = null;\n private connectionPromise: Promise<RedisAdapter> | null = null;\n private config:\n & Required<\n Pick<RedisBackendConfig, \"prefix\" | \"streamKey\" | \"groupName\" | \"consumerName\" | \"debug\">\n >\n & RedisBackendConfig;\n private initialized = false;\n\n constructor(config: RedisBackendConfig = {}) {\n this.config = {\n prefix: \"vf:workflow:\",\n streamKey: \"vf:workflow:stream\",\n groupName: \"vf:workflow:workers\",\n consumerName: `worker-${crypto.randomUUID().slice(0, 8)}`,\n debug: false,\n ...config,\n };\n\n // Use provided client if available\n if (config.client) {\n this.client = config.client;\n }\n }\n\n // =========================================================================\n // Key Generation\n // =========================================================================\n\n private runKey(runId: string): string {\n return `${this.config.prefix}run:${runId}`;\n }\n\n private checkpointsKey(runId: string): string {\n return `${this.config.prefix}checkpoints:${runId}`;\n }\n\n private approvalsKey(runId: string): string {\n return `${this.config.prefix}approvals:${runId}`;\n }\n\n private statusIndexKey(status: WorkflowStatus): string {\n return `${this.config.prefix}index:status:${status}`;\n }\n\n private workflowIndexKey(workflowId: string): string {\n return `${this.config.prefix}index:workflow:${workflowId}`;\n }\n\n private lockKey(runId: string): string {\n return `${this.config.prefix}lock:${runId}`;\n }\n\n // =========================================================================\n // Serialization\n // =========================================================================\n\n private serializeRun(run: WorkflowRun): Record<string, string> {\n return {\n id: run.id,\n workflowId: run.workflowId,\n version: run.version || \"\",\n status: run.status,\n input: JSON.stringify(run.input),\n output: run.output !== undefined ? JSON.stringify(run.output) : \"\",\n nodeStates: JSON.stringify(run.nodeStates),\n currentNodes: JSON.stringify(run.currentNodes),\n context: JSON.stringify(run.context),\n error: run.error ? JSON.stringify(run.error) : \"\",\n createdAt: run.createdAt.toISOString(),\n startedAt: run.startedAt?.toISOString() || \"\",\n completedAt: run.completedAt?.toISOString() || \"\",\n };\n }\n\n private deserializeRun(data: Record<string, string>): WorkflowRun {\n // Validate required fields\n if (!data.id) {\n throw new Error(\"Invalid workflow run data: missing 'id' field\");\n }\n if (!data.workflowId) {\n throw new Error(`Invalid workflow run data for run \"${data.id}\": missing 'workflowId' field`);\n }\n\n // Validate status is a known value\n const validStatuses: WorkflowStatus[] = [\n \"pending\",\n \"running\",\n \"completed\",\n \"failed\",\n \"cancelled\",\n \"waiting\",\n ];\n const status = data.status as WorkflowStatus;\n if (data.status && !validStatuses.includes(status)) {\n throw new Error(\n `Invalid workflow run data for run \"${data.id}\": unknown status \"${data.status}\". ` +\n `Expected one of: ${validStatuses.join(\", \")}`,\n );\n }\n\n // Safely parse JSON fields with error context\n const safeJsonParse = <T>(field: string, value: string | undefined, defaultValue: T): T => {\n if (!value) return defaultValue;\n try {\n return JSON.parse(value) as T;\n } catch (e) {\n throw new Error(\n `Invalid workflow run data for run \"${data.id}\": failed to parse '${field}' as JSON. ` +\n `Error: ${e instanceof Error ? 
e.message : String(e)}`,\n );\n }\n };\n\n return {\n id: data.id,\n workflowId: data.workflowId,\n version: data.version || undefined,\n status: status ?? \"pending\",\n input: safeJsonParse(\"input\", data.input, undefined),\n output: safeJsonParse(\"output\", data.output, undefined),\n nodeStates: safeJsonParse(\"nodeStates\", data.nodeStates, {}),\n currentNodes: safeJsonParse(\"currentNodes\", data.currentNodes, []),\n context: safeJsonParse(\"context\", data.context, { input: undefined }),\n checkpoints: [], // Loaded separately\n pendingApprovals: [], // Loaded separately\n error: safeJsonParse(\"error\", data.error, undefined),\n createdAt: data.createdAt ? new Date(data.createdAt) : new Date(),\n startedAt: data.startedAt ? new Date(data.startedAt) : undefined,\n completedAt: data.completedAt ? new Date(data.completedAt) : undefined,\n };\n }\n\n // =========================================================================\n // Connection Management\n // =========================================================================\n\n private ensureClient(): Promise<RedisAdapter> {\n // Return existing client if available\n if (this.client) {\n return Promise.resolve(this.client);\n }\n\n // Use existing connection promise to prevent race conditions\n // Multiple concurrent calls will share the same connection promise\n if (!this.connectionPromise) {\n this.connectionPromise = this.createConnection();\n }\n\n return this.connectionPromise;\n }\n\n /**\n * Create a new Redis connection\n */\n private async createConnection(): Promise<RedisAdapter> {\n // Lazily load the Redis module for the current runtime\n const { DenoRedis: denoRedis, NodeRedis: nodeRedis } = await getRedisModule();\n\n if (nodeRedis) {\n const client = nodeRedis.createClient({\n url: this.config.url,\n socket: {\n host: this.config.hostname,\n port: this.config.port,\n },\n });\n await client.connect();\n this.client = new NodeRedisAdapter(client);\n } else if (denoRedis) {\n const client = await denoRedis.connect({\n hostname: this.config.hostname,\n port: this.config.port,\n });\n this.client = new DenoRedisAdapter(client);\n } else {\n throw new Error(\"No Redis client available for this runtime.\");\n }\n\n const hostname = this.config.hostname || \"127.0.0.1\";\n const port = this.config.port || 6379;\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Connecting to ${hostname}:${port}`);\n }\n\n // Ensure client is not null for TS\n return this.client!;\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) return;\n\n const client = await this.ensureClient();\n\n // Create consumer group for stream\n try {\n await client.xgroupCreate(\n this.config.streamKey,\n this.config.groupName,\n \"0\",\n true,\n );\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Created consumer group: ${this.config.groupName}`);\n }\n } catch (e) {\n const msg = String(e instanceof Error ? 
e.message : e);\n if (!msg.includes(\"BUSYGROUP\")) {\n logger.error(\"[RedisBackend] Error creating consumer group:\", e);\n }\n }\n\n this.initialized = true;\n }\n\n // =========================================================================\n // Run Management\n // =========================================================================\n\n async createRun(run: WorkflowRun): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Creating run: ${run.id}`);\n }\n\n // Store run in hash\n await client.hset(this.runKey(run.id), this.serializeRun(run));\n\n // Add to indexes\n await client.sadd(this.statusIndexKey(run.status), run.id);\n await client.sadd(this.workflowIndexKey(run.workflowId), run.id);\n\n // Set TTL if configured\n if (this.config.runTtl) {\n await client.expire(this.runKey(run.id), this.config.runTtl);\n }\n }\n\n async getRun(runId: string): Promise<WorkflowRun | null> {\n const client = await this.ensureClient();\n const data = await client.hgetall(this.runKey(runId));\n\n if (!data || Object.keys(data).length === 0) {\n return null;\n }\n\n const run = this.deserializeRun(data);\n\n // Load approvals\n run.pendingApprovals = await this.getPendingApprovals(runId);\n\n return run;\n }\n\n async updateRun(runId: string, patch: Partial<WorkflowRun>): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Updating run: ${runId}`);\n }\n\n // Get current status for index update\n const currentRun = await this.getRun(runId);\n const oldStatus = currentRun?.status;\n\n // Build fields to update\n const fields: Record<string, string> = {};\n\n if (patch.status !== undefined) fields.status = patch.status;\n if (patch.output !== undefined) fields.output = JSON.stringify(patch.output);\n if (patch.nodeStates !== undefined) fields.nodeStates = JSON.stringify(patch.nodeStates);\n if (patch.currentNodes !== undefined) fields.currentNodes = JSON.stringify(patch.currentNodes);\n if (patch.context !== undefined) fields.context = JSON.stringify(patch.context);\n if (patch.error !== undefined) fields.error = JSON.stringify(patch.error);\n if (patch.startedAt !== undefined) fields.startedAt = patch.startedAt.toISOString();\n if (patch.completedAt !== undefined) fields.completedAt = patch.completedAt.toISOString();\n\n if (Object.keys(fields).length > 0) {\n await client.hset(this.runKey(runId), fields);\n }\n\n // Update status index\n if (patch.status && oldStatus && patch.status !== oldStatus) {\n await client.srem(this.statusIndexKey(oldStatus), runId);\n await client.sadd(this.statusIndexKey(patch.status), runId);\n }\n }\n\n async deleteRun(runId: string): Promise<void> {\n const client = await this.ensureClient();\n\n // Get run for index cleanup\n const run = await this.getRun(runId);\n if (!run) return;\n\n // Delete run data\n await client.del(\n this.runKey(runId),\n this.checkpointsKey(runId),\n this.approvalsKey(runId),\n );\n\n // Remove from indexes\n await client.srem(this.statusIndexKey(run.status), runId);\n await client.srem(this.workflowIndexKey(run.workflowId), runId);\n }\n\n async listRuns(filter: RunFilter): Promise<WorkflowRun[]> {\n const client = await this.ensureClient();\n let runIds: string[] = [];\n\n // Get run IDs from indexes\n if (filter.workflowId) {\n runIds = await client.smembers(this.workflowIndexKey(filter.workflowId));\n } else if (filter.status) {\n const statuses = Array.isArray(filter.status) ? 
filter.status : [filter.status];\n for (const status of statuses) {\n const ids = await client.smembers(this.statusIndexKey(status));\n runIds.push(...ids);\n }\n // Deduplicate\n runIds = [...new Set(runIds)];\n } else {\n // Get all runs (expensive - should use cursor in production)\n const keys = await client.keys(`${this.config.prefix}run:*`);\n runIds = keys.map((k) => k.replace(`${this.config.prefix}run:`, \"\"));\n }\n\n // Load runs\n const runs: WorkflowRun[] = [];\n for (const runId of runIds) {\n const run = await this.getRun(runId);\n if (!run) continue;\n\n // Apply filters\n if (filter.status) {\n const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];\n if (!statuses.includes(run.status)) continue;\n }\n\n if (filter.createdAfter && run.createdAt < filter.createdAfter) continue;\n if (filter.createdBefore && run.createdAt > filter.createdBefore) continue;\n\n runs.push(run);\n }\n\n // Sort by creation date (newest first)\n runs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());\n\n // Apply pagination\n let result = runs;\n if (filter.offset) {\n result = result.slice(filter.offset);\n }\n if (filter.limit) {\n result = result.slice(0, filter.limit);\n }\n\n return result;\n }\n\n async countRuns(filter: RunFilter): Promise<number> {\n const runs = await this.listRuns({ ...filter, limit: undefined, offset: undefined });\n return runs.length;\n }\n\n // =========================================================================\n // Checkpointing\n // =========================================================================\n\n async saveCheckpoint(runId: string, checkpoint: Checkpoint): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Saving checkpoint: ${checkpoint.id}`);\n }\n\n const serialized = JSON.stringify({\n ...checkpoint,\n timestamp: checkpoint.timestamp.toISOString(),\n });\n\n await client.rpush(this.checkpointsKey(runId), serialized);\n }\n\n async getLatestCheckpoint(runId: string): Promise<Checkpoint | null> {\n const client = await this.ensureClient();\n\n // Get last element\n const raw = await client.lindex(this.checkpointsKey(runId), -1);\n if (!raw) return null;\n\n const data = JSON.parse(raw);\n return {\n ...data,\n timestamp: new Date(data.timestamp),\n };\n }\n\n async getCheckpoints(runId: string): Promise<Checkpoint[]> {\n const client = await this.ensureClient();\n\n const rawList = await client.lrange(this.checkpointsKey(runId), 0, -1);\n\n return rawList.map((raw) => {\n const data = JSON.parse(raw);\n return {\n ...data,\n timestamp: new Date(data.timestamp),\n };\n });\n }\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n async savePendingApproval(runId: string, approval: PendingApproval): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Saving approval: ${approval.id}`);\n }\n\n const serialized = JSON.stringify({\n ...approval,\n requestedAt: approval.requestedAt.toISOString(),\n expiresAt: approval.expiresAt?.toISOString(),\n decidedAt: approval.decidedAt?.toISOString(),\n });\n\n await client.rpush(this.approvalsKey(runId), serialized);\n }\n\n async getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n const client = await this.ensureClient();\n\n const rawList = await client.lrange(this.approvalsKey(runId), 0, -1);\n\n 
return rawList\n .map((raw) => {\n const data = JSON.parse(raw);\n return {\n ...data,\n requestedAt: new Date(data.requestedAt),\n expiresAt: data.expiresAt ? new Date(data.expiresAt) : undefined,\n decidedAt: data.decidedAt ? new Date(data.decidedAt) : undefined,\n } as PendingApproval;\n })\n .filter((a) => a.status === \"pending\");\n }\n\n async getPendingApproval(runId: string, approvalId: string): Promise<PendingApproval | null> {\n const approvals = await this.getPendingApprovals(runId);\n return approvals.find((a) => a.id === approvalId) || null;\n }\n\n async updateApproval(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void> {\n const client = await this.ensureClient();\n const key = this.approvalsKey(runId);\n\n // Get all approvals to find the index\n const rawList = await client.lrange(key, 0, -1);\n\n // Find the index of the approval to update\n let targetIndex = -1;\n for (let i = 0; i < rawList.length; i++) {\n const data = JSON.parse(rawList[i]!);\n if (data.id === approvalId) {\n targetIndex = i;\n break;\n }\n }\n\n if (targetIndex === -1) {\n throw new Error(`Approval not found: ${approvalId}`);\n }\n\n // Parse and update the approval data\n const data = JSON.parse(rawList[targetIndex]!);\n data.status = decision.approved ? \"approved\" : \"rejected\";\n data.decidedBy = decision.approver;\n data.decidedAt = new Date().toISOString();\n data.comment = decision.comment;\n\n // Use LSET to atomically update the specific index\n // This is more atomic than del + rpush as it only modifies one element\n await client.lset(key, targetIndex, JSON.stringify(data));\n }\n\n async listPendingApprovals(filter?: {\n workflowId?: string;\n approver?: string;\n status?: \"pending\" | \"expired\";\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n const client = await this.ensureClient();\n const result: Array<{ runId: string; approval: PendingApproval }> = [];\n\n // Get all approval keys\n const keys = await client.keys(`${this.config.prefix}approvals:*`);\n\n for (const key of keys) {\n const runId = key.replace(`${this.config.prefix}approvals:`, \"\");\n\n // Check workflow filter\n if (filter?.workflowId) {\n const run = await this.getRun(runId);\n if (!run || run.workflowId !== filter.workflowId) continue;\n }\n\n const rawList = await client.lrange(key, 0, -1);\n\n for (const raw of rawList) {\n const data = JSON.parse(raw);\n const approval: PendingApproval = {\n ...data,\n requestedAt: new Date(data.requestedAt),\n expiresAt: data.expiresAt ? new Date(data.expiresAt) : undefined,\n decidedAt: data.decidedAt ? 
new Date(data.decidedAt) : undefined,\n };\n\n // Check status filter\n if (filter?.status === \"pending\" && approval.status !== \"pending\") continue;\n if (filter?.status === \"expired\") {\n const isExpired = approval.expiresAt && new Date() > approval.expiresAt;\n if (!isExpired) continue;\n }\n\n // Check approver filter\n if (\n filter?.approver && approval.approvers && !approval.approvers.includes(filter.approver)\n ) {\n continue;\n }\n\n result.push({ runId, approval });\n }\n }\n\n return result;\n }\n\n // =========================================================================\n // Queue Operations\n // =========================================================================\n\n async enqueue(job: WorkflowJob): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Enqueueing job: ${job.runId}`);\n }\n\n await client.xadd(this.config.streamKey, \"*\", {\n runId: job.runId,\n workflowId: job.workflowId,\n input: JSON.stringify(job.input),\n priority: String(job.priority || 0),\n createdAt: job.createdAt.toISOString(),\n });\n }\n\n async dequeue(): Promise<WorkflowJob | null> {\n const client = await this.ensureClient();\n\n const streams = await client.xreadgroup(\n [{ key: this.config.streamKey, xid: \">\" }],\n {\n group: this.config.groupName,\n consumer: this.config.consumerName,\n block: 5000, // 5 second timeout\n count: 1,\n },\n );\n\n if (!streams || streams.length === 0) {\n return null;\n }\n\n // Now streams is strongly typed due to Adapter\n const stream = streams[0];\n if (!stream || !stream.messages || stream.messages.length === 0) {\n return null;\n }\n\n const message = stream.messages[0];\n if (!message) {\n return null;\n }\n\n const data = message.data;\n\n return {\n runId: data.runId ?? \"\",\n workflowId: data.workflowId ?? \"\",\n input: data.input ? JSON.parse(data.input) : undefined,\n priority: data.priority ? parseInt(data.priority) : undefined,\n createdAt: data.createdAt ? 
new Date(data.createdAt) : new Date(),\n };\n }\n\n acknowledge(runId: string): Promise<void> {\n // Note: In a full implementation, we'd need to track the message ID\n // For now, this is a placeholder\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Acknowledged: ${runId}`);\n }\n return Promise.resolve();\n }\n\n async nack(runId: string): Promise<void> {\n // Re-enqueue the job\n const run = await this.getRun(runId);\n if (run) {\n await this.enqueue({\n runId: run.id,\n workflowId: run.workflowId,\n input: run.input,\n createdAt: new Date(),\n });\n }\n }\n\n // =========================================================================\n // Distributed Locking\n // =========================================================================\n\n async acquireLock(runId: string, duration: number): Promise<boolean> {\n const client = await this.ensureClient();\n const lockValue = crypto.randomUUID();\n\n const result = await client.set(this.lockKey(runId), lockValue, {\n nx: true,\n px: duration,\n });\n\n return result === \"OK\";\n }\n\n async releaseLock(runId: string): Promise<void> {\n const client = await this.ensureClient();\n await client.del(this.lockKey(runId));\n }\n\n async extendLock(runId: string, duration: number): Promise<boolean> {\n const client = await this.ensureClient();\n const exists = await client.exists(this.lockKey(runId));\n\n if (exists === 0) return false;\n\n await client.expire(this.lockKey(runId), Math.ceil(duration / 1000));\n return true;\n }\n\n async isLocked(runId: string): Promise<boolean> {\n const client = await this.ensureClient();\n const exists = await client.exists(this.lockKey(runId));\n return exists > 0;\n }\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n async healthCheck(): Promise<boolean> {\n try {\n const client = await this.ensureClient();\n await client.set(\"__health_check__\", \"ok\", { ex: 1 });\n return true;\n } catch {\n return false;\n }\n }\n\n destroy(): Promise<void> {\n if (this.client) {\n if (typeof this.client.quit === \"function\") {\n this.client.quit();\n } else if (typeof this.client.disconnect === \"function\") {\n this.client.disconnect();\n }\n this.client = null;\n }\n this.connectionPromise = null;\n this.initialized = false;\n\n if (this.config.debug) {\n logger.debug(\"[RedisBackend] Destroyed\");\n }\n return Promise.resolve();\n }\n}\n", "/**\n * DAG Executor\n *\n * Executes workflow DAGs with proper dependency ordering and parallel execution\n */\n\nimport type {\n BranchNodeConfig,\n Checkpoint,\n MapNodeConfig,\n NodeState,\n ParallelNodeConfig,\n SubWorkflowNodeConfig,\n WaitNodeConfig,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n WorkflowNodeConfig,\n WorkflowRun,\n} from \"../types.ts\";\nimport { generateId } from \"../types.ts\";\nimport type { StepExecutor } from \"./step-executor.ts\";\nimport type { CheckpointManager } from \"./checkpoint-manager.ts\";\n\n/**\n * DAG executor configuration\n */\nexport interface DAGExecutorConfig {\n /** Step executor for running individual steps */\n stepExecutor: StepExecutor;\n /** Checkpoint manager for durability */\n checkpointManager?: CheckpointManager;\n /** Maximum concurrent parallel executions */\n maxConcurrency?: number;\n /** Callback when node execution starts */\n onNodeStart?: (nodeId: string) => void;\n /** Callback when node execution completes */\n onNodeComplete?: (nodeId: string, state: 
NodeState) => void;\n /** Callback when waiting for approval/event */\n onWaiting?: (nodeId: string, waitConfig: WaitNodeConfig) => void;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Result of DAG execution\n */\nexport interface DAGExecutionResult {\n /** Whether the DAG completed successfully */\n completed: boolean;\n /** Whether the DAG is waiting (for approval/event) */\n waiting: boolean;\n /** Node that is waiting (if waiting) */\n waitingNode?: string;\n /** Final context after execution */\n context: WorkflowContext;\n /** Final node states */\n nodeStates: Record<string, NodeState>;\n /** Error if failed */\n error?: string;\n}\n\n/**\n * DAG Executor class\n *\n * Responsible for executing workflow DAGs with:\n * - Topological ordering for dependencies\n * - Parallel execution of independent nodes\n * - Support for branching and conditional logic\n * - Checkpointing for durability\n */\nexport class DAGExecutor {\n private config: DAGExecutorConfig;\n\n constructor(config: DAGExecutorConfig) {\n this.config = {\n maxConcurrency: 10,\n debug: false,\n ...config,\n };\n }\n\n /**\n * Execute a workflow DAG\n */\n async execute(\n nodes: WorkflowNode[],\n run: WorkflowRun,\n startFromNode?: string,\n ): Promise<DAGExecutionResult> {\n const context = { ...run.context };\n const nodeStates = { ...run.nodeStates };\n\n // Build dependency graph\n const { adjList, inDegree, nodeMap } = this.buildGraph(nodes);\n\n // Update in-degrees for nodes whose dependencies are already completed\n // This handles resuming from checkpoints\n for (const [nodeId, state] of Object.entries(nodeStates)) {\n if (state.status === \"completed\" || state.status === \"skipped\") {\n // Decrement in-degree for all dependents of this completed node\n for (const dependent of adjList.get(nodeId) || []) {\n const currentDegree = inDegree.get(dependent) ?? 
0;\n if (currentDegree > 0) {\n inDegree.set(dependent, currentDegree - 1);\n }\n }\n }\n }\n\n // Validate DAG (no cycles)\n if (this.hasCycle(nodes, adjList)) {\n return {\n completed: false,\n waiting: false,\n context,\n nodeStates,\n error: \"Workflow DAG contains cycles\",\n };\n }\n\n // Find starting nodes\n let ready: string[];\n if (startFromNode) {\n // Resume from specific node\n ready = [startFromNode];\n } else {\n // Start from nodes with no dependencies that haven't been completed\n ready = this.getReadyNodes(inDegree, nodeStates);\n }\n\n // Execute nodes in topological order\n while (ready.length > 0) {\n // Execute ready nodes in parallel (respecting max concurrency)\n const batch = ready.slice(0, this.config.maxConcurrency);\n ready = ready.slice(this.config.maxConcurrency);\n\n const results = await Promise.allSettled(\n batch.map((nodeId) => this.executeNode(nodeMap.get(nodeId)!, context, nodeStates)),\n );\n\n // Process results\n for (let i = 0; i < batch.length; i++) {\n const nodeId = batch[i]!;\n const result = results[i]!;\n\n if (result.status === \"fulfilled\") {\n const nodeResult = result.value;\n\n // Update node state\n nodeStates[nodeId] = nodeResult.state;\n Object.assign(context, nodeResult.contextUpdates);\n\n // Handle waiting state\n if (nodeResult.waiting) {\n return {\n completed: false,\n waiting: true,\n waitingNode: nodeId,\n context,\n nodeStates,\n };\n }\n\n // Checkpoint if configured\n const nodeConfig = nodeMap.get(nodeId);\n if (\n nodeResult.state.status === \"completed\" &&\n nodeConfig && this.shouldCheckpoint(nodeConfig)\n ) {\n await this.checkpoint(run.id, nodeId, context, nodeStates);\n }\n\n // Check if node failed (step returned success: false)\n if (nodeResult.state.status === \"failed\") {\n return {\n completed: false,\n waiting: false,\n context,\n nodeStates,\n error: `Node \"${nodeId}\" failed: ${nodeResult.state.error || \"Unknown error\"}`,\n };\n }\n\n // Update ready nodes based on completed dependencies\n if (nodeResult.state.status === \"completed\" || nodeResult.state.status === \"skipped\") {\n for (const dependent of adjList.get(nodeId) || []) {\n const newDegree = inDegree.get(dependent)! - 1;\n inDegree.set(dependent, newDegree);\n }\n }\n } else {\n // Node execution failed\n const error = result.reason instanceof Error\n ? 
result.reason.message\n : String(result.reason);\n\n nodeStates[nodeId] = {\n nodeId,\n status: \"failed\",\n error,\n attempt: (nodeStates[nodeId]?.attempt || 0) + 1,\n completedAt: new Date(),\n };\n\n // Fail fast - don't continue with other nodes\n return {\n completed: false,\n waiting: false,\n context,\n nodeStates,\n error: `Node \"${nodeId}\" failed: ${error}`,\n };\n }\n }\n\n // Get newly ready nodes\n const newReady = this.getReadyNodes(inDegree, nodeStates);\n ready = [...ready, ...newReady];\n }\n\n return {\n completed: true,\n waiting: false,\n context,\n nodeStates,\n };\n }\n\n /**\n * Execute a single node\n */\n private async executeNode(\n node: WorkflowNode,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const nodeId = node.id;\n\n // Check if node is already completed (resuming from checkpoint)\n const existingState = nodeStates[nodeId];\n if (existingState?.status === \"completed\") {\n return { state: existingState, contextUpdates: {}, waiting: false };\n }\n\n this.config.onNodeStart?.(nodeId);\n\n // Check if should skip\n if (node.config.skip && (await node.config.skip(context))) {\n const state = this.config.stepExecutor.createSkippedState(nodeId);\n this.config.onNodeComplete?.(nodeId, state);\n return { state, contextUpdates: {}, waiting: false };\n }\n\n // Execute based on node type\n const config = node.config;\n\n switch (config.type) {\n case \"step\":\n return await this.executeStepNode(node, context);\n\n case \"parallel\":\n return await this.executeParallelNode(node, config, context, nodeStates);\n\n case \"map\":\n return await this.executeMapNode(node, config as MapNodeConfig, context, nodeStates);\n\n case \"branch\":\n return await this.executeBranchNode(node, config as BranchNodeConfig, context, nodeStates);\n\n case \"wait\":\n return await this.executeWaitNode(node, config as WaitNodeConfig, context);\n\n case \"subWorkflow\":\n return await this.executeSubWorkflowNode(\n node,\n config as SubWorkflowNodeConfig,\n context,\n nodeStates,\n );\n\n default:\n throw new Error(\n `Unknown node type \"${(config as WorkflowNodeConfig).type}\" for node \"${node.id}\". ` +\n `Valid types are: step, parallel, map, branch, wait, subWorkflow`,\n );\n }\n }\n\n /**\n * Execute a map node (dynamic fan-out)\n */\n private async executeMapNode(\n node: WorkflowNode,\n config: MapNodeConfig,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // 1. Resolve items collection\n const items = typeof config.items === \"function\" ? await config.items(context) : config.items;\n\n if (!Array.isArray(items)) {\n throw new Error(`Map node \"${node.id}\" items must be an array`);\n }\n\n if (items.length === 0) {\n // Empty collection, done immediately\n const state: NodeState = {\n nodeId: node.id,\n status: \"completed\",\n output: [],\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: new Date(),\n };\n return { state, contextUpdates: { [node.id]: [] }, waiting: false };\n }\n\n // 2. 
Generate child nodes for each item\n const childNodes: WorkflowNode[] = [];\n\n // Check if processor is a WorkflowDefinition or a single node\n const isWorkflowDef = (p: any): p is WorkflowDefinition => !!p.steps;\n\n // We'll map each item to a set of nodes\n // For simplicity in this implementation, if processor is a single node, we clone it.\n // If it's a workflow def, we'd need to expand it (similar to subworkflow).\n // Here we assume it's a single node structure for the \"map\" pattern or a simple chain.\n // To support complex subworkflows per item, best to wrap in a SubWorkflowNode.\n\n for (let i = 0; i < items.length; i++) {\n const item = items[i];\n const childId = `${node.id}_${i}`;\n\n let childNode: WorkflowNode;\n\n if (isWorkflowDef(config.processor)) {\n // Create a SubWorkflow node for this item\n childNode = {\n id: childId,\n config: {\n type: \"subWorkflow\",\n workflow: config.processor,\n input: item,\n retry: config.retry,\n checkpoint: false, // Don't checkpoint individual map items by default\n } as SubWorkflowNodeConfig,\n };\n } else {\n // Clone the single processor node\n // We must override the input to be the current item\n const processorConfig = { ...config.processor.config } as any;\n\n // If it's a step node, ensure input receives the item\n if (processorConfig.type === \"step\") {\n processorConfig.input = item;\n }\n\n childNode = {\n id: childId,\n config: processorConfig,\n };\n }\n\n childNodes.push(childNode);\n }\n\n // 3. Execute child nodes\n // We use a temporary DAG execution for these nodes\n // The maxConcurrency from config overrides default\n const originalConcurrency = this.config.maxConcurrency;\n if (config.concurrency) {\n this.config.maxConcurrency = config.concurrency;\n }\n\n try {\n const result = await this.execute(childNodes, {\n id: `${node.id}_map`,\n workflowId: \"\",\n status: \"running\",\n input: context.input,\n nodeStates: {}, // Start fresh for map iteration\n currentNodes: [],\n context: { ...context }, // Pass copy of context so they can read global state\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // Merge child node states into parent for visibility\n Object.assign(nodeStates, result.nodeStates);\n\n // Collect outputs in order\n const outputs = childNodes.map((child) => {\n const childState = result.nodeStates[child.id];\n return childState?.output;\n });\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? \"running\" : \"failed\"),\n output: outputs,\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.completed ? { [node.id]: outputs } : {},\n waiting: result.waiting,\n };\n } finally {\n // Restore concurrency setting\n this.config.maxConcurrency = originalConcurrency!;\n }\n }\n\n /**\n * Execute a sub-workflow node\n */\n private async executeSubWorkflowNode(\n node: WorkflowNode,\n config: SubWorkflowNodeConfig,\n context: WorkflowContext,\n _nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // 1. Resolve workflow definition\n let workflowDef: WorkflowDefinition;\n if (typeof config.workflow === \"string\") {\n throw new Error(\n \"Resolving workflow by ID is not yet supported in this execution context. 
Pass the WorkflowDefinition object.\",\n );\n } else {\n workflowDef = config.workflow;\n }\n\n // 2. Resolve input\n const input = typeof config.input === \"function\"\n ? await config.input(context)\n : (config.input ?? context.input);\n\n // 3. Expand steps (handle dynamic steps builder)\n let steps: WorkflowNode[];\n if (typeof workflowDef.steps === \"function\") {\n steps = workflowDef.steps({\n input,\n context,\n });\n } else {\n steps = workflowDef.steps;\n }\n\n // 4. Execute sub-workflow\n // We create a new isolated run context for the subworkflow\n const subRunId = `${node.id}_sub_${generateId()}`;\n\n // Execute recursively\n const result = await this.execute(steps, {\n id: subRunId,\n workflowId: workflowDef.id,\n status: \"running\",\n input,\n nodeStates: {},\n currentNodes: [],\n context: {\n input, // Subworkflow starts with fresh context scoped to its input\n // We do NOT inherit parent context to ensure isolation,\n // unless explicitly passed via input.\n },\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // 5. Process result\n let finalOutput = result.context; // Default output is the final context\n\n // If sub-workflow has explicit output transformation\n if (result.completed && config.output) {\n finalOutput = config.output(result.context) as any;\n }\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? \"running\" : \"failed\"),\n output: finalOutput,\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.completed ? { [node.id]: finalOutput } : {},\n waiting: result.waiting,\n };\n }\n\n /**\n * Execute a step node\n */\n private async executeStepNode(\n node: WorkflowNode,\n context: WorkflowContext,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const result = await this.config.stepExecutor.execute(node, context);\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.success ? \"completed\" : \"failed\",\n input: context.input,\n output: result.output,\n error: result.error,\n attempt: 1,\n startedAt: new Date(Date.now() - result.executionTime),\n completedAt: new Date(),\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.success ? { [node.id]: result.output } : {},\n waiting: false,\n };\n }\n\n /**\n * Execute a parallel node\n */\n private async executeParallelNode(\n node: WorkflowNode,\n config: ParallelNodeConfig,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // Execute child nodes using DAG executor recursively\n const result = await this.execute(config.nodes, {\n id: `${node.id}_parallel`,\n workflowId: \"\",\n status: \"running\",\n input: context.input,\n nodeStates: {},\n currentNodes: [],\n context,\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // Merge child node states\n Object.assign(nodeStates, result.nodeStates);\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? 
\"running\" : \"failed\"),\n output: result.context,\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.context,\n waiting: result.waiting,\n };\n }\n\n /**\n * Execute a branch node\n */\n private async executeBranchNode(\n node: WorkflowNode,\n config: BranchNodeConfig,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // Evaluate condition\n const conditionResult = await config.condition(context);\n\n // Select branch to execute\n const branchNodes = conditionResult ? config.then : (config.else || []);\n\n if (branchNodes.length === 0) {\n // No nodes to execute\n const state: NodeState = {\n nodeId: node.id,\n status: \"completed\",\n output: { branch: conditionResult ? \"then\" : \"else\", skipped: true },\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: new Date(),\n };\n\n return { state, contextUpdates: {}, waiting: false };\n }\n\n // Execute branch nodes\n const result = await this.execute(branchNodes, {\n id: `${node.id}_branch`,\n workflowId: \"\",\n status: \"running\",\n input: context.input,\n nodeStates: {},\n currentNodes: [],\n context,\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // Merge child node states\n Object.assign(nodeStates, result.nodeStates);\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? \"running\" : \"failed\"),\n output: {\n branch: conditionResult ? \"then\" : \"else\",\n result: result.context,\n },\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.context,\n waiting: result.waiting,\n };\n }\n\n /**\n * Execute a wait node (approval or event)\n */\n private async executeWaitNode(\n node: WorkflowNode,\n config: WaitNodeConfig,\n context: WorkflowContext,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n // Notify that we're waiting\n this.config.onWaiting?.(node.id, config);\n\n const state: NodeState = {\n nodeId: node.id,\n status: \"running\",\n input: {\n type: config.waitType,\n message: config.message,\n payload: typeof config.payload === \"function\"\n ? 
await config.payload(context)\n : config.payload,\n },\n attempt: 1,\n startedAt: new Date(),\n };\n\n // Signal that workflow is now waiting\n return {\n state,\n contextUpdates: {},\n waiting: true,\n };\n }\n\n /**\n * Build dependency graph from nodes\n */\n private buildGraph(nodes: WorkflowNode[]): {\n adjList: Map<string, string[]>;\n inDegree: Map<string, number>;\n nodeMap: Map<string, WorkflowNode>;\n } {\n const adjList = new Map<string, string[]>();\n const inDegree = new Map<string, number>();\n const nodeMap = new Map<string, WorkflowNode>();\n\n // Initialize\n for (const node of nodes) {\n adjList.set(node.id, []);\n inDegree.set(node.id, 0);\n nodeMap.set(node.id, node);\n }\n\n // Build edges from dependencies\n for (const node of nodes) {\n for (const dep of node.dependsOn || []) {\n if (!adjList.has(dep)) {\n throw new Error(\n `Node \"${node.id}\" depends on unknown node \"${dep}\"`,\n );\n }\n adjList.get(dep)!.push(node.id);\n inDegree.set(node.id, inDegree.get(node.id)! + 1);\n }\n }\n\n // Also handle implicit sequential dependencies (nodes without explicit deps)\n // If no dependencies specified (undefined), assume sequential order\n // If dependsOn is explicitly set (even to []), respect that choice\n let prevNodeId: string | null = null;\n for (const node of nodes) {\n // Only add implicit deps if:\n // 1. dependsOn is undefined (not explicitly set)\n // 2. No other node explicitly depends on this node\n // 3. This node has no incoming edges yet\n if (node.dependsOn === undefined && prevNodeId) {\n const isDependent = this.hasAnyDependents(nodes, node.id);\n const currentInDegree = inDegree.get(node.id) ?? 0;\n\n if (!isDependent && currentInDegree === 0) {\n // This node is \"floating\" - no explicit deps and nothing depends on it\n // Create implicit dependency on previous node\n adjList.get(prevNodeId)!.push(node.id);\n inDegree.set(node.id, inDegree.get(node.id)! + 1);\n }\n }\n prevNodeId = node.id;\n }\n\n return { adjList, inDegree, nodeMap };\n }\n\n /**\n * Check if any node explicitly depends on the given node\n */\n private hasAnyDependents(nodes: WorkflowNode[], nodeId: string): boolean {\n return nodes.some((n) => n.dependsOn?.includes(nodeId));\n }\n\n /**\n * Get nodes that are ready to execute\n */\n private getReadyNodes(\n inDegree: Map<string, number>,\n nodeStates: Record<string, NodeState>,\n ): string[] {\n const ready: string[] = [];\n\n for (const [nodeId, degree] of inDegree) {\n // Node is ready if:\n // 1. No remaining dependencies (in-degree = 0)\n // 2. 
Not already completed/running/failed\n const state = nodeStates[nodeId];\n const isReady = degree === 0 &&\n (!state || state.status === \"pending\");\n\n if (isReady) {\n ready.push(nodeId);\n }\n }\n\n return ready;\n }\n\n /**\n * Check if DAG has cycles (using DFS)\n */\n private hasCycle(\n nodes: WorkflowNode[],\n adjList: Map<string, string[]>,\n ): boolean {\n const visited = new Set<string>();\n const recursionStack = new Set<string>();\n\n const dfs = (nodeId: string): boolean => {\n visited.add(nodeId);\n recursionStack.add(nodeId);\n\n for (const neighbor of adjList.get(nodeId) || []) {\n if (!visited.has(neighbor)) {\n if (dfs(neighbor)) return true;\n } else if (recursionStack.has(neighbor)) {\n return true;\n }\n }\n\n recursionStack.delete(nodeId);\n return false;\n };\n\n for (const node of nodes) {\n if (!visited.has(node.id)) {\n if (dfs(node.id)) return true;\n }\n }\n\n return false;\n }\n\n /**\n * Check if node should be checkpointed\n */\n private shouldCheckpoint(node: WorkflowNode): boolean {\n return node.config.checkpoint ?? false;\n }\n\n /**\n * Create a checkpoint\n */\n private async checkpoint(\n runId: string,\n nodeId: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<void> {\n if (!this.config.checkpointManager) {\n return;\n }\n\n const checkpoint: Checkpoint = {\n id: generateId(\"cp\"),\n nodeId,\n timestamp: new Date(),\n context: structuredClone(context),\n nodeStates: structuredClone(nodeStates),\n };\n\n await this.config.checkpointManager.save(runId, checkpoint);\n }\n}\n", "/**\n * Checkpoint Manager\n *\n * Handles workflow state checkpointing for durability and resume\n */\n\nimport type { Checkpoint, NodeState, WorkflowContext, WorkflowNode } from \"../types.ts\";\nimport { generateId } from \"../types.ts\";\nimport type { WorkflowBackend } from \"../backends/types.ts\";\n\n/**\n * Checkpoint manager configuration\n */\nexport interface CheckpointManagerConfig {\n /** Backend for persisting checkpoints */\n backend: WorkflowBackend;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Resume information returned when resuming from checkpoint\n */\nexport interface ResumeInfo {\n /** Checkpoint to resume from */\n checkpoint: Checkpoint;\n /** Node to start execution from */\n startFromNode: string;\n /** Restored context */\n context: WorkflowContext;\n /** Restored node states */\n nodeStates: Record<string, NodeState>;\n}\n\n/**\n * Checkpoint Manager class\n *\n * Responsible for:\n * - Saving checkpoints after step completion\n * - Loading checkpoints for resume\n * - Determining resume points\n */\nexport class CheckpointManager {\n private config: CheckpointManagerConfig;\n\n constructor(config: CheckpointManagerConfig) {\n this.config = {\n debug: false,\n ...config,\n };\n }\n\n /**\n * Save a checkpoint for a workflow run\n */\n async save(runId: string, checkpoint: Checkpoint): Promise<void> {\n if (this.config.debug) {\n console.log(`[CheckpointManager] Saving checkpoint ${checkpoint.id} for run ${runId}`);\n }\n\n await this.config.backend.saveCheckpoint(runId, checkpoint);\n }\n\n /**\n * Create and save a checkpoint\n */\n async createCheckpoint(\n runId: string,\n nodeId: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<Checkpoint> {\n const checkpoint: Checkpoint = {\n id: generateId(\"cp\"),\n nodeId,\n timestamp: new Date(),\n context: structuredClone(context),\n nodeStates: structuredClone(nodeStates),\n };\n\n await this.save(runId, 
checkpoint);\n\n return checkpoint;\n }\n\n /**\n * Get the latest checkpoint for a workflow run\n */\n async getLatest(runId: string): Promise<Checkpoint | null> {\n return await this.config.backend.getLatestCheckpoint(runId);\n }\n\n /**\n * Get all checkpoints for a workflow run\n */\n async getAll(runId: string): Promise<Checkpoint[]> {\n if (this.config.backend.getCheckpoints) {\n return await this.config.backend.getCheckpoints(runId);\n }\n\n // Fallback: just return latest if getCheckpoints not implemented\n const latest = await this.getLatest(runId);\n return latest ? [latest] : [];\n }\n\n /**\n * Prepare resume information from a checkpoint\n */\n async prepareResume(\n runId: string,\n nodes: WorkflowNode[],\n fromCheckpoint?: string,\n ): Promise<ResumeInfo | null> {\n let checkpoint: Checkpoint | null;\n\n if (fromCheckpoint) {\n // Find specific checkpoint\n const all = await this.getAll(runId);\n checkpoint = all.find((c) => c.id === fromCheckpoint) || null;\n } else {\n // Use latest checkpoint\n checkpoint = await this.getLatest(runId);\n }\n\n if (!checkpoint) {\n return null;\n }\n\n // Find next node to execute after checkpoint\n const startFromNode = this.findNextNode(nodes, checkpoint);\n\n if (!startFromNode) {\n // No more nodes to execute\n return null;\n }\n\n return {\n checkpoint,\n startFromNode,\n context: structuredClone(checkpoint.context),\n nodeStates: structuredClone(checkpoint.nodeStates),\n };\n }\n\n /**\n * Find the next node to execute after a checkpoint\n */\n private findNextNode(\n nodes: WorkflowNode[],\n checkpoint: Checkpoint,\n ): string | null {\n const completedNodeId = checkpoint.nodeId;\n const nodeStates = checkpoint.nodeStates;\n\n // Build node lookup\n const nodeIndex = new Map<string, number>();\n nodes.forEach((node, index) => nodeIndex.set(node.id, index));\n\n // Find the checkpoint node's position\n const checkpointIndex = nodeIndex.get(completedNodeId);\n if (checkpointIndex === undefined) {\n // Checkpoint node not found, start from beginning\n const firstNode = nodes[0];\n return firstNode?.id ?? 
null;\n }\n\n // Look for the first incomplete node after the checkpoint\n for (let i = checkpointIndex + 1; i < nodes.length; i++) {\n const node = nodes[i];\n if (!node) continue;\n\n const state = nodeStates[node.id];\n\n // Find first node that hasn't completed\n if (!state || state.status === \"pending\") {\n return node.id;\n }\n }\n\n // Also check nodes that depend on the checkpoint node\n for (const node of nodes) {\n if (node.dependsOn?.includes(completedNodeId)) {\n const state = nodeStates[node.id];\n if (!state || state.status === \"pending\") {\n return node.id;\n }\n }\n }\n\n // No incomplete nodes found\n return null;\n }\n\n /**\n * Determine if a node should be checkpointed\n */\n shouldCheckpoint(node: WorkflowNode): boolean {\n const config = node.config;\n\n // Explicit checkpoint configuration\n if (config.checkpoint !== undefined) {\n return config.checkpoint;\n }\n\n // Default checkpointing rules:\n // - Always checkpoint after agent steps\n // - Always checkpoint before wait/approval\n // - Checkpoint after parallel completion\n switch (config.type) {\n case \"step\":\n // Checkpoint agent steps, but not tool steps by default\n return \"agent\" in config && !!config.agent;\n\n case \"wait\":\n // Always checkpoint before waiting\n return true;\n\n case \"parallel\":\n // Checkpoint after all parallel steps complete\n return true;\n\n case \"branch\":\n // Don't checkpoint branches by default\n return false;\n\n case \"subWorkflow\":\n // Always checkpoint after sub-workflow\n return true;\n\n default:\n return false;\n }\n }\n\n /**\n * Clean up old checkpoints (keep only the most recent N)\n */\n async cleanup(runId: string, keepCount: number = 5): Promise<void> {\n const all = await this.getAll(runId);\n\n if (all.length <= keepCount) {\n return;\n }\n\n // Sort by timestamp (newest first)\n all.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());\n\n // Get checkpoints to delete (all except the newest keepCount)\n const toDelete = all.slice(keepCount);\n const idsToDelete = toDelete.map((c) => c.id);\n\n if (idsToDelete.length === 0) {\n return;\n }\n\n if (this.config.debug) {\n console.log(\n `[CheckpointManager] Cleaning up ${idsToDelete.length} old checkpoints for run ${runId}`,\n );\n }\n\n // Use batch delete if available, otherwise delete one by one\n if (this.config.backend.deleteCheckpoints) {\n await this.config.backend.deleteCheckpoints(runId, idsToDelete);\n } else if (this.config.backend.deleteCheckpoint) {\n for (const id of idsToDelete) {\n await this.config.backend.deleteCheckpoint(runId, id);\n }\n }\n // If neither method is available, cleanup is a no-op\n }\n}\n", "/**\n * Step Executor\n *\n * Executes individual workflow steps (agents and tools)\n */\n\nimport type { Agent, AgentResponse } from \"../../types/agent.ts\";\nimport type { Tool } from \"../../types/tool.ts\";\nimport type { NodeState, StepNodeConfig, WorkflowContext, WorkflowNode } from \"../types.ts\";\nimport { parseDuration } from \"../types.ts\";\nimport type { BlobStorage } from \"../blob/types.ts\";\n\n/** Default timeout for workflow steps (5 minutes) */\nconst DEFAULT_STEP_TIMEOUT_MS = 5 * 60 * 1000;\n\n/**\n * Agent registry for looking up agents by ID\n */\nexport interface AgentRegistry {\n get(id: string): Agent | undefined;\n /** Optional: List all registered agent IDs (for error messages) */\n list?(): string[];\n}\n\n/**\n * Tool registry for looking up tools by ID\n */\nexport interface ToolRegistry {\n get(id: string): Tool | undefined;\n /** 
Optional: List all registered tool IDs (for error messages) */\n list?(): string[];\n}\n\n/**\n * Step executor configuration\n */\nexport interface StepExecutorConfig {\n /** Agent registry for looking up agents */\n agentRegistry?: AgentRegistry;\n /** Tool registry for looking up tools */\n toolRegistry?: ToolRegistry;\n /** Default timeout for steps (in milliseconds) */\n defaultTimeout?: number;\n /** Blob storage access */\n blobStorage?: BlobStorage;\n /** Callback when step starts */\n onStepStart?: (nodeId: string, input: unknown) => void;\n /** Callback when step completes */\n onStepComplete?: (nodeId: string, output: unknown) => void;\n /** Callback when step fails */\n onStepError?: (nodeId: string, error: Error) => void;\n}\n\n/**\n * Result of executing a step\n */\nexport interface StepResult {\n /** Whether the step succeeded */\n success: boolean;\n /** Output from the step (if successful) */\n output?: unknown;\n /** Error message (if failed) */\n error?: string;\n /** Execution time in milliseconds */\n executionTime: number;\n}\n\n/**\n * Step Executor class\n *\n * Responsible for executing individual workflow steps by invoking\n * the appropriate agent or tool.\n */\nexport class StepExecutor {\n private config: StepExecutorConfig;\n\n constructor(config: StepExecutorConfig = {}) {\n this.config = {\n defaultTimeout: DEFAULT_STEP_TIMEOUT_MS,\n ...config,\n };\n }\n\n /**\n * Execute a step node\n */\n async execute(\n node: WorkflowNode,\n context: WorkflowContext,\n ): Promise<StepResult> {\n const startTime = Date.now();\n const config = node.config as StepNodeConfig;\n\n if (config.type !== \"step\") {\n throw new Error(\n `StepExecutor can only execute 'step' nodes, but node \"${node.id}\" has type '${config.type}'. ` +\n `This is likely a bug in the DAG executor routing.`,\n );\n }\n\n try {\n // Notify start\n const resolvedInput = await this.resolveInput(config.input, context);\n this.config.onStepStart?.(node.id, resolvedInput);\n\n // Execute with timeout\n const timeout = config.timeout ? parseDuration(config.timeout) : this.config.defaultTimeout!;\n\n const output = await this.executeWithTimeout(\n () => this.executeStep(config, resolvedInput, context),\n timeout,\n node.id,\n );\n\n // Notify completion\n this.config.onStepComplete?.(node.id, output);\n\n return {\n success: true,\n output,\n executionTime: Date.now() - startTime,\n };\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n\n // Notify error\n this.config.onStepError?.(node.id, error as Error);\n\n return {\n success: false,\n error: errorMessage,\n executionTime: Date.now() - startTime,\n };\n }\n }\n\n /**\n * Resolve step input from context\n */\n private async resolveInput(\n input: StepNodeConfig[\"input\"],\n context: WorkflowContext,\n ): Promise<unknown> {\n if (input === undefined) {\n // Default to the original workflow input\n return context.input;\n }\n\n if (typeof input === \"function\") {\n return await input(context);\n }\n\n return input;\n }\n\n /**\n * Execute step with timeout\n *\n * Uses Promise.race() to properly handle timeout cleanup.\n * The timeout is always cleared in the finally block to prevent memory leaks.\n */\n private async executeWithTimeout<T>(\n fn: () => Promise<T>,\n timeout: number,\n nodeId: string,\n ): Promise<T> {\n let timeoutId: ReturnType<typeof setTimeout> | undefined;\n\n const timeoutPromise = new Promise<never>((_, reject) => {\n timeoutId = setTimeout(() => {\n reject(new Error(`Step \"${nodeId}\" timed out after ${timeout}ms`));\n }, timeout);\n });\n\n try {\n return await Promise.race([fn(), timeoutPromise]);\n } finally {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n }\n }\n\n /**\n * Execute the actual step (agent or tool)\n */\n private async executeStep(\n config: StepNodeConfig,\n input: unknown,\n context: WorkflowContext,\n ): Promise<unknown> {\n if (config.agent) {\n return await this.executeAgent(config.agent, input, context);\n }\n\n if (config.tool) {\n return await this.executeTool(config.tool, input);\n }\n\n throw new Error(\"Step must have either 'agent' or 'tool' specified\");\n }\n\n /**\n * Execute an agent\n */\n private async executeAgent(\n agent: string | Agent,\n input: unknown,\n context: WorkflowContext,\n ): Promise<unknown> {\n // Resolve agent from registry if string\n const resolvedAgent = typeof agent === \"string\" ? this.getAgent(agent) : agent;\n\n // Prepare input for agent\n const agentInput = typeof input === \"string\" ? input : JSON.stringify(input);\n\n // Execute agent\n const response: AgentResponse = await resolvedAgent.generate({\n input: agentInput,\n context,\n });\n\n // Return the agent's response\n return {\n text: response.text,\n toolCalls: response.toolCalls,\n status: response.status,\n usage: response.usage,\n };\n }\n\n /**\n * Execute a tool\n */\n private async executeTool(\n tool: string | Tool,\n input: unknown,\n ): Promise<unknown> {\n // Resolve tool from registry if string\n const resolvedTool = typeof tool === \"string\" ? this.getTool(tool) : tool;\n\n // Execute tool\n const result = await resolvedTool.execute(\n input as Record<string, unknown>,\n {\n agentId: \"workflow\",\n blobStorage: this.config.blobStorage,\n },\n );\n\n return result;\n }\n\n /**\n * Get agent from registry\n */\n private getAgent(id: string): Agent {\n if (!this.config.agentRegistry) {\n throw new Error(\n `Agent registry not configured. Cannot resolve agent \"${id}\"`,\n );\n }\n\n const agent = this.config.agentRegistry.get(id);\n if (!agent) {\n const available = this.config.agentRegistry.list?.() ?? [];\n const suggestion = available.length > 0\n ? ` Available agents: ${available.slice(0, 5).join(\", \")}${\n available.length > 5 ? 
\"...\" : \"\"\n }`\n : \" No agents are registered.\";\n throw new Error(`Agent not found: \"${id}\".${suggestion}`);\n }\n\n return agent;\n }\n\n /**\n * Get tool from registry\n */\n private getTool(id: string): Tool {\n if (!this.config.toolRegistry) {\n throw new Error(\n `Tool registry not configured. Cannot resolve tool \"${id}\"`,\n );\n }\n\n const tool = this.config.toolRegistry.get(id);\n if (!tool) {\n const available = this.config.toolRegistry.list?.() ?? [];\n const suggestion = available.length > 0\n ? ` Available tools: ${available.slice(0, 5).join(\", \")}${\n available.length > 5 ? \"...\" : \"\"\n }`\n : \" No tools are registered.\";\n throw new Error(`Tool not found: \"${id}\".${suggestion}`);\n }\n\n return tool;\n }\n\n /**\n * Check if a step should be skipped\n */\n async shouldSkip(\n node: WorkflowNode,\n context: WorkflowContext,\n ): Promise<boolean> {\n const config = node.config;\n\n if (!config.skip) {\n return false;\n }\n\n return await config.skip(context);\n }\n\n /**\n * Create initial node state\n */\n createInitialState(nodeId: string): NodeState {\n return {\n nodeId,\n status: \"pending\",\n attempt: 0,\n };\n }\n\n /**\n * Update node state for running\n */\n createRunningState(nodeId: string, input: unknown, attempt: number): NodeState {\n return {\n nodeId,\n status: \"running\",\n input,\n attempt,\n startedAt: new Date(),\n };\n }\n\n /**\n * Update node state for completion\n *\n * @param result - The step execution result\n * @param previousState - The previous node state (contains nodeId)\n */\n createCompletedState(\n result: StepResult,\n previousState: NodeState,\n ): NodeState {\n if (result.success) {\n return {\n ...previousState,\n status: \"completed\",\n output: result.output,\n completedAt: new Date(),\n };\n }\n\n return {\n ...previousState,\n status: \"failed\",\n error: result.error,\n completedAt: new Date(),\n };\n }\n\n /**\n * Update node state for skip\n */\n createSkippedState(nodeId: string): NodeState {\n return {\n nodeId,\n status: \"skipped\",\n attempt: 0,\n completedAt: new Date(),\n };\n }\n}\n", "/**\n * Workflow Executor\n *\n * Main orchestrator for executing durable workflows\n */\n\nimport type {\n BlobResolver,\n NodeState,\n StepBuilderContext,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n WorkflowRun,\n WorkflowStatus,\n} from \"../types.ts\";\nimport { generateId, parseDuration } from \"../types.ts\";\nimport { hasLockSupport, type WorkflowBackend } from \"../backends/types.ts\";\nimport { DAGExecutor } from \"./dag-executor.ts\";\nimport { CheckpointManager } from \"./checkpoint-manager.ts\";\nimport { StepExecutor, type StepExecutorConfig } from \"./step-executor.ts\";\nimport type { BlobStorage } from \"../blob/types.ts\";\n\n/**\n * Workflow executor configuration\n */\nexport interface WorkflowExecutorConfig {\n /** Backend for persistence */\n backend: WorkflowBackend;\n /** Blob storage for large data */\n blobStorage?: BlobStorage;\n /** Step executor configuration */\n stepExecutor?: StepExecutorConfig;\n /** Maximum concurrent parallel executions */\n maxConcurrency?: number;\n /** Enable debug logging */\n debug?: boolean;\n /** Lock duration in milliseconds for distributed execution (default: 30000) */\n lockDuration?: number;\n /** Enable distributed locking (default: true if backend supports it) */\n enableLocking?: boolean;\n /** Callback when workflow starts */\n onStart?: (run: WorkflowRun) => void;\n /** Callback when workflow completes */\n onComplete?: (run: WorkflowRun) 
=> void;\n /** Callback when workflow fails */\n onError?: (run: WorkflowRun, error: Error) => void;\n /** Callback when workflow is waiting */\n onWaiting?: (run: WorkflowRun, nodeId: string) => void;\n}\n\n/**\n * Handle for a running workflow\n */\nexport interface WorkflowHandle<TOutput = unknown> {\n /** Run ID */\n runId: string;\n /** Get current status */\n status(): Promise<WorkflowRun>;\n /** Wait for completion and get result */\n result(): Promise<TOutput>;\n /** Cancel the workflow */\n cancel(): Promise<void>;\n}\n\n/**\n * Workflow Executor class\n *\n * Main entry point for executing workflows. Handles:\n * - Starting new workflow runs\n * - Resuming from checkpoints\n * - Coordinating DAG execution\n * - Managing workflow lifecycle\n */\nexport class WorkflowExecutor {\n private config: WorkflowExecutorConfig;\n private stepExecutor: StepExecutor;\n private checkpointManager: CheckpointManager;\n private dagExecutor: DAGExecutor;\n private workflows = new Map<string, WorkflowDefinition<any, any>>();\n private blobResolver?: BlobResolver;\n\n /** Default lock duration: 30 seconds */\n private static readonly DEFAULT_LOCK_DURATION = 30000;\n\n constructor(config: WorkflowExecutorConfig) {\n this.config = {\n maxConcurrency: 10,\n debug: false,\n lockDuration: WorkflowExecutor.DEFAULT_LOCK_DURATION,\n ...config,\n };\n\n // Initialize components\n this.stepExecutor = new StepExecutor({\n ...this.config.stepExecutor,\n blobStorage: this.config.blobStorage,\n });\n\n this.checkpointManager = new CheckpointManager({\n backend: this.config.backend,\n debug: this.config.debug,\n });\n\n this.dagExecutor = new DAGExecutor({\n stepExecutor: this.stepExecutor,\n checkpointManager: this.checkpointManager,\n maxConcurrency: this.config.maxConcurrency,\n debug: this.config.debug,\n // onWaiting is intentionally a no-op here - waiting state is handled\n // by executeAsync() after DAG execution returns with waiting: true\n onWaiting: () => {},\n });\n\n if (this.config.blobStorage) {\n const bs = this.config.blobStorage;\n this.blobResolver = {\n getText: (ref) => ref.__kind === \"blob\" ? bs.getText(ref.id) : Promise.resolve(null),\n getBytes: (ref) => ref.__kind === \"blob\" ? bs.getBytes(ref.id) : Promise.resolve(null),\n getStream: (ref) => ref.__kind === \"blob\" ? bs.getStream(ref.id) : Promise.resolve(null),\n stat: (ref) => ref.__kind === \"blob\" ? bs.stat(ref.id) : Promise.resolve(null),\n delete: (ref) => ref.__kind === \"blob\" ? 
bs.delete(ref.id) : Promise.resolve(undefined),\n };\n }\n }\n\n /**\n * Register a workflow definition\n */\n register<TInput, TOutput>(workflow: WorkflowDefinition<TInput, TOutput>): void {\n this.workflows.set(workflow.id, workflow);\n }\n\n /**\n * Get a registered workflow\n */\n getWorkflow(id: string): WorkflowDefinition<any, any> | undefined {\n return this.workflows.get(id);\n }\n\n /**\n * Start a new workflow run\n */\n async start<TInput, TOutput>(\n workflowId: string,\n input: TInput,\n options?: { runId?: string },\n ): Promise<WorkflowHandle<TOutput>> {\n const workflow = this.workflows.get(workflowId);\n if (!workflow) {\n throw new Error(`Workflow not found: ${workflowId}`);\n }\n\n // Validate input if schema provided\n if (workflow.inputSchema) {\n workflow.inputSchema.parse(input);\n }\n\n // Create run\n const run: WorkflowRun<TInput, TOutput> = {\n id: options?.runId || generateId(\"run\"),\n workflowId,\n version: workflow.version,\n status: \"pending\",\n input,\n nodeStates: {},\n currentNodes: [],\n context: { input },\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n };\n\n // Persist run\n await this.config.backend.createRun(run);\n\n // Start execution asynchronously\n this.executeAsync(run.id).catch((error) => {\n console.error(`Workflow ${run.id} failed:`, error);\n });\n\n return this.createHandle<TOutput>(run.id);\n }\n\n /**\n * Resume a paused/waiting workflow\n */\n async resume(runId: string, fromCheckpoint?: string): Promise<void> {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (run.status !== \"waiting\" && run.status !== \"pending\") {\n throw new Error(\n `Cannot resume workflow run \"${runId}\": current status is \"${run.status}\". ` +\n `Only runs in \"waiting\" or \"pending\" status can be resumed.`,\n );\n }\n\n // Get workflow definition\n const workflow = this.workflows.get(run.workflowId);\n if (!workflow) {\n throw new Error(`Workflow not found: ${run.workflowId}`);\n }\n\n // Get nodes\n const nodes = this.resolveNodes(workflow, run.context);\n\n // Get resume point\n const resumeInfo = await this.checkpointManager.prepareResume(\n runId,\n nodes,\n fromCheckpoint,\n );\n\n // If an explicit checkpoint was requested but not found, throw error\n if (fromCheckpoint && !resumeInfo) {\n throw new Error(\n `Checkpoint \"${fromCheckpoint}\" not found for run \"${runId}\". 
` +\n `Cannot resume from non-existent checkpoint.`,\n );\n }\n\n if (resumeInfo) {\n // Update run state from checkpoint\n await this.config.backend.updateRun(runId, {\n status: \"running\",\n context: resumeInfo.context,\n nodeStates: resumeInfo.nodeStates,\n });\n }\n\n // Resume execution\n await this.executeAsync(runId, resumeInfo?.startFromNode);\n }\n\n /**\n * Execute a workflow run asynchronously\n *\n * Uses distributed locking (when backend supports it) to prevent\n * concurrent execution of the same workflow run.\n */\n async executeAsync(runId: string, startFromNode?: string): Promise<void> {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n // Get workflow definition\n const workflow = this.workflows.get(run.workflowId);\n if (!workflow) {\n throw new Error(`Workflow not found: ${run.workflowId}`);\n }\n\n // Try to acquire lock if backend supports it and locking is enabled\n const useLocking = this.config.enableLocking !== false &&\n hasLockSupport(this.config.backend);\n const lockDuration = this.config.lockDuration!;\n\n if (useLocking) {\n const acquired = await this.config.backend.acquireLock!(runId, lockDuration);\n if (!acquired) {\n throw new Error(\n `Cannot execute workflow run \"${runId}\": another worker is already executing it. ` +\n `This can happen when multiple workers try to execute the same run concurrently.`,\n );\n }\n\n if (this.config.debug) {\n console.log(`[WorkflowExecutor] Acquired lock for run: ${runId}`);\n }\n }\n\n try {\n // Update status to running\n await this.config.backend.updateRun(runId, {\n status: \"running\",\n startedAt: run.startedAt || new Date(),\n });\n\n // Notify start\n const updatedRun = await this.config.backend.getRun(runId);\n this.config.onStart?.(updatedRun!);\n\n // Resolve workflow nodes\n const nodes = this.resolveNodes(workflow, run.context);\n\n // Execute with timeout if configured\n const result = await this.executeWithTimeout(\n () => this.dagExecutor.execute(nodes, run as WorkflowRun, startFromNode),\n workflow.timeout,\n );\n\n // Update run based on result\n if (result.completed) {\n // Workflow completed successfully\n const finalRun = await this.completeRun(\n runId,\n result.context,\n result.nodeStates,\n );\n\n // Validate output if schema provided\n if (workflow.outputSchema) {\n workflow.outputSchema.parse(finalRun.output);\n }\n\n // Call completion handler\n await workflow.onComplete?.(finalRun.output, finalRun.context);\n this.config.onComplete?.(finalRun);\n } else if (result.waiting) {\n // Workflow is waiting for approval/event\n await this.pauseRun(\n runId,\n result.waitingNode!,\n result.context,\n result.nodeStates,\n );\n\n const pausedRun = await this.config.backend.getRun(runId);\n this.config.onWaiting?.(pausedRun!, result.waitingNode!);\n } else {\n // Workflow failed\n const error = new Error(result.error || \"Unknown error\");\n await this.failRun(runId, error, result.context, result.nodeStates);\n\n await workflow.onError?.(error, result.context);\n this.config.onError?.(run, error);\n }\n } catch (error) {\n // Unexpected error during execution\n const err = error instanceof Error ? 
error : new Error(String(error));\n await this.failRun(runId, err, run.context, run.nodeStates);\n\n await workflow.onError?.(err, run.context);\n this.config.onError?.(run, err);\n\n throw error;\n } finally {\n // Always release lock when done\n if (useLocking) {\n await this.config.backend.releaseLock!(runId);\n\n if (this.config.debug) {\n console.log(`[WorkflowExecutor] Released lock for run: ${runId}`);\n }\n }\n }\n }\n\n /**\n * Resolve workflow nodes from definition\n */\n private resolveNodes(\n workflow: WorkflowDefinition,\n context: WorkflowContext,\n ): WorkflowNode[] {\n let nodes: WorkflowNode[];\n\n if (Array.isArray(workflow.steps)) {\n nodes = workflow.steps;\n } else {\n // Dynamic steps - call the function\n if (!this.config.blobStorage) {\n // Warn if blobStorage is missing but dynamic steps might need it?\n // For now, we allow it to be undefined if user doesn't use it.\n }\n\n const builderContext: StepBuilderContext = {\n input: context.input,\n context,\n blobStorage: this.config.blobStorage,\n blob: this.blobResolver,\n };\n nodes = workflow.steps(builderContext);\n }\n\n // Validate resolved nodes\n this.validateNodes(nodes, workflow.id);\n\n return nodes;\n }\n\n /**\n * Validate workflow nodes\n */\n private validateNodes(nodes: WorkflowNode[], workflowId: string): void {\n if (!Array.isArray(nodes)) {\n throw new Error(`Workflow \"${workflowId}\" steps must resolve to an array`);\n }\n\n if (nodes.length === 0) {\n throw new Error(`Workflow \"${workflowId}\" must have at least one step`);\n }\n\n const seenIds = new Set<string>();\n\n for (let i = 0; i < nodes.length; i++) {\n const node = nodes[i];\n\n if (!node) {\n throw new Error(`Workflow \"${workflowId}\" has undefined node at index ${i}`);\n }\n\n if (!node.id || typeof node.id !== \"string\") {\n throw new Error(`Workflow \"${workflowId}\" node at index ${i} has invalid ID`);\n }\n\n if (seenIds.has(node.id)) {\n throw new Error(`Workflow \"${workflowId}\" has duplicate node ID: \"${node.id}\"`);\n }\n seenIds.add(node.id);\n\n if (!node.config || typeof node.config !== \"object\") {\n throw new Error(`Workflow \"${workflowId}\" node \"${node.id}\" has invalid config`);\n }\n\n if (!node.config.type) {\n throw new Error(`Workflow \"${workflowId}\" node \"${node.id}\" config missing type`);\n }\n }\n }\n\n /**\n * Execute with optional timeout\n *\n * Uses Promise.race() to properly handle timeout cleanup.\n * The timeout is always cleared in the finally block to prevent memory leaks.\n */\n private async executeWithTimeout<T>(\n fn: () => Promise<T>,\n timeout?: string | number,\n ): Promise<T> {\n if (!timeout) {\n return fn();\n }\n\n const timeoutMs = parseDuration(timeout);\n let timeoutId: ReturnType<typeof setTimeout> | undefined;\n\n const timeoutPromise = new Promise<never>((_, reject) => {\n timeoutId = setTimeout(() => {\n reject(new Error(`Workflow timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n });\n\n try {\n return await Promise.race([fn(), timeoutPromise]);\n } finally {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n }\n }\n\n /**\n * Mark run as completed\n */\n private async completeRun(\n runId: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<WorkflowRun> {\n // Determine output (last node's output or accumulated context)\n const output = this.determineOutput(context);\n\n await this.config.backend.updateRun(runId, {\n status: \"completed\" as WorkflowStatus,\n output,\n context,\n nodeStates,\n completedAt: new 
Date(),\n });\n\n return (await this.config.backend.getRun(runId))!;\n }\n\n /**\n * Mark run as failed\n */\n private async failRun(\n runId: string,\n error: Error,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<void> {\n await this.config.backend.updateRun(runId, {\n status: \"failed\" as WorkflowStatus,\n context,\n nodeStates,\n error: {\n message: error.message,\n stack: error.stack,\n },\n completedAt: new Date(),\n });\n }\n\n /**\n * Mark run as waiting\n */\n private async pauseRun(\n runId: string,\n waitingNode: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<void> {\n await this.config.backend.updateRun(runId, {\n status: \"waiting\" as WorkflowStatus,\n currentNodes: [waitingNode],\n context,\n nodeStates,\n });\n }\n\n /**\n * Determine workflow output from context\n */\n private determineOutput(context: WorkflowContext): unknown {\n // Remove 'input' and return the rest as output\n const { input: _input, ...rest } = context;\n return rest;\n }\n\n /**\n * Create a handle for a workflow run\n */\n private createHandle<TOutput>(runId: string): WorkflowHandle<TOutput> {\n return {\n runId,\n status: () => this.config.backend.getRun(runId) as Promise<WorkflowRun>,\n result: () => this.waitForResult<TOutput>(runId),\n cancel: () => this.cancel(runId),\n };\n }\n\n /**\n * Wait for workflow result\n */\n private async waitForResult<TOutput>(\n runId: string,\n pollInterval: number = 1000,\n ): Promise<TOutput> {\n while (true) {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (run.status === \"completed\") {\n return run.output as TOutput;\n }\n\n if (run.status === \"failed\") {\n throw new Error(run.error?.message || \"Workflow failed\");\n }\n\n if (run.status === \"cancelled\") {\n throw new Error(\"Workflow was cancelled\");\n }\n\n // Wait before polling again\n await new Promise((resolve) => setTimeout(resolve, pollInterval));\n }\n }\n\n /**\n * Cancel a workflow run\n */\n async cancel(runId: string): Promise<void> {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (run.status === \"completed\" || run.status === \"failed\") {\n throw new Error(\n `Cannot cancel workflow run \"${runId}\": run has already ${run.status}. 
` +\n `Only active runs (pending, running, waiting) can be cancelled.`,\n );\n }\n\n await this.config.backend.updateRun(runId, {\n status: \"cancelled\" as WorkflowStatus,\n completedAt: new Date(),\n });\n }\n\n /**\n * Get workflow run status\n */\n getStatus(runId: string): Promise<WorkflowRun | null> {\n return this.config.backend.getRun(runId);\n }\n\n /**\n * List workflow runs\n */\n listRuns(options?: {\n workflowId?: string;\n status?: WorkflowStatus | WorkflowStatus[];\n limit?: number;\n }): Promise<WorkflowRun[]> {\n return this.config.backend.listRuns({\n workflowId: options?.workflowId,\n status: options?.status,\n limit: options?.limit,\n });\n }\n}\n", "/**\n * Approval Manager\n *\n * Handles human-in-the-loop approval workflows\n */\n\nimport type {\n ApprovalDecision,\n PendingApproval,\n WaitNodeConfig,\n WorkflowContext,\n WorkflowRun,\n} from \"../types.ts\";\nimport { generateId, parseDuration } from \"../types.ts\";\nimport type { WorkflowBackend } from \"../backends/types.ts\";\nimport type { WorkflowExecutor } from \"../executor/workflow-executor.ts\";\n\n/**\n * Approval notification callback\n */\nexport type ApprovalNotifier = (\n approval: PendingApproval,\n run: WorkflowRun,\n) => Promise<void>;\n\n/**\n * Approval manager configuration\n */\nexport interface ApprovalManagerConfig {\n /** Backend for persistence */\n backend: WorkflowBackend;\n /** Workflow executor for resuming after approval */\n executor?: WorkflowExecutor;\n /** Notification callback */\n notifier?: ApprovalNotifier;\n /** Check expired approvals interval (ms) */\n expirationCheckInterval?: number;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Approval request result\n */\nexport interface ApprovalRequest {\n /** Approval ID */\n approvalId: string;\n /** Run ID */\n runId: string;\n /** Node ID */\n nodeId: string;\n /** Message for approver */\n message: string;\n /** Payload with context */\n payload: unknown;\n /** When approval expires */\n expiresAt?: Date;\n}\n\n/**\n * Approval Manager class\n *\n * Responsible for:\n * - Creating pending approvals\n * - Processing approval decisions\n * - Resuming workflows after approval\n * - Handling approval timeouts\n */\nexport class ApprovalManager {\n private config: ApprovalManagerConfig;\n private expirationTimer?: ReturnType<typeof setInterval>;\n private destroyed = false;\n\n constructor(config: ApprovalManagerConfig) {\n this.config = {\n expirationCheckInterval: 60000, // Check every minute\n debug: false,\n ...config,\n };\n\n // Start expiration checker if interval is set\n if (this.config.expirationCheckInterval && this.config.expirationCheckInterval > 0) {\n this.startExpirationChecker();\n }\n }\n\n /**\n * Create a pending approval request\n */\n async createApproval(\n run: WorkflowRun,\n nodeId: string,\n waitConfig: WaitNodeConfig,\n context: WorkflowContext,\n ): Promise<ApprovalRequest> {\n // Resolve payload if it's a function\n const payload = typeof waitConfig.payload === \"function\"\n ? await waitConfig.payload(context)\n : waitConfig.payload;\n\n // Calculate expiration\n const expiresAt = waitConfig.timeout\n ? 
new Date(Date.now() + parseDuration(waitConfig.timeout))\n : undefined;\n\n const approval: PendingApproval = {\n id: generateId(\"apr\"),\n nodeId,\n message: waitConfig.message || \"Approval required\",\n payload,\n approvers: waitConfig.approvers,\n requestedAt: new Date(),\n expiresAt,\n status: \"pending\",\n };\n\n if (this.config.debug) {\n console.log(`[ApprovalManager] Creating approval ${approval.id} for run ${run.id}`);\n }\n\n // Save to backend\n await this.config.backend.savePendingApproval(run.id, approval);\n\n // Notify approvers\n if (this.config.notifier) {\n try {\n await this.config.notifier(approval, run);\n } catch (error) {\n console.error(`[ApprovalManager] Failed to notify approvers:`, error);\n }\n }\n\n return {\n approvalId: approval.id,\n runId: run.id,\n nodeId,\n message: approval.message,\n payload: approval.payload,\n expiresAt: approval.expiresAt,\n };\n }\n\n /**\n * Get pending approval by ID\n */\n async getApproval(\n runId: string,\n approvalId: string,\n ): Promise<PendingApproval | null> {\n if (this.config.backend.getPendingApproval) {\n return this.config.backend.getPendingApproval(runId, approvalId);\n }\n\n // Fallback: get all and find\n const all = await this.config.backend.getPendingApprovals(runId);\n return all.find((a) => a.id === approvalId) || null;\n }\n\n /**\n * Get all pending approvals for a run\n */\n getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n return this.config.backend.getPendingApprovals(runId);\n }\n\n /**\n * Process an approval decision\n */\n async processDecision(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void> {\n if (this.config.debug) {\n console.log(\n `[ApprovalManager] Processing decision for ${approvalId}: ${\n decision.approved ? 
\"approved\" : \"rejected\"\n }`,\n );\n }\n\n // Get the approval\n const approval = await this.getApproval(runId, approvalId);\n if (!approval) {\n throw new Error(`Approval not found: ${approvalId}`);\n }\n\n // Check if already decided\n if (approval.status !== \"pending\") {\n throw new Error(`Approval already processed: ${approval.status}`);\n }\n\n // Check if expired\n if (approval.expiresAt && new Date() > approval.expiresAt) {\n throw new Error(\"Approval has expired\");\n }\n\n // Check if approver is authorized\n if (\n approval.approvers &&\n approval.approvers.length > 0 &&\n !approval.approvers.includes(decision.approver)\n ) {\n throw new Error(\"Not authorized to approve this request\");\n }\n\n // Update the approval\n await this.config.backend.updateApproval(runId, approvalId, decision);\n\n // Get the run\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n // Update run context with approval result\n const updatedContext = {\n ...run.context,\n [approval.nodeId]: {\n approved: decision.approved,\n approver: decision.approver,\n comment: decision.comment,\n decidedAt: new Date().toISOString(),\n },\n };\n\n // Update node state\n const updatedNodeStates = {\n ...run.nodeStates,\n [approval.nodeId]: {\n nodeId: approval.nodeId,\n status: \"completed\" as const,\n output: {\n approved: decision.approved,\n approver: decision.approver,\n comment: decision.comment,\n },\n attempt: 1,\n completedAt: new Date(),\n },\n };\n\n await this.config.backend.updateRun(runId, {\n context: updatedContext,\n nodeStates: updatedNodeStates,\n });\n\n // Resume workflow if approved and executor is available\n if (decision.approved && this.config.executor) {\n try {\n await this.config.executor.resume(runId);\n } catch (error) {\n console.error(`[ApprovalManager] Failed to resume workflow:`, error);\n throw error;\n }\n } else if (!decision.approved) {\n // If rejected, fail the workflow\n await this.config.backend.updateRun(runId, {\n status: \"failed\",\n error: {\n message: `Approval \"${approvalId}\" was rejected${\n decision.comment ? 
`: ${decision.comment}` : \"\"\n }`,\n },\n completedAt: new Date(),\n });\n }\n }\n\n /**\n * Approve an approval request\n */\n async approve(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n await this.processDecision(runId, approvalId, {\n approved: true,\n approver,\n comment,\n });\n }\n\n /**\n * Reject an approval request\n */\n async reject(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n await this.processDecision(runId, approvalId, {\n approved: false,\n approver,\n comment,\n });\n }\n\n /**\n * List all pending approvals across workflows\n */\n listAllPending(filter?: {\n workflowId?: string;\n approver?: string;\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n if (this.config.backend.listPendingApprovals) {\n return this.config.backend.listPendingApprovals({\n ...filter,\n status: \"pending\",\n });\n }\n\n // Fallback: not supported by backend\n console.warn(\n \"[ApprovalManager] listPendingApprovals not supported by backend\",\n );\n return Promise.resolve([]);\n }\n\n /**\n * Check and expire stale approvals\n */\n async checkExpiredApprovals(): Promise<void> {\n // Guard against post-stop execution\n if (this.destroyed) {\n return;\n }\n\n if (!this.config.backend.listPendingApprovals) {\n return;\n }\n\n const pending = await this.config.backend.listPendingApprovals({\n status: \"pending\",\n });\n\n const now = new Date();\n\n for (const { runId, approval } of pending) {\n if (approval.expiresAt && now > approval.expiresAt) {\n if (this.config.debug) {\n console.log(`[ApprovalManager] Expiring approval ${approval.id}`);\n }\n\n // Mark as expired\n await this.config.backend.updateApproval(runId, approval.id, {\n approved: false,\n approver: \"system\",\n comment: \"Approval expired\",\n });\n\n // Fail the workflow\n await this.config.backend.updateRun(runId, {\n status: \"failed\",\n error: {\n message: `Approval \"${approval.id}\" expired`,\n },\n completedAt: new Date(),\n });\n }\n }\n }\n\n /**\n * Start the expiration checker timer\n */\n private startExpirationChecker(): void {\n this.expirationTimer = setInterval(() => {\n this.checkExpiredApprovals().catch((error) => {\n console.error(`[ApprovalManager] Expiration check failed:`, error);\n });\n }, this.config.expirationCheckInterval);\n }\n\n /**\n * Stop the approval manager\n */\n stop(): void {\n this.destroyed = true;\n if (this.expirationTimer) {\n clearInterval(this.expirationTimer);\n this.expirationTimer = undefined;\n }\n }\n}\n", "/**\n * Agent Registry\n *\n * Registry for managing and looking up agents in workflow execution\n */\n\nimport type { Agent } from \"../../types/agent.ts\";\nimport type { Tool } from \"../../types/tool.ts\";\nimport type { AgentRegistry, ToolRegistry } from \"../executor/step-executor.ts\";\n\n/**\n * Default agent registry implementation\n *\n * Provides in-memory storage for agents that can be used in workflow steps.\n *\n * @example\n * ```typescript\n * import { DefaultAgentRegistry } from 'veryfront/ai/workflow/runtime/agent-registry';\n *\n * const registry = new DefaultAgentRegistry();\n *\n * // Register agents\n * registry.registerAgent(researchAgent);\n * registry.registerAgent(writerAgent);\n *\n * // Use with workflow client\n * const client = createWorkflowClient({\n * executor: {\n * agentRegistry: registry,\n * },\n * });\n * ```\n */\nexport class DefaultAgentRegistry implements AgentRegistry {\n private agents = new Map<string, 
Agent>();\n\n /**\n * Register an agent\n */\n registerAgent(agent: Agent): void {\n this.agents.set(agent.id, agent);\n }\n\n /**\n * Register multiple agents\n */\n registerAgents(agents: Agent[]): void {\n for (const agent of agents) {\n this.registerAgent(agent);\n }\n }\n\n /**\n * Get an agent by ID (implements AgentRegistry.get)\n */\n get(id: string): Agent | undefined {\n return this.agents.get(id);\n }\n\n /**\n * Check if an agent exists\n */\n hasAgent(id: string): boolean {\n return this.agents.has(id);\n }\n\n /**\n * List all registered agent IDs\n */\n listAgentIds(): string[] {\n return Array.from(this.agents.keys());\n }\n\n /**\n * Remove an agent\n */\n removeAgent(id: string): boolean {\n return this.agents.delete(id);\n }\n\n /**\n * Clear all registrations\n */\n clear(): void {\n this.agents.clear();\n }\n}\n\n/**\n * Default tool registry implementation\n *\n * Provides in-memory storage for tools that can be used in workflow steps.\n */\nexport class DefaultToolRegistry implements ToolRegistry {\n private tools = new Map<string, Tool>();\n\n /**\n * Register a tool\n */\n registerTool(tool: Tool): void {\n this.tools.set(tool.id, tool);\n }\n\n /**\n * Register multiple tools\n */\n registerTools(tools: Tool[]): void {\n for (const tool of tools) {\n this.registerTool(tool);\n }\n }\n\n /**\n * Get a tool by name (implements ToolRegistry.get)\n */\n get(name: string): Tool | undefined {\n return this.tools.get(name);\n }\n\n /**\n * Check if a tool exists\n */\n hasTool(name: string): boolean {\n return this.tools.has(name);\n }\n\n /**\n * List all registered tool names\n */\n listToolNames(): string[] {\n return Array.from(this.tools.keys());\n }\n\n /**\n * Remove a tool\n */\n removeTool(name: string): boolean {\n return this.tools.delete(name);\n }\n\n /**\n * Clear all tools\n */\n clear(): void {\n this.tools.clear();\n }\n}\n\n/**\n * Create a mock agent for testing\n *\n * Creates an agent that returns a predictable response without\n * making actual API calls.\n *\n * @example\n * ```typescript\n * const mockAgent = createMockAgent('test-agent', {\n * response: 'This is the mock response',\n * });\n *\n * registry.registerAgent(mockAgent);\n * ```\n */\nexport function createMockAgent(\n id: string,\n options: {\n response?: string;\n responseFunc?: (input: string) => string | Promise<string>;\n toolCalls?: Array<{\n id: string;\n name: string;\n args: Record<string, unknown>;\n }>;\n } = {},\n): Agent {\n return {\n id,\n config: {\n model: \"mock/test-model\",\n system: \"Mock agent for testing\",\n },\n async generate(input: { input: string | unknown[]; context?: Record<string, unknown> }) {\n const inputStr = typeof input.input === \"string\" ? input.input : JSON.stringify(input.input);\n\n let text: string;\n if (options.responseFunc) {\n text = await options.responseFunc(inputStr);\n } else {\n text = options.response ?? `Mock response for: ${inputStr.slice(0, 50)}...`;\n }\n\n return {\n text,\n messages: [\n { role: \"user\" as const, content: inputStr },\n { role: \"assistant\" as const, content: text },\n ],\n toolCalls: options.toolCalls?.map((tc) => ({\n ...tc,\n status: \"completed\" as const,\n })) ?? 
[],\n status: \"completed\" as const,\n usage: {\n promptTokens: 100,\n completionTokens: 50,\n totalTokens: 150,\n },\n };\n },\n stream() {\n throw new Error(\"Mock agent does not support streaming\");\n },\n respond() {\n throw new Error(\"Mock agent does not support HTTP responses\");\n },\n getMemory() {\n throw new Error(\"Mock agent does not have memory\");\n },\n getMemoryStats() {\n return Promise.resolve({\n totalMessages: 0,\n estimatedTokens: 0,\n type: \"mock\",\n });\n },\n async clearMemory() {\n // No-op\n },\n };\n}\n\n/**\n * Create a mock tool for testing\n *\n * @example\n * ```typescript\n * const mockTool = createMockTool('fetchData', {\n * result: { data: 'test' },\n * });\n *\n * registry.registerTool(mockTool);\n * ```\n */\nexport function createMockTool(\n id: string,\n options: {\n description?: string;\n result?: unknown;\n executeFunc?: (\n args: Record<string, unknown>,\n ) => unknown | Promise<unknown>;\n } = {},\n): Tool {\n // Import z dynamically to avoid bundling issues\n const mockSchema = { parse: (x: unknown) => x } as unknown as import(\"zod\").z.ZodSchema;\n\n return {\n id,\n description: options.description ?? `Mock tool: ${id}`,\n inputSchema: mockSchema,\n async execute(args: Record<string, unknown>) {\n if (options.executeFunc) {\n return await options.executeFunc(args);\n }\n return options.result ?? { success: true, tool: id, args };\n },\n };\n}\n", "/**\n * Workflow Client\n *\n * High-level API for interacting with workflows\n */\n\nimport type {\n PendingApproval,\n RunFilter,\n WorkflowDefinition,\n WorkflowRun,\n WorkflowStatus,\n} from \"../types.ts\";\nimport type { WorkflowBackend } from \"../backends/types.ts\";\nimport { MemoryBackend } from \"../backends/memory.ts\";\nimport {\n WorkflowExecutor,\n type WorkflowExecutorConfig,\n type WorkflowHandle,\n} from \"../executor/workflow-executor.ts\";\nimport { ApprovalManager, type ApprovalManagerConfig } from \"../runtime/approval-manager.ts\";\nimport type { Workflow } from \"../dsl/workflow.ts\";\n\n/**\n * Workflow client configuration\n */\nexport interface WorkflowClientConfig {\n /** Backend for persistence (default: MemoryBackend) */\n backend?: WorkflowBackend;\n /** Executor configuration */\n executor?: Partial<WorkflowExecutorConfig>;\n /** Approval manager configuration */\n approval?: Partial<ApprovalManagerConfig>;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Workflow Client class\n *\n * The main entry point for working with workflows.\n * Provides a simple API for:\n * - Registering workflow definitions\n * - Starting and managing workflow runs\n * - Handling approvals\n */\nexport class WorkflowClient {\n private backend: WorkflowBackend;\n private executor: WorkflowExecutor;\n private approvalManager: ApprovalManager;\n private debug: boolean;\n\n constructor(config: WorkflowClientConfig = {}) {\n this.debug = config.debug ?? false;\n this.backend = config.backend ?? 
new MemoryBackend({ debug: this.debug });\n\n // Initialize executor\n this.executor = new WorkflowExecutor({\n backend: this.backend,\n debug: this.debug,\n ...config.executor,\n });\n\n // Initialize approval manager\n this.approvalManager = new ApprovalManager({\n backend: this.backend,\n executor: this.executor,\n debug: this.debug,\n ...config.approval,\n });\n }\n\n // =========================================================================\n // Workflow Registration\n // =========================================================================\n\n /**\n * Register a workflow definition\n */\n register(\n workflow: Workflow | WorkflowDefinition,\n ): void {\n const definition = \"definition\" in workflow ? workflow.definition : workflow;\n\n this.executor.register(definition as WorkflowDefinition);\n\n if (this.debug) {\n console.log(`[WorkflowClient] Registered workflow: ${definition.id}`);\n }\n }\n\n /**\n * Register multiple workflows\n */\n registerAll(\n workflows: Array<Workflow | WorkflowDefinition>,\n ): void {\n for (const workflow of workflows) {\n this.register(workflow);\n }\n }\n\n // =========================================================================\n // Workflow Execution\n // =========================================================================\n\n /**\n * Start a new workflow run\n *\n * @example\n * ```typescript\n * const handle = await client.start('content-pipeline', {\n * topic: 'AI Safety',\n * requiresApproval: true,\n * });\n *\n * const result = await handle.result();\n * ```\n */\n start<TInput, TOutput = unknown>(\n workflowId: string,\n input: TInput,\n options?: { runId?: string },\n ): Promise<WorkflowHandle<TOutput>> {\n return this.executor.start<TInput, TOutput>(workflowId, input, options);\n }\n\n /**\n * Resume a paused/waiting workflow\n */\n resume(runId: string): Promise<void> {\n return this.executor.resume(runId);\n }\n\n /**\n * Cancel a workflow run\n */\n cancel(runId: string): Promise<void> {\n return this.executor.cancel(runId);\n }\n\n // =========================================================================\n // Run Management\n // =========================================================================\n\n /**\n * Get a workflow run by ID\n */\n getRun(runId: string): Promise<WorkflowRun | null> {\n return this.backend.getRun(runId);\n }\n\n /**\n * List workflow runs\n */\n listRuns(filter?: RunFilter): Promise<WorkflowRun[]> {\n return this.backend.listRuns(filter ?? 
{});\n }\n\n /**\n * Get runs by status\n */\n getRunsByStatus(\n status: WorkflowStatus | WorkflowStatus[],\n limit?: number,\n ): Promise<WorkflowRun[]> {\n return this.backend.listRuns({ status, limit });\n }\n\n /**\n * Get runs for a specific workflow\n */\n getRunsForWorkflow(\n workflowId: string,\n limit?: number,\n ): Promise<WorkflowRun[]> {\n return this.backend.listRuns({ workflowId, limit });\n }\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n /**\n * Get pending approvals for a run\n */\n getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n return this.approvalManager.getPendingApprovals(runId);\n }\n\n /**\n * Approve an approval request\n *\n * @example\n * ```typescript\n * await client.approve(runId, approvalId, 'user@example.com', 'Looks good!');\n * ```\n */\n approve(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n return this.approvalManager.approve(runId, approvalId, approver, comment);\n }\n\n /**\n * Reject an approval request\n */\n reject(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n return this.approvalManager.reject(runId, approvalId, approver, comment);\n }\n\n /**\n * List all pending approvals across workflows\n */\n listAllPendingApprovals(filter?: {\n workflowId?: string;\n approver?: string;\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n return this.approvalManager.listAllPending(filter);\n }\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n /**\n * Get the underlying backend\n */\n getBackend(): WorkflowBackend {\n return this.backend;\n }\n\n /**\n * Get the underlying executor\n */\n getExecutor(): WorkflowExecutor {\n return this.executor;\n }\n\n /**\n * Get the underlying approval manager\n */\n getApprovalManager(): ApprovalManager {\n return this.approvalManager;\n }\n\n /**\n * Cleanup and shutdown\n */\n async destroy(): Promise<void> {\n this.approvalManager.stop();\n await this.backend.destroy();\n\n if (this.debug) {\n console.log(\"[WorkflowClient] Destroyed\");\n }\n }\n}\n\n/**\n * Create a workflow client with default configuration\n */\nexport function createWorkflowClient(\n config?: WorkflowClientConfig,\n): WorkflowClient {\n return new WorkflowClient(config);\n}\n", "/**\n * Temporal Adapter\n *\n * Adapter for using Temporal as the workflow execution backend.\n * Temporal is ideal for enterprise-grade, long-running workflows.\n *\n * @see https://docs.temporal.io/\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\n\n/**\n * Temporal adapter configuration\n */\nexport interface TemporalAdapterConfig extends BackendConfig {\n /** Temporal server address */\n address?: string;\n /** Temporal namespace */\n namespace?: string;\n /** Task queue name */\n taskQueue?: string;\n /** TLS configuration */\n tls?: {\n clientCertPath?: string;\n clientKeyPath?: string;\n serverRootCACertPath?: string;\n };\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Temporal Adapter\n *\n * Translates Veryfront workflow operations to Temporal workflows.\n *\n * @example\n * 
```typescript\n * import { TemporalAdapter } from 'veryfront/ai/workflow/backends/temporal';\n *\n * const backend = new TemporalAdapter({\n * address: 'localhost:7233',\n * namespace: 'default',\n * taskQueue: 'veryfront-workflows',\n * });\n * ```\n *\n * @note This is a stub implementation. Full implementation requires\n * the Temporal SDK and worker setup.\n */\nexport class TemporalAdapter implements WorkflowBackend {\n private config: TemporalAdapterConfig;\n\n constructor(config: TemporalAdapterConfig = {}) {\n this.config = {\n address: \"localhost:7233\",\n namespace: \"default\",\n taskQueue: \"veryfront-workflows\",\n debug: false,\n ...config,\n };\n\n console.warn(\n \"[TemporalAdapter] This is a stub implementation. \" +\n \"Full Temporal integration requires the Temporal SDK and worker setup. \" +\n \"See: https://docs.temporal.io/\",\n );\n }\n\n // Run Management\n createRun(_run: WorkflowRun): Promise<void> {\n // This would start a Temporal workflow execution\n throw new Error(\"TemporalAdapter.createRun not implemented\");\n }\n\n getRun(_runId: string): Promise<WorkflowRun | null> {\n // This would query the Temporal workflow state\n throw new Error(\"TemporalAdapter.getRun not implemented\");\n }\n\n updateRun(_runId: string, _patch: Partial<WorkflowRun>): Promise<void> {\n // This would signal the Temporal workflow\n throw new Error(\"TemporalAdapter.updateRun not implemented\");\n }\n\n listRuns(_filter: RunFilter): Promise<WorkflowRun[]> {\n // This would use Temporal's visibility API\n throw new Error(\"TemporalAdapter.listRuns not implemented\");\n }\n\n // Checkpointing (Temporal handles this internally via event sourcing)\n saveCheckpoint(_runId: string, _checkpoint: Checkpoint): Promise<void> {\n // Temporal provides automatic checkpointing via event sourcing\n // This is essentially a no-op\n return Promise.resolve();\n }\n\n getLatestCheckpoint(_runId: string): Promise<Checkpoint | null> {\n // Temporal maintains full workflow history\n throw new Error(\"TemporalAdapter.getLatestCheckpoint not implemented\");\n }\n\n // Approvals\n savePendingApproval(_runId: string, _approval: PendingApproval): Promise<void> {\n // This would update a Temporal workflow signal handler\n throw new Error(\"TemporalAdapter.savePendingApproval not implemented\");\n }\n\n getPendingApprovals(_runId: string): Promise<PendingApproval[]> {\n throw new Error(\"TemporalAdapter.getPendingApprovals not implemented\");\n }\n\n updateApproval(\n _runId: string,\n _approvalId: string,\n _decision: ApprovalDecision,\n ): Promise<void> {\n // This would send a signal to the Temporal workflow\n throw new Error(\"TemporalAdapter.updateApproval not implemented\");\n }\n\n // Queue (Temporal handles this internally)\n enqueue(_job: WorkflowJob): Promise<void> {\n // Temporal uses workflow execution, not explicit queues\n throw new Error(\"TemporalAdapter.enqueue not implemented\");\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n // Temporal workers poll for tasks automatically\n throw new Error(\"TemporalAdapter.dequeue not implemented\");\n }\n\n acknowledge(_runId: string): Promise<void> {\n // Temporal handles acknowledgment automatically\n return Promise.resolve();\n }\n\n // Lifecycle\n destroy(): Promise<void> {\n // Cleanup Temporal client connection\n return Promise.resolve();\n }\n}\n", "/**\n * Inngest Adapter\n *\n * Adapter for using Inngest as the workflow execution backend.\n * Inngest is ideal for serverless deployments (Vercel, Cloudflare, etc.)\n *\n * @see 
https://www.inngest.com/docs\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\n\n/**\n * Inngest adapter configuration\n */\nexport interface InngestAdapterConfig extends BackendConfig {\n /** Inngest event key */\n eventKey?: string;\n /** Inngest signing key (for production) */\n signingKey?: string;\n /** Inngest API base URL (for self-hosted) */\n baseUrl?: string;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Inngest Adapter\n *\n * Translates Veryfront workflow operations to Inngest functions.\n *\n * @example\n * ```typescript\n * import { InngestAdapter } from 'veryfront/ai/workflow/backends/inngest';\n *\n * const backend = new InngestAdapter({\n * eventKey: process.env.INNGEST_EVENT_KEY,\n * signingKey: process.env.INNGEST_SIGNING_KEY,\n * });\n * ```\n *\n * @note This is a stub implementation. Full implementation requires\n * the Inngest SDK and server-side setup.\n */\nexport class InngestAdapter implements WorkflowBackend {\n private config: InngestAdapterConfig;\n\n constructor(config: InngestAdapterConfig = {}) {\n this.config = {\n debug: false,\n ...config,\n };\n\n logger.warn(\n \"[InngestAdapter] This is a stub implementation. \" +\n \"Full Inngest integration requires additional setup. \" +\n \"See: https://www.inngest.com/docs\",\n );\n }\n\n // Run Management\n createRun(_run: WorkflowRun): Promise<void> {\n throw new Error(\"InngestAdapter.createRun not implemented\");\n }\n\n getRun(_runId: string): Promise<WorkflowRun | null> {\n throw new Error(\"InngestAdapter.getRun not implemented\");\n }\n\n updateRun(_runId: string, _patch: Partial<WorkflowRun>): Promise<void> {\n throw new Error(\"InngestAdapter.updateRun not implemented\");\n }\n\n listRuns(_filter: RunFilter): Promise<WorkflowRun[]> {\n throw new Error(\"InngestAdapter.listRuns not implemented\");\n }\n\n // Checkpointing\n saveCheckpoint(_runId: string, _checkpoint: Checkpoint): Promise<void> {\n throw new Error(\"InngestAdapter.saveCheckpoint not implemented\");\n }\n\n getLatestCheckpoint(_runId: string): Promise<Checkpoint | null> {\n throw new Error(\"InngestAdapter.getLatestCheckpoint not implemented\");\n }\n\n // Approvals\n savePendingApproval(_runId: string, _approval: PendingApproval): Promise<void> {\n throw new Error(\"InngestAdapter.savePendingApproval not implemented\");\n }\n\n getPendingApprovals(_runId: string): Promise<PendingApproval[]> {\n throw new Error(\"InngestAdapter.getPendingApprovals not implemented\");\n }\n\n updateApproval(\n _runId: string,\n _approvalId: string,\n _decision: ApprovalDecision,\n ): Promise<void> {\n throw new Error(\"InngestAdapter.updateApproval not implemented\");\n }\n\n // Queue (Inngest handles this internally)\n enqueue(_job: WorkflowJob): Promise<void> {\n // Inngest uses events instead of queues\n // This would send an Inngest event\n throw new Error(\"InngestAdapter.enqueue not implemented\");\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n // Inngest handles job scheduling internally\n throw new Error(\"InngestAdapter.dequeue not implemented\");\n }\n\n acknowledge(_runId: string): Promise<void> {\n // Inngest handles acknowledgment internally\n return Promise.resolve();\n }\n\n // Lifecycle\n destroy(): Promise<void> {\n // No cleanup needed for Inngest\n return Promise.resolve();\n }\n}\n", "/**\n * 
Cloudflare Adapter\n *\n * Adapter for using Cloudflare Durable Objects as the workflow backend.\n * Ideal for edge deployments on Cloudflare Workers.\n *\n * @see https://developers.cloudflare.com/durable-objects/\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\n\n/**\n * Cloudflare adapter configuration\n */\nexport interface CloudflareAdapterConfig extends BackendConfig {\n /** Durable Object namespace binding name */\n durableObjectBinding?: string;\n /** KV namespace binding name (for auxiliary storage) */\n kvBinding?: string;\n /** Queue binding name (for job queue) */\n queueBinding?: string;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Cloudflare Adapter\n *\n * Uses Cloudflare Durable Objects for workflow state and\n * Cloudflare Queues for job distribution.\n *\n * @example\n * ```typescript\n * // In your Cloudflare Worker\n * import { CloudflareAdapter } from 'veryfront/ai/workflow/backends/cloudflare';\n *\n * export default {\n * async fetch(request, env) {\n * const backend = new CloudflareAdapter({\n * durableObjectBinding: 'WORKFLOW_DO',\n * kvBinding: 'WORKFLOW_KV',\n * queueBinding: 'WORKFLOW_QUEUE',\n * });\n *\n * // Use backend...\n * }\n * }\n * ```\n *\n * @note This is a stub implementation. Full implementation requires\n * Cloudflare Workers environment bindings.\n */\nexport class CloudflareAdapter implements WorkflowBackend {\n private config: CloudflareAdapterConfig;\n\n constructor(config: CloudflareAdapterConfig = {}) {\n this.config = {\n durableObjectBinding: \"WORKFLOW_DO\",\n kvBinding: \"WORKFLOW_KV\",\n queueBinding: \"WORKFLOW_QUEUE\",\n debug: false,\n ...config,\n };\n\n console.warn(\n \"[CloudflareAdapter] This is a stub implementation. \" +\n \"Full Cloudflare integration requires Workers environment bindings. 
\" +\n \"See: https://developers.cloudflare.com/durable-objects/\",\n );\n }\n\n // Run Management\n createRun(_run: WorkflowRun): Promise<void> {\n // This would create/get a Durable Object instance for the run\n throw new Error(\"CloudflareAdapter.createRun not implemented\");\n }\n\n getRun(_runId: string): Promise<WorkflowRun | null> {\n // This would fetch state from the Durable Object\n throw new Error(\"CloudflareAdapter.getRun not implemented\");\n }\n\n updateRun(_runId: string, _patch: Partial<WorkflowRun>): Promise<void> {\n // This would update state in the Durable Object\n throw new Error(\"CloudflareAdapter.updateRun not implemented\");\n }\n\n listRuns(_filter: RunFilter): Promise<WorkflowRun[]> {\n // This would query KV for run indexes\n throw new Error(\"CloudflareAdapter.listRuns not implemented\");\n }\n\n // Checkpointing\n saveCheckpoint(_runId: string, _checkpoint: Checkpoint): Promise<void> {\n // This would persist checkpoint to the Durable Object\n throw new Error(\"CloudflareAdapter.saveCheckpoint not implemented\");\n }\n\n getLatestCheckpoint(_runId: string): Promise<Checkpoint | null> {\n throw new Error(\"CloudflareAdapter.getLatestCheckpoint not implemented\");\n }\n\n // Approvals\n savePendingApproval(_runId: string, _approval: PendingApproval): Promise<void> {\n throw new Error(\"CloudflareAdapter.savePendingApproval not implemented\");\n }\n\n getPendingApprovals(_runId: string): Promise<PendingApproval[]> {\n throw new Error(\"CloudflareAdapter.getPendingApprovals not implemented\");\n }\n\n updateApproval(\n _runId: string,\n _approvalId: string,\n _decision: ApprovalDecision,\n ): Promise<void> {\n throw new Error(\"CloudflareAdapter.updateApproval not implemented\");\n }\n\n // Queue (using Cloudflare Queues)\n enqueue(_job: WorkflowJob): Promise<void> {\n // This would send a message to Cloudflare Queue\n throw new Error(\"CloudflareAdapter.enqueue not implemented\");\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n // Cloudflare Queues use push model, not pull\n throw new Error(\"CloudflareAdapter.dequeue not implemented\");\n }\n\n acknowledge(_runId: string): Promise<void> {\n // Cloudflare Queues handle acknowledgment differently\n return Promise.resolve();\n }\n\n // Lifecycle\n destroy(): Promise<void> {\n // No cleanup needed - Cloudflare manages lifecycle\n return Promise.resolve();\n }\n}\n", "/**\n * useWorkflow Hook\n *\n * React hook for tracking and interacting with workflow runs.\n *\n * @example\n * ```tsx\n * import { useWorkflow } from 'veryfront/ai/workflow/react';\n *\n * function WorkflowDashboard({ runId }: { runId: string }) {\n * const {\n * run,\n * status,\n * progress,\n * currentNodes,\n * pendingApprovals,\n * cancel,\n * retry,\n * isLoading,\n * error,\n * } = useWorkflow({ runId });\n *\n * return (\n * <div>\n * <h2>Status: {status}</h2>\n * <p>Progress: {progress}%</p>\n * {pendingApprovals.length > 0 && (\n * <p>{pendingApprovals.length} approvals pending</p>\n * )}\n * </div>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useEffect, useRef, useState } from \"react\";\nimport type { NodeState, PendingApproval, WorkflowRun, WorkflowStatus } from \"../types.ts\";\n\n/**\n * Options for useWorkflow hook\n */\nexport interface UseWorkflowOptions {\n /** Run ID to track */\n runId: string;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Polling interval in ms (defaults to 2000) */\n pollInterval?: number;\n\n /** Enable automatic polling */\n autoRefresh?: boolean;\n\n /** 
Callback when status changes */\n onStatusChange?: (status: WorkflowStatus, previousStatus: WorkflowStatus) => void;\n\n /** Callback when workflow completes */\n onComplete?: (run: WorkflowRun) => void;\n\n /** Callback when workflow fails */\n onError?: (error: Error, run?: WorkflowRun) => void;\n\n /** Callback when approval is required */\n onApprovalRequired?: (approval: PendingApproval) => void;\n}\n\n/**\n * Result from useWorkflow hook\n */\nexport interface UseWorkflowResult {\n /** The workflow run data */\n run: WorkflowRun | null;\n\n /** Current workflow status */\n status: WorkflowStatus;\n\n /** Progress percentage (0-100) */\n progress: number;\n\n /** Currently executing node IDs */\n currentNodes: string[];\n\n /** Node states by node ID */\n nodeStates: Record<string, NodeState>;\n\n /** Pending approvals */\n pendingApprovals: PendingApproval[];\n\n /** Refresh the workflow data */\n refresh: () => Promise<void>;\n\n /** Cancel the workflow */\n cancel: () => Promise<void>;\n\n /** Retry a failed workflow */\n retry: () => Promise<void>;\n\n /** Loading state */\n isLoading: boolean;\n\n /** Error state */\n error: Error | null;\n}\n\n/**\n * useWorkflow - Track and interact with a workflow run\n */\nexport function useWorkflow(options: UseWorkflowOptions): UseWorkflowResult {\n const {\n runId,\n apiBase = \"/api/workflows\",\n pollInterval = 2000,\n autoRefresh = true,\n onStatusChange,\n onComplete,\n onError,\n onApprovalRequired,\n } = options;\n\n const [run, setRun] = useState<WorkflowRun | null>(null);\n const [isLoading, setIsLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n\n const previousStatusRef = useRef<WorkflowStatus | null>(null);\n const previousApprovalsRef = useRef<Set<string>>(new Set());\n const abortControllerRef = useRef<AbortController | null>(null);\n\n /**\n * Fetch workflow data\n */\n const fetchRun = useCallback(async () => {\n if (!runId) return;\n\n try {\n const response = await fetch(`${apiBase}/runs/${runId}`, {\n signal: abortControllerRef.current?.signal,\n });\n\n if (!response.ok) {\n throw new Error(`Failed to fetch workflow: ${response.status}`);\n }\n\n const data = await response.json();\n const workflowRun = data as WorkflowRun;\n\n // Check for status changes\n if (previousStatusRef.current && previousStatusRef.current !== workflowRun.status) {\n onStatusChange?.(workflowRun.status, previousStatusRef.current);\n }\n previousStatusRef.current = workflowRun.status;\n\n // Check for completion\n if (workflowRun.status === \"completed\") {\n onComplete?.(workflowRun);\n }\n\n // Check for failures\n if (workflowRun.status === \"failed\") {\n const failedError = new Error(\"Workflow failed\");\n onError?.(failedError, workflowRun);\n }\n\n // Check for new approvals\n if (workflowRun.pendingApprovals) {\n for (const approval of workflowRun.pendingApprovals) {\n if (approval.status === \"pending\" && !previousApprovalsRef.current.has(approval.id)) {\n previousApprovalsRef.current.add(approval.id);\n onApprovalRequired?.(approval);\n }\n }\n }\n\n setRun(workflowRun);\n setError(null);\n } catch (err) {\n if (err instanceof Error && err.name === \"AbortError\") {\n return;\n }\n const fetchError = err instanceof Error ? 
err : new Error(String(err));\n setError(fetchError);\n onError?.(fetchError);\n }\n }, [runId, apiBase, onStatusChange, onComplete, onError, onApprovalRequired]);\n\n /**\n * Initial fetch and polling setup\n */\n useEffect(() => {\n abortControllerRef.current = new AbortController();\n\n const doFetch = async () => {\n setIsLoading(true);\n await fetchRun();\n setIsLoading(false);\n };\n\n doFetch();\n\n // Set up polling for active workflows\n let intervalId: ReturnType<typeof setInterval> | undefined;\n if (autoRefresh) {\n intervalId = setInterval(() => {\n // Only poll if workflow is still active\n const currentStatus = previousStatusRef.current;\n if (currentStatus && ![\"completed\", \"failed\", \"cancelled\"].includes(currentStatus)) {\n fetchRun();\n }\n }, pollInterval);\n }\n\n return () => {\n abortControllerRef.current?.abort();\n if (intervalId) {\n clearInterval(intervalId);\n }\n };\n }, [runId, autoRefresh, pollInterval, fetchRun]);\n\n /**\n * Refresh workflow data\n */\n const refresh = useCallback(async () => {\n setIsLoading(true);\n await fetchRun();\n setIsLoading(false);\n }, [fetchRun]);\n\n /**\n * Cancel the workflow\n */\n const cancel = useCallback(async () => {\n if (!runId) return;\n\n try {\n const response = await fetch(`${apiBase}/runs/${runId}/cancel`, {\n method: \"POST\",\n });\n\n if (!response.ok) {\n throw new Error(`Failed to cancel workflow: ${response.status}`);\n }\n\n await refresh();\n } catch (err) {\n const cancelError = err instanceof Error ? err : new Error(String(err));\n setError(cancelError);\n throw cancelError;\n }\n }, [runId, apiBase, refresh]);\n\n /**\n * Retry a failed workflow\n */\n const retry = useCallback(async () => {\n if (!runId) return;\n\n try {\n const response = await fetch(`${apiBase}/runs/${runId}/retry`, {\n method: \"POST\",\n });\n\n if (!response.ok) {\n throw new Error(`Failed to retry workflow: ${response.status}`);\n }\n\n await refresh();\n } catch (err) {\n const retryError = err instanceof Error ? err : new Error(String(err));\n setError(retryError);\n throw retryError;\n }\n }, [runId, apiBase, refresh]);\n\n // Calculate progress\n const calculateProgress = (): number => {\n if (!run?.nodeStates) return 0;\n\n const states = Object.values(run.nodeStates);\n if (states.length === 0) return 0;\n\n const completed = states.filter(\n (s) => s.status === \"completed\" || s.status === \"skipped\",\n ).length;\n\n return Math.round((completed / states.length) * 100);\n };\n\n return {\n run,\n status: run?.status ?? \"pending\",\n progress: calculateProgress(),\n currentNodes: run?.currentNodes ?? [],\n nodeStates: run?.nodeStates ?? {},\n pendingApprovals: run?.pendingApprovals?.filter((a) => a.status === \"pending\") ?? 
[],\n refresh,\n cancel,\n retry,\n isLoading,\n error,\n };\n}\n", "/**\n * useApproval Hook\n *\n * React hook for handling workflow approval interactions.\n *\n * @example\n * ```tsx\n * import { useApproval } from 'veryfront/ai/workflow/react';\n *\n * function ApprovalUI({ runId, approvalId }: Props) {\n * const {\n * approval,\n * approve,\n * reject,\n * isSubmitting,\n * error,\n * } = useApproval({ runId, approvalId });\n *\n * if (!approval) return <p>Loading...</p>;\n *\n * return (\n * <div>\n * <h3>{approval.message}</h3>\n * <p>Requested by: {approval.stepId}</p>\n * <button onClick={() => approve('Looks good!')}>\n * Approve\n * </button>\n * <button onClick={() => reject('Needs changes')}>\n * Reject\n * </button>\n * </div>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useEffect, useState } from \"react\";\nimport type { ApprovalDecision, PendingApproval } from \"../types.ts\";\n\n/**\n * Options for useApproval hook\n */\nexport interface UseApprovalOptions {\n /** Workflow run ID */\n runId: string;\n\n /** Approval ID */\n approvalId: string;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Current user/approver name */\n approver?: string;\n\n /** Callback on successful approval/rejection */\n onDecision?: (decision: ApprovalDecision) => void;\n\n /** Callback on error */\n onError?: (error: Error) => void;\n}\n\n/**\n * Result from useApproval hook\n */\nexport interface UseApprovalResult {\n /** The approval data */\n approval: PendingApproval | null;\n\n /** Approve the request */\n approve: (comment?: string) => Promise<void>;\n\n /** Reject the request */\n reject: (comment?: string) => Promise<void>;\n\n /** Submit a custom decision */\n submitDecision: (decision: ApprovalDecision) => Promise<void>;\n\n /** Whether a submission is in progress */\n isSubmitting: boolean;\n\n /** Loading state for initial fetch */\n isLoading: boolean;\n\n /** Error state */\n error: Error | null;\n\n /** Whether the approval is still pending */\n isPending: boolean;\n\n /** Whether the approval has been resolved */\n isResolved: boolean;\n}\n\n/**\n * useApproval - Handle workflow approval interactions\n */\nexport function useApproval(options: UseApprovalOptions): UseApprovalResult {\n const {\n runId,\n approvalId,\n apiBase = \"/api/workflows\",\n approver = \"unknown\",\n onDecision,\n onError,\n } = options;\n\n const [approval, setApproval] = useState<PendingApproval | null>(null);\n const [isLoading, setIsLoading] = useState(true);\n const [isSubmitting, setIsSubmitting] = useState(false);\n const [error, setError] = useState<Error | null>(null);\n\n /**\n * Fetch approval data\n */\n useEffect(() => {\n const fetchApproval = async () => {\n try {\n const response = await fetch(\n `${apiBase}/runs/${runId}/approvals/${approvalId}`,\n );\n\n if (!response.ok) {\n throw new Error(`Failed to fetch approval: ${response.status}`);\n }\n\n const data = await response.json();\n setApproval(data as PendingApproval);\n setError(null);\n } catch (err) {\n const fetchError = err instanceof Error ? 
err : new Error(String(err));\n setError(fetchError);\n onError?.(fetchError);\n } finally {\n setIsLoading(false);\n }\n };\n\n if (runId && approvalId) {\n fetchApproval();\n }\n }, [runId, approvalId, apiBase, onError]);\n\n /**\n * Submit a decision\n */\n const submitDecision = useCallback(\n async (decision: ApprovalDecision) => {\n if (!runId || !approvalId) return;\n\n setIsSubmitting(true);\n setError(null);\n\n try {\n const response = await fetch(\n `${apiBase}/runs/${runId}/approvals/${approvalId}`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(decision),\n },\n );\n\n if (!response.ok) {\n throw new Error(`Failed to submit decision: ${response.status}`);\n }\n\n // Update local state\n setApproval((prev) =>\n prev\n ? {\n ...prev,\n status: decision.approved ? \"approved\" : \"rejected\",\n resolvedAt: new Date(),\n resolvedBy: decision.approver,\n comment: decision.comment,\n }\n : null\n );\n\n onDecision?.(decision);\n } catch (err) {\n const submitError = err instanceof Error ? err : new Error(String(err));\n setError(submitError);\n onError?.(submitError);\n throw submitError;\n } finally {\n setIsSubmitting(false);\n }\n },\n [runId, approvalId, apiBase, onDecision, onError],\n );\n\n /**\n * Approve the request\n */\n const approve = useCallback(\n async (comment?: string) => {\n await submitDecision({\n approved: true,\n approver,\n comment,\n });\n },\n [submitDecision, approver],\n );\n\n /**\n * Reject the request\n */\n const reject = useCallback(\n async (comment?: string) => {\n await submitDecision({\n approved: false,\n approver,\n comment,\n });\n },\n [submitDecision, approver],\n );\n\n return {\n approval,\n approve,\n reject,\n submitDecision,\n isSubmitting,\n isLoading,\n error,\n isPending: approval?.status === \"pending\",\n isResolved: approval?.status !== \"pending\",\n };\n}\n", "/**\n * useWorkflowList Hook\n *\n * React hook for listing and filtering workflow runs.\n *\n * @example\n * ```tsx\n * import { useWorkflowList } from 'veryfront/ai/workflow/react';\n *\n * function WorkflowList() {\n * const {\n * runs,\n * isLoading,\n * hasMore,\n * loadMore,\n * setFilter,\n * } = useWorkflowList({\n * workflowId: 'content-pipeline',\n * status: 'running',\n * });\n *\n * return (\n * <div>\n * {runs.map(run => (\n * <div key={run.id}>\n * {run.id} - {run.status}\n * </div>\n * ))}\n * {hasMore && (\n * <button onClick={loadMore}>Load More</button>\n * )}\n * </div>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useEffect, useState } from \"react\";\nimport type { RunFilter, WorkflowRun, WorkflowStatus } from \"../types.ts\";\n\n/**\n * Options for useWorkflowList hook\n */\nexport interface UseWorkflowListOptions {\n /** Filter by workflow ID */\n workflowId?: string;\n\n /** Filter by status */\n status?: WorkflowStatus | WorkflowStatus[];\n\n /** Filter runs created after this date */\n createdAfter?: Date;\n\n /** Filter runs created before this date */\n createdBefore?: Date;\n\n /** Page size (defaults to 20) */\n pageSize?: number;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Enable automatic refresh */\n autoRefresh?: boolean;\n\n /** Refresh interval in ms (defaults to 5000) */\n refreshInterval?: number;\n}\n\n/**\n * Result from useWorkflowList hook\n */\nexport interface UseWorkflowListResult {\n /** List of workflow runs */\n runs: WorkflowRun[];\n\n /** Total count (if available) */\n totalCount?: number;\n\n /** Loading state */\n 
isLoading: boolean;\n\n /** Error state */\n error: Error | null;\n\n /** Whether there are more results */\n hasMore: boolean;\n\n /** Load more results */\n loadMore: () => Promise<void>;\n\n /** Refresh the list */\n refresh: () => Promise<void>;\n\n /** Update the filter */\n setFilter: (filter: Partial<UseWorkflowListOptions>) => void;\n\n /** Current filter */\n filter: RunFilter;\n}\n\n/**\n * useWorkflowList - List and filter workflow runs\n */\nexport function useWorkflowList(\n options: UseWorkflowListOptions = {},\n): UseWorkflowListResult {\n const {\n workflowId,\n status,\n createdAfter,\n createdBefore,\n pageSize = 20,\n apiBase = \"/api/workflows\",\n autoRefresh = false,\n refreshInterval = 5000,\n } = options;\n\n const [runs, setRuns] = useState<WorkflowRun[]>([]);\n const [totalCount, setTotalCount] = useState<number | undefined>();\n const [isLoading, setIsLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n const [hasMore, setHasMore] = useState(false);\n const [cursor, setCursor] = useState<string | undefined>();\n\n const [filter, setFilterState] = useState<RunFilter>({\n workflowId,\n status,\n createdAfter,\n createdBefore,\n limit: pageSize,\n });\n\n /**\n * Build query string from filter\n */\n const buildQueryString = useCallback(\n (filterToUse: RunFilter, cursorToUse?: string): string => {\n const params = new URLSearchParams();\n\n if (filterToUse.workflowId) {\n params.set(\"workflowId\", filterToUse.workflowId);\n }\n\n if (filterToUse.status) {\n const statuses = Array.isArray(filterToUse.status)\n ? filterToUse.status\n : [filterToUse.status];\n statuses.forEach((s) => params.append(\"status\", s));\n }\n\n if (filterToUse.createdAfter) {\n params.set(\"createdAfter\", filterToUse.createdAfter.toISOString());\n }\n\n if (filterToUse.createdBefore) {\n params.set(\"createdBefore\", filterToUse.createdBefore.toISOString());\n }\n\n if (filterToUse.limit) {\n params.set(\"limit\", String(filterToUse.limit));\n }\n\n if (cursorToUse) {\n params.set(\"cursor\", cursorToUse);\n }\n\n return params.toString();\n },\n [],\n );\n\n /**\n * Fetch runs\n */\n const fetchRuns = useCallback(\n async (append: boolean = false) => {\n try {\n const queryString = buildQueryString(filter, append ? cursor : undefined);\n const response = await fetch(`${apiBase}/runs?${queryString}`);\n\n if (!response.ok) {\n throw new Error(`Failed to fetch runs: ${response.status}`);\n }\n\n const data = await response.json();\n const fetchedRuns = (data.runs || data) as WorkflowRun[];\n const nextCursor = data.cursor;\n const total = data.totalCount;\n\n if (append) {\n setRuns((prev) => [...prev, ...fetchedRuns]);\n } else {\n setRuns(fetchedRuns);\n }\n\n setCursor(nextCursor);\n setHasMore(!!nextCursor || fetchedRuns.length === filter.limit);\n setTotalCount(total);\n setError(null);\n } catch (err) {\n const fetchError = err instanceof Error ? 
err : new Error(String(err));\n setError(fetchError);\n }\n },\n [apiBase, filter, cursor, buildQueryString],\n );\n\n /**\n * Initial fetch\n */\n useEffect(() => {\n const doFetch = async () => {\n setIsLoading(true);\n await fetchRuns(false);\n setIsLoading(false);\n };\n\n doFetch();\n }, [filter]); // Re-fetch when filter changes\n\n /**\n * Auto-refresh setup\n */\n useEffect(() => {\n if (!autoRefresh) return;\n\n const intervalId = setInterval(() => {\n fetchRuns(false);\n }, refreshInterval);\n\n return () => clearInterval(intervalId);\n }, [autoRefresh, refreshInterval, fetchRuns]);\n\n /**\n * Load more results\n */\n const loadMore = useCallback(async () => {\n if (!hasMore || isLoading) return;\n setIsLoading(true);\n await fetchRuns(true);\n setIsLoading(false);\n }, [hasMore, isLoading, fetchRuns]);\n\n /**\n * Refresh the list\n */\n const refresh = useCallback(async () => {\n setCursor(undefined);\n setIsLoading(true);\n await fetchRuns(false);\n setIsLoading(false);\n }, [fetchRuns]);\n\n /**\n * Update filter\n */\n const setFilter = useCallback(\n (newFilter: Partial<UseWorkflowListOptions>) => {\n setCursor(undefined); // Reset pagination\n setFilterState((prev) => ({\n ...prev,\n workflowId: newFilter.workflowId ?? prev.workflowId,\n status: newFilter.status ?? prev.status,\n createdAfter: newFilter.createdAfter ?? prev.createdAfter,\n createdBefore: newFilter.createdBefore ?? prev.createdBefore,\n limit: newFilter.pageSize ?? prev.limit,\n }));\n },\n [],\n );\n\n return {\n runs,\n totalCount,\n isLoading,\n error,\n hasMore,\n loadMore,\n refresh,\n setFilter,\n filter,\n };\n}\n", "/**\n * useWorkflowStart Hook\n *\n * React hook for starting workflow runs.\n *\n * @example\n * ```tsx\n * import { useWorkflowStart } from 'veryfront/ai/workflow/react';\n *\n * function StartWorkflowButton() {\n * const { start, isStarting, error, lastRunId } = useWorkflowStart({\n * workflowId: 'content-pipeline',\n * onStart: (runId) => {\n * console.log('Started:', runId);\n * },\n * });\n *\n * return (\n * <button\n * onClick={() => start({ topic: 'AI Safety' })}\n * disabled={isStarting}\n * >\n * {isStarting ? 'Starting...' 
: 'Start Workflow'}\n * </button>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useState } from \"react\";\n\n/**\n * Options for useWorkflowStart hook\n */\nexport interface UseWorkflowStartOptions {\n /** Workflow ID to start */\n workflowId: string;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Callback when workflow starts successfully */\n onStart?: (runId: string) => void;\n\n /** Callback on error */\n onError?: (error: Error) => void;\n}\n\n/**\n * Result from useWorkflowStart hook\n */\nexport interface UseWorkflowStartResult<TInput = unknown> {\n /** Start a new workflow run */\n start: (input: TInput) => Promise<string>;\n\n /** Whether a start is in progress */\n isStarting: boolean;\n\n /** Last started run ID */\n lastRunId: string | null;\n\n /** Error state */\n error: Error | null;\n\n /** Reset error state */\n resetError: () => void;\n}\n\n/**\n * useWorkflowStart - Start new workflow runs\n */\nexport function useWorkflowStart<TInput = unknown>(\n options: UseWorkflowStartOptions,\n): UseWorkflowStartResult<TInput> {\n const { workflowId, apiBase = \"/api/workflows\", onStart, onError } = options;\n\n const [isStarting, setIsStarting] = useState(false);\n const [lastRunId, setLastRunId] = useState<string | null>(null);\n const [error, setError] = useState<Error | null>(null);\n\n /**\n * Start a new workflow run\n */\n const start = useCallback(\n async (input: TInput): Promise<string> => {\n setIsStarting(true);\n setError(null);\n\n try {\n const response = await fetch(`${apiBase}/${workflowId}/start`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({ input }),\n });\n\n if (!response.ok) {\n const errorData = await response.json().catch(() => ({}));\n throw new Error(\n errorData.message || `Failed to start workflow: ${response.status}`,\n );\n }\n\n const data = await response.json();\n const runId = data.runId || data.id;\n\n setLastRunId(runId);\n onStart?.(runId);\n\n return runId;\n } catch (err) {\n const startError = err instanceof Error ? err : new Error(String(err));\n setError(startError);\n onError?.(startError);\n throw startError;\n } finally {\n setIsStarting(false);\n }\n },\n [workflowId, apiBase, onStart, onError],\n );\n\n /**\n * Reset error state\n */\n const resetError = useCallback(() => {\n setError(null);\n }, []);\n\n return {\n start,\n isStarting,\n lastRunId,\n error,\n resetError,\n };\n}\n"],
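The removed sources above document the workflow client, agent registry, and approval APIs mostly through isolated `@example` blocks. For readers of this diff, the following is a minimal end-to-end sketch that wires those pieces together: registering a mock agent, defining a one-step workflow, and running it through `createWorkflowClient` with the default in-memory backend. The import specifiers for the client and types modules are assumptions patterned on the `@example` paths shown in the embedded sources (only the agent-registry path appears verbatim there), and the `executor.agentRegistry` option is taken from the `DefaultAgentRegistry` example, so treat this as an illustration rather than verified package entrypoints.

```typescript
// Minimal sketch, assuming these specifiers resolve like the @example paths
// in the embedded sources; only the agent-registry path is shown verbatim.
import { createWorkflowClient } from "veryfront/ai/workflow/api/workflow-client";
import {
  createMockAgent,
  DefaultAgentRegistry,
} from "veryfront/ai/workflow/runtime/agent-registry";
import type { WorkflowDefinition } from "veryfront/ai/workflow/types";

// Register a mock agent so the step can run without real model calls.
const registry = new DefaultAgentRegistry();
registry.registerAgent(
  createMockAgent("writer", { response: "Draft ready." }),
);

// One-step workflow whose step references the mock agent by ID and
// forwards the workflow input to it.
const draftWorkflow: WorkflowDefinition = {
  id: "draft-post",
  steps: [
    {
      id: "write",
      config: { type: "step", agent: "writer", input: (ctx) => ctx.input },
    },
  ],
};

// The client defaults to MemoryBackend; the agent registry is passed through
// to the executor, mirroring the DefaultAgentRegistry example above.
const client = createWorkflowClient({
  executor: { agentRegistry: registry },
  debug: true,
});

client.register(draftWorkflow);

const handle = await client.start("draft-post", { topic: "AI Safety" });
console.log(await handle.result());

await client.destroy();
```

In a real deployment the default `MemoryBackend` would be swapped for a persistent backend (for example the Redis backend listed in the sources), and the mock agent replaced by real agents registered on the same registry.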
4
+ "sourcesContent": ["/**\n * Veryfront Workflow Types\n *\n * Core type definitions for durable, DAG-based agentic workflows\n */\n\nimport type { z } from \"zod\";\nimport type { Agent } from \"../types/agent.ts\";\nimport type { Tool } from \"../types/tool.ts\";\nimport type { BlobRef, BlobStorage } from \"./blob/types.ts\";\n\n// ============================================================================\n// Workflow Status\n// ============================================================================\n\n/**\n * Status of a workflow run\n */\nexport type WorkflowStatus =\n | \"pending\" // Created but not started\n | \"running\" // Currently executing\n | \"waiting\" // Paused, waiting for approval/event\n | \"completed\" // Successfully finished\n | \"failed\" // Failed with error\n | \"cancelled\"; // Cancelled by user\n\n/**\n * Status of a single node in the workflow\n */\nexport type NodeStatus =\n | \"pending\" // Not yet executed\n | \"running\" // Currently executing\n | \"completed\" // Successfully finished\n | \"failed\" // Failed with error\n | \"skipped\"; // Skipped due to condition\n\n// ============================================================================\n// Workflow Node Types\n// ============================================================================\n\n/**\n * Types of nodes in a workflow DAG\n */\nexport type WorkflowNodeType =\n | \"step\" // Single agent or tool execution\n | \"parallel\" // Parallel execution of multiple nodes\n | \"map\" // Dynamic fan-out/map-reduce\n | \"branch\" // Conditional branching\n | \"wait\" // Wait for approval or event\n | \"subWorkflow\"; // Nested workflow execution\n\n/**\n * Retry configuration for a step\n */\nexport interface RetryConfig {\n /** Maximum number of retry attempts */\n maxAttempts?: number;\n /** Backoff strategy */\n backoff?: \"fixed\" | \"linear\" | \"exponential\";\n /** Initial delay in milliseconds */\n initialDelay?: number;\n /** Maximum delay between retries */\n maxDelay?: number;\n}\n\n/**\n * Base configuration for all workflow nodes\n */\nexport interface BaseNodeConfig {\n /** Whether to checkpoint after this node */\n checkpoint?: boolean;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Timeout for this node */\n timeout?: string | number;\n /** Condition to skip this node */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Step node configuration (agent or tool execution)\n */\nexport interface StepNodeConfig extends BaseNodeConfig {\n type: \"step\";\n /** Agent ID or agent instance to execute */\n agent?: string | Agent;\n /** Tool ID or tool instance to execute */\n tool?: string | Tool | undefined;\n /** Input for the agent/tool - can be static or computed from context */\n input?:\n | string\n | Record<string, unknown>\n | ((context: WorkflowContext) => unknown);\n}\n\n/**\n * Parallel node configuration (concurrent execution)\n */\nexport interface ParallelNodeConfig extends BaseNodeConfig {\n type: \"parallel\";\n /** Nodes to execute in parallel */\n nodes: WorkflowNode[];\n /** How to handle parallel completion */\n strategy?: \"all\" | \"race\" | \"allSettled\";\n}\n\n/**\n * Branch node configuration (conditional execution)\n */\nexport interface BranchNodeConfig extends BaseNodeConfig {\n type: \"branch\";\n /** Condition to evaluate */\n condition: (context: WorkflowContext) => boolean | Promise<boolean>;\n /** Nodes to execute if condition is true */\n then: WorkflowNode[];\n /** Nodes to execute if condition is false */\n 
else?: WorkflowNode[];\n}\n\n/**\n * Wait node configuration (approval or event)\n */\nexport interface WaitNodeConfig extends BaseNodeConfig {\n type: \"wait\";\n /** Type of wait */\n waitType: \"approval\" | \"event\";\n /** Message to display for approval */\n message?: string;\n /** Payload to include with approval request */\n payload?: unknown | ((context: WorkflowContext) => unknown);\n /** Allowed approvers (email or user IDs) */\n approvers?: string[];\n /** Event name to wait for (for event type) */\n eventName?: string;\n}\n\n/**\n * Sub-workflow node configuration\n */\nexport interface SubWorkflowNodeConfig extends BaseNodeConfig {\n type: \"subWorkflow\";\n /** Workflow ID or workflow definition to execute */\n workflow: string | WorkflowDefinition;\n /** Input for the sub-workflow */\n input?: unknown | ((context: WorkflowContext) => unknown);\n /** Transform the sub-workflow output */\n output?: (result: unknown) => unknown;\n}\n\n/**\n * Map node configuration (dynamic fan-out)\n */\nexport interface MapNodeConfig extends BaseNodeConfig {\n type: \"map\";\n /** Collection to iterate over (array) */\n items: unknown[] | ((context: WorkflowContext) => unknown[] | Promise<unknown[]>);\n /** Node or workflow to execute for each item */\n processor: WorkflowNode | WorkflowDefinition;\n /** Maximum concurrent executions */\n concurrency?: number;\n}\n\n/**\n * Union of all node configurations\n */\nexport type WorkflowNodeConfig =\n | StepNodeConfig\n | ParallelNodeConfig\n | MapNodeConfig\n | BranchNodeConfig\n | WaitNodeConfig\n | SubWorkflowNodeConfig;\n\n/**\n * A node in the workflow DAG\n */\nexport interface WorkflowNode {\n /** Unique node ID within the workflow */\n id: string;\n /** Node configuration */\n config: WorkflowNodeConfig;\n /** Dependencies (node IDs that must complete before this node) */\n dependsOn?: string[];\n}\n\n// ============================================================================\n// Workflow Definition\n// ============================================================================\n\n/**\n * Workflow context - accumulated data during execution\n */\nexport interface WorkflowContext {\n /** Input provided when workflow was started */\n input: unknown;\n /** Results from each completed node, keyed by node ID */\n [nodeId: string]: unknown;\n}\n\n/**\n * Helper to resolve BlobRefs into actual content.\n */\nexport interface BlobResolver {\n /** Get blob content as text. */\n getText(ref: BlobRef): Promise<string | null>;\n /** Get blob content as Uint8Array. */\n getBytes(ref: BlobRef): Promise<Uint8Array | null>;\n /** Get blob content as ReadableStream. */\n getStream(ref: BlobRef): Promise<ReadableStream | null>;\n /** Get blob metadata. */\n stat(ref: BlobRef): Promise<BlobRef | null>;\n /** Delete blob data. 
*/\n delete(ref: BlobRef): Promise<void>;\n}\n\n/**\n * Step builder function context\n */\nexport interface StepBuilderContext<TInput = unknown> {\n /** Original workflow input */\n input: TInput;\n /** Accumulated context from previous steps */\n context: WorkflowContext;\n /** Blob storage access (if configured) */\n blobStorage?: BlobStorage;\n /** Helper to resolve BlobRefs to content */\n blob?: BlobResolver;\n}\n\n/**\n * Workflow definition\n */\nexport interface WorkflowDefinition<\n TInput = unknown,\n TOutput = unknown,\n> {\n /** Unique workflow identifier */\n id: string;\n /** Optional description */\n description?: string;\n /** Optional version */\n version?: string;\n /** Input validation schema */\n inputSchema?: z.ZodSchema<TInput>;\n /** Output validation schema */\n outputSchema?: z.ZodSchema<TOutput>;\n /** Default retry configuration for all steps */\n retry?: RetryConfig;\n /** Default timeout for the entire workflow */\n timeout?: string | number;\n /** Workflow steps - can be static or dynamic based on input */\n steps:\n | WorkflowNode[]\n | ((context: StepBuilderContext<TInput>) => WorkflowNode[]);\n /** Error handler */\n onError?: (error: Error, context: WorkflowContext) => void | Promise<void>;\n /** Completion handler */\n onComplete?: (\n result: TOutput,\n context: WorkflowContext,\n ) => void | Promise<void>;\n}\n\n// ============================================================================\n// Workflow Run State\n// ============================================================================\n\n/**\n * State of a single node during execution\n */\nexport interface NodeState {\n /** Node ID */\n nodeId: string;\n /** Current status */\n status: NodeStatus;\n /** Input provided to the node */\n input?: unknown;\n /** Output produced by the node */\n output?: unknown;\n /** Error message if failed */\n error?: string;\n /** Current attempt number (for retries) */\n attempt: number;\n /** When execution started */\n startedAt?: Date;\n /** When execution completed */\n completedAt?: Date;\n}\n\n/**\n * Checkpoint for workflow resume\n */\nexport interface Checkpoint {\n /** Unique checkpoint ID */\n id: string;\n /** Node ID where checkpoint was created */\n nodeId: string;\n /** When checkpoint was created */\n timestamp: Date;\n /** Context at checkpoint time */\n context: WorkflowContext;\n /** Node states at checkpoint time */\n nodeStates: Record<string, NodeState>;\n}\n\n/**\n * Pending approval request\n */\nexport interface PendingApproval {\n /** Unique approval ID */\n id: string;\n /** Node ID that requested approval */\n nodeId: string;\n /** Message for the approver */\n message: string;\n /** Payload with context for the approver */\n payload: unknown;\n /** Allowed approvers (if restricted) */\n approvers?: string[];\n /** When approval was requested */\n requestedAt: Date;\n /** When approval expires */\n expiresAt?: Date;\n /** Current approval status */\n status: \"pending\" | \"approved\" | \"rejected\" | \"expired\";\n /** Who approved/rejected */\n decidedBy?: string;\n /** When decision was made */\n decidedAt?: Date;\n /** Optional comment from approver */\n comment?: string;\n}\n\n/**\n * Workflow run - tracks execution of a workflow instance\n */\nexport interface WorkflowRun<TInput = unknown, TOutput = unknown> {\n /** Unique run ID */\n id: string;\n /** Workflow definition ID */\n workflowId: string;\n /** Workflow version */\n version?: string;\n /** Current status */\n status: WorkflowStatus;\n /** Input provided when started 
*/\n input: TInput;\n /** Final output (when completed) */\n output?: TOutput;\n\n // Execution state\n /** State of each node in the workflow */\n nodeStates: Record<string, NodeState>;\n /** Currently executing node IDs */\n currentNodes: string[];\n /** Accumulated context */\n context: WorkflowContext;\n\n // Durability\n /** Checkpoints for resume */\n checkpoints: Checkpoint[];\n /** Pending approvals */\n pendingApprovals: PendingApproval[];\n\n // Error state\n /** Error information if failed */\n error?: {\n message: string;\n stack?: string;\n nodeId?: string;\n };\n\n // Timing\n /** When run was created */\n createdAt: Date;\n /** When execution started */\n startedAt?: Date;\n /** When execution completed */\n completedAt?: Date;\n}\n\n// ============================================================================\n// Approval Decision\n// ============================================================================\n\n/**\n * Decision on a pending approval\n */\nexport interface ApprovalDecision {\n /** Whether the approval was granted */\n approved: boolean;\n /** Who made the decision */\n approver: string;\n /** Optional comment */\n comment?: string;\n}\n\n// ============================================================================\n// Workflow Job (for queue-based execution)\n// ============================================================================\n\n/**\n * Job for queue-based workflow execution\n */\nexport interface WorkflowJob {\n /** Run ID */\n runId: string;\n /** Workflow ID */\n workflowId: string;\n /** Input data */\n input: unknown;\n /** Priority (higher = more urgent) */\n priority?: number;\n /** When job was created */\n createdAt: Date;\n}\n\n// ============================================================================\n// Run Filter (for querying runs)\n// ============================================================================\n\n/**\n * Filter options for listing workflow runs\n */\nexport interface RunFilter {\n /** Filter by workflow ID */\n workflowId?: string;\n /** Filter by status */\n status?: WorkflowStatus | WorkflowStatus[];\n /** Filter by creation date (after) */\n createdAfter?: Date;\n /** Filter by creation date (before) */\n createdBefore?: Date;\n /** Maximum number of results */\n limit?: number;\n /** Offset for pagination */\n offset?: number;\n}\n\n// ============================================================================\n// Duration parsing utility type\n// ============================================================================\n\n/**\n * Duration string format: \"1h\", \"30m\", \"2d\", etc.\n */\nexport type DurationString = string;\n\n/**\n * Parse duration string to milliseconds\n *\n * @throws Error if duration is invalid, zero, or negative\n */\nexport function parseDuration(duration: string | number): number {\n if (typeof duration === \"number\") {\n if (duration < 0) {\n throw new Error(`Duration cannot be negative: ${duration}`);\n }\n return duration;\n }\n\n const match = duration.match(/^(\\d+(?:\\.\\d+)?)\\s*(ms|s|m|h|d)$/);\n if (!match) {\n throw new Error(`Invalid duration format: ${duration}`);\n }\n\n const value = match[1];\n const unit = match[2];\n\n if (!value || !unit) {\n throw new Error(`Invalid duration format: ${duration}`);\n }\n\n const num = parseFloat(value);\n\n // Reject zero and negative values\n if (num <= 0) {\n throw new Error(`Duration must be positive: ${duration}`);\n }\n\n switch (unit) {\n case \"ms\":\n return num;\n case \"s\":\n return num * 1000;\n case 
\"m\":\n return num * 60 * 1000;\n case \"h\":\n return num * 60 * 60 * 1000;\n case \"d\":\n return num * 24 * 60 * 60 * 1000;\n default:\n throw new Error(`Unknown duration unit: ${unit}`);\n }\n}\n\n/**\n * Validate retry configuration\n *\n * @throws Error if retry config has invalid values\n */\nexport function validateRetryConfig(config: RetryConfig): void {\n if (config.maxAttempts !== undefined) {\n if (!Number.isInteger(config.maxAttempts) || config.maxAttempts < 1) {\n throw new Error(`maxAttempts must be a positive integer, got: ${config.maxAttempts}`);\n }\n }\n\n if (config.initialDelay !== undefined) {\n if (config.initialDelay < 0) {\n throw new Error(`initialDelay cannot be negative: ${config.initialDelay}`);\n }\n }\n\n if (config.maxDelay !== undefined) {\n if (config.maxDelay < 0) {\n throw new Error(`maxDelay cannot be negative: ${config.maxDelay}`);\n }\n }\n\n if (config.initialDelay !== undefined && config.maxDelay !== undefined) {\n if (config.initialDelay > config.maxDelay) {\n throw new Error(\n `initialDelay (${config.initialDelay}) cannot be greater than maxDelay (${config.maxDelay})`,\n );\n }\n }\n\n if (config.backoff !== undefined) {\n const validBackoffs = [\"fixed\", \"linear\", \"exponential\"];\n if (!validBackoffs.includes(config.backoff)) {\n throw new Error(\n `Invalid backoff strategy: ${config.backoff}. Must be one of: ${validBackoffs.join(\", \")}`,\n );\n }\n }\n}\n\n/**\n * Generate a unique ID for workflow runs, nodes, etc.\n */\nexport function generateId(prefix: string = \"wf\"): string {\n const randomPart = crypto.randomUUID().slice(0, 12);\n return `${prefix}_${randomPart}`;\n}\n", "/**\n * Workflow DSL Builder\n *\n * Main factory function for creating durable workflows\n */\n\nimport type { z } from \"zod\";\nimport type {\n RetryConfig,\n StepBuilderContext,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a workflow\n */\nexport interface WorkflowOptions<TInput = unknown, TOutput = unknown> {\n /** Unique workflow identifier */\n id: string;\n /** Optional description */\n description?: string;\n /** Optional version */\n version?: string;\n /** Input validation schema (Zod) */\n inputSchema?: z.ZodSchema<TInput>;\n /** Output validation schema (Zod) */\n outputSchema?: z.ZodSchema<TOutput>;\n /** Default retry configuration for all steps */\n retry?: RetryConfig;\n /** Default timeout for the entire workflow */\n timeout?: string | number;\n /**\n * Workflow steps - can be:\n * - An array of WorkflowNode\n * - A function that returns an array based on input\n */\n steps:\n | WorkflowNode[]\n | ((context: StepBuilderContext<TInput>) => WorkflowNode[]);\n /** Error handler called when workflow fails */\n onError?: (error: Error, context: WorkflowContext) => void | Promise<void>;\n /** Completion handler called when workflow succeeds */\n onComplete?: (\n result: TOutput,\n context: WorkflowContext,\n ) => void | Promise<void>;\n}\n\n/**\n * Created workflow with execution methods\n */\nexport interface Workflow<TInput = unknown, TOutput = unknown> {\n /** Workflow definition */\n definition: WorkflowDefinition<TInput, TOutput>;\n /** Workflow ID */\n id: string;\n /** Workflow version */\n version?: string;\n}\n\n/**\n * Create a durable workflow definition\n *\n * @example\n * ```typescript\n * import { workflow, step, parallel, branch, waitForApproval } from 'veryfront/ai/workflow';\n * import { z } from 'zod';\n *\n * export default workflow({\n * id: 
'content-pipeline',\n * description: 'Generate and publish content with human review',\n *\n * inputSchema: z.object({\n * topic: z.string(),\n * requiresApproval: z.boolean().default(true),\n * }),\n *\n * timeout: '2h',\n *\n * steps: ({ input }) => [\n * // Research phase\n * step('research', {\n * agent: 'researcher',\n * input: `Research: ${input.topic}`,\n * }),\n *\n * // Generate content in parallel\n * parallel('generate', [\n * step('write-article', { agent: 'writer' }),\n * step('create-images', { tool: 'imageGenerator' }),\n * ]),\n *\n * // Optional approval gate\n * branch('approval-gate', {\n * condition: () => input.requiresApproval,\n * then: [\n * waitForApproval('human-review', {\n * timeout: '24h',\n * message: 'Please review the content',\n * }),\n * ],\n * }),\n *\n * // Publish\n * step('publish', { agent: 'publisher' }),\n * ],\n *\n * onComplete: async (result, context) => {\n * console.log('Workflow completed:', result);\n * },\n *\n * onError: async (error, context) => {\n * console.error('Workflow failed:', error);\n * },\n * });\n * ```\n */\nexport function workflow<TInput = unknown, TOutput = unknown>(\n options: WorkflowOptions<TInput, TOutput>,\n): Workflow<TInput, TOutput> {\n // Validate required fields\n if (!options.id) {\n throw new Error(\"Workflow must have an 'id'\");\n }\n\n if (!options.steps) {\n throw new Error(`Workflow \"${options.id}\" must have 'steps'`);\n }\n\n // Create the workflow definition\n const definition: WorkflowDefinition<TInput, TOutput> = {\n id: options.id,\n description: options.description,\n version: options.version,\n inputSchema: options.inputSchema,\n outputSchema: options.outputSchema,\n retry: options.retry,\n timeout: options.timeout,\n steps: options.steps,\n onError: options.onError,\n onComplete: options.onComplete,\n };\n\n return {\n definition,\n id: options.id,\n version: options.version,\n };\n}\n\n/**\n * Helper to build linear dependencies between nodes\n *\n * Takes an array of nodes and returns them with dependsOn set\n * so each node depends on the previous one.\n */\nexport function sequence(...nodes: WorkflowNode[]): WorkflowNode[] {\n return nodes.map((node, index) => {\n if (index === 0) {\n return node;\n }\n const prevNode = nodes[index - 1];\n return {\n ...node,\n dependsOn: prevNode ? 
[prevNode.id] : undefined,\n };\n });\n}\n\n/**\n * Create a DAG-based workflow with explicit dependencies\n *\n * @example\n * ```typescript\n * import { dag, workflow } from 'veryfront/ai/workflow';\n *\n * export default workflow({\n * id: 'data-pipeline',\n * steps: dag({\n * 'fetch': step('fetch', { tool: 'dataFetcher' }),\n * 'validate': step('validate', { agent: 'validator' }).dependsOn('fetch'),\n * 'transform-a': step('transform-a', { tool: 'transformerA' }).dependsOn('validate'),\n * 'transform-b': step('transform-b', { tool: 'transformerB' }).dependsOn('validate'),\n * 'aggregate': step('aggregate', { agent: 'aggregator' }).dependsOn('transform-a', 'transform-b'),\n * }),\n * });\n * ```\n */\nexport function dag(\n nodes: Record<string, WorkflowNode | { node: WorkflowNode; dependsOn: string[] }>,\n): WorkflowNode[] {\n const result: WorkflowNode[] = [];\n const seenIds = new Set<string>();\n\n for (const [id, value] of Object.entries(nodes)) {\n let nodeId: string;\n let node: WorkflowNode;\n\n if (\"node\" in value && \"dependsOn\" in value) {\n // Object with explicit dependencies\n nodeId = value.node.id || id;\n node = {\n ...value.node,\n id: nodeId,\n dependsOn: value.dependsOn,\n };\n } else {\n // Plain WorkflowNode\n const workflowNode = value as WorkflowNode;\n nodeId = workflowNode.id || id;\n node = {\n ...workflowNode,\n id: nodeId,\n };\n }\n\n // Check for duplicate IDs\n if (seenIds.has(nodeId)) {\n throw new Error(`Duplicate node ID detected in dag: \"${nodeId}\"`);\n }\n seenIds.add(nodeId);\n\n result.push(node);\n }\n\n return result;\n}\n\n/**\n * Helper to add dependencies to a node\n */\nexport function dependsOn(\n node: WorkflowNode,\n ...dependencies: string[]\n): WorkflowNode {\n return {\n ...node,\n dependsOn: [...(node.dependsOn || []), ...dependencies],\n };\n}\n", "/**\n * Step DSL Builder\n *\n * Creates step nodes for agent or tool execution\n */\n\nimport type { Agent } from \"../../types/agent.ts\";\nimport type { Tool } from \"../../types/tool.ts\";\nimport type {\n BaseNodeConfig,\n RetryConfig,\n StepNodeConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a step node\n */\nexport interface StepOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Agent ID or agent instance to execute */\n agent?: string | Agent;\n /** Tool ID or tool instance to execute */\n tool?: string | Tool | undefined;\n /** Input for the agent/tool */\n input?:\n | string\n | Record<string, unknown>\n | ((context: WorkflowContext) => unknown);\n /** Whether to checkpoint after this step (default: true for agents) */\n checkpoint?: boolean;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Timeout for this step */\n timeout?: string | number;\n /** Condition to skip this step */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a step node for agent or tool execution\n *\n * @example\n * ```typescript\n * // Agent step\n * step('research', {\n * agent: 'researcher',\n * input: 'Research AI safety',\n * checkpoint: true,\n * })\n *\n * // Tool step\n * step('fetch-data', {\n * tool: 'dataFetcher',\n * input: { url: 'https://api.example.com/data' },\n * })\n *\n * // Dynamic input from context\n * step('write', {\n * agent: 'writer',\n * input: (ctx) => ctx['research'].output,\n * })\n * ```\n */\nexport function step(id: string, options: StepOptions): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new 
Error(\"Node ID must be a non-empty string\");\n }\n\n // Validate that either agent or tool is specified\n if (!options.agent && !options.tool) {\n throw new Error(`Step \"${id}\" must specify either 'agent' or 'tool'`);\n }\n\n if (options.agent && options.tool) {\n throw new Error(`Step \"${id}\" cannot specify both 'agent' and 'tool'`);\n }\n\n // Default checkpoint to true for agent steps\n const shouldCheckpoint = options.checkpoint ?? !!options.agent;\n\n const config: StepNodeConfig = {\n type: \"step\",\n agent: options.agent,\n tool: options.tool,\n input: options.input,\n checkpoint: shouldCheckpoint,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Create a step that executes an agent\n * Convenience wrapper around step()\n */\nexport function agentStep(\n id: string,\n agent: string | Agent,\n options?: Omit<StepOptions, \"agent\" | \"tool\">,\n): WorkflowNode {\n return step(id, { ...options, agent });\n}\n\n/**\n * Create a step that executes a tool\n * Convenience wrapper around step()\n */\nexport function toolStep(\n id: string,\n tool: string | Tool,\n options?: Omit<StepOptions, \"agent\" | \"tool\">,\n): WorkflowNode {\n return step(id, { ...options, tool });\n}\n", "/**\n * Parallel DSL Builder\n *\n * Creates parallel nodes for concurrent execution\n */\n\nimport type {\n BaseNodeConfig,\n ParallelNodeConfig,\n RetryConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a parallel node\n */\nexport interface ParallelOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** How to handle parallel completion */\n strategy?: \"all\" | \"race\" | \"allSettled\";\n /** Whether to checkpoint after all parallel steps complete */\n checkpoint?: boolean;\n /** Retry configuration for the parallel group */\n retry?: RetryConfig;\n /** Timeout for all parallel steps */\n timeout?: string | number;\n /** Condition to skip this parallel group */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a parallel node for concurrent execution of multiple steps\n *\n * @example\n * ```typescript\n * // Execute multiple agents in parallel\n * parallel('analyze', [\n * step('security-scan', { agent: 'securityAgent' }),\n * step('code-quality', { agent: 'codeReviewAgent' }),\n * step('test-coverage', { tool: 'coverageAnalyzer' }),\n * ])\n *\n * // Race condition - first to complete wins\n * parallel('fast-response', [\n * step('gpt4', { agent: 'gpt4Agent' }),\n * step('claude', { agent: 'claudeAgent' }),\n * ], { strategy: 'race' })\n *\n * // Continue even if some fail\n * parallel('optional-checks', [\n * step('lint', { tool: 'linter' }),\n * step('typecheck', { tool: 'typechecker' }),\n * ], { strategy: 'allSettled' })\n * ```\n */\nexport function parallel(\n id: string,\n nodes: WorkflowNode[],\n options: ParallelOptions = {},\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!nodes || nodes.length === 0) {\n throw new Error(`Parallel node \"${id}\" must have at least one child node`);\n }\n\n // Generate unique IDs for child nodes if they're nested under this parallel\n // Also validate child node IDs\n const prefixedNodes = nodes.map((node, index) => {\n if (!node.id || typeof node.id !== \"string\") {\n throw new Error(`Child node at index ${index} in parallel \"${id}\" has invalid ID`);\n }\n 
return {\n ...node,\n id: node.id.startsWith(`${id}/`) ? node.id : `${id}/${node.id}`,\n };\n });\n\n const config: ParallelNodeConfig = {\n type: \"parallel\",\n nodes: prefixedNodes,\n strategy: options.strategy ?? \"all\",\n checkpoint: options.checkpoint ?? true,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n", "/**\n * Map DSL Builder\n *\n * Creates map nodes for dynamic fan-out execution\n */\n\nimport type {\n BaseNodeConfig,\n MapNodeConfig,\n RetryConfig,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a map node\n */\nexport interface MapOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Items to iterate over */\n items: unknown[] | ((context: WorkflowContext) => unknown[] | Promise<unknown[]>);\n /** Node or workflow to execute for each item */\n processor: WorkflowNode | WorkflowDefinition;\n /** Maximum concurrent executions */\n concurrency?: number;\n /** Whether to checkpoint after all items complete */\n checkpoint?: boolean;\n /** Retry configuration for the map group */\n retry?: RetryConfig;\n /** Timeout for all map items */\n timeout?: string | number;\n /** Condition to skip this map group */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a map node for dynamic fan-out execution\n *\n * @example\n * ```typescript\n * // Process a list of URLs dynamically\n * map('process-urls', {\n * items: (ctx) => ctx.input.urls,\n * processor: step('scrape', { tool: 'webScraper' }),\n * concurrency: 5\n * })\n * ```\n */\nexport function map(\n id: string,\n options: MapOptions,\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.items) {\n throw new Error(`Map node \"${id}\" must have 'items' configured`);\n }\n\n if (!options.processor) {\n throw new Error(`Map node \"${id}\" must have a 'processor' configured`);\n }\n\n const config: MapNodeConfig = {\n type: \"map\",\n items: options.items,\n processor: options.processor,\n concurrency: options.concurrency,\n checkpoint: options.checkpoint ?? 
true,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n", "/**\n * SubWorkflow DSL Builder\n *\n * Creates sub-workflow nodes for nested workflow execution\n */\n\nimport type {\n BaseNodeConfig,\n SubWorkflowNodeConfig,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a sub-workflow node\n */\nexport interface SubWorkflowOptions extends BaseNodeConfig {\n /** The workflow definition to execute */\n workflow: WorkflowDefinition;\n /** Input for the sub-workflow */\n input?: unknown | ((context: WorkflowContext) => unknown);\n /** Transform the sub-workflow output */\n output?: (result: unknown) => unknown;\n}\n\n/**\n * Create a sub-workflow node for nested execution\n *\n * @example\n * ```typescript\n * import mySubWorkflow from './my-sub-workflow';\n *\n * // Execute a sub-workflow\n * subWorkflow('nested-process', {\n * workflow: mySubWorkflow.definition,\n * input: (ctx) => ({ data: ctx.prevStep.result })\n * })\n * ```\n */\nexport function subWorkflow(\n id: string,\n options: SubWorkflowOptions,\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.workflow) {\n throw new Error(`SubWorkflow node \"${id}\" must have a 'workflow' configured`);\n }\n\n const config: SubWorkflowNodeConfig = {\n type: \"subWorkflow\",\n workflow: options.workflow,\n input: options.input,\n output: options.output,\n checkpoint: options.checkpoint,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n", "/**\n * Branch DSL Builder\n *\n * Creates conditional branch nodes for workflow control flow\n */\n\nimport type {\n BaseNodeConfig,\n BranchNodeConfig,\n RetryConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a branch node\n */\nexport interface BranchOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Condition to evaluate */\n condition: (context: WorkflowContext) => boolean | Promise<boolean>;\n /** Nodes to execute if condition is true */\n then: WorkflowNode[];\n /** Nodes to execute if condition is false (optional) */\n else?: WorkflowNode[];\n /** Whether to checkpoint after branching */\n checkpoint?: boolean;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Timeout for the entire branch */\n timeout?: string | number;\n /** Condition to skip the entire branch */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a conditional branch node\n *\n * @example\n * ```typescript\n * // Simple if-then branch\n * branch('approval-gate', {\n * condition: (ctx) => ctx.input.requiresApproval,\n * then: [\n * waitForApproval('human-review', { timeout: '24h' }),\n * ],\n * })\n *\n * // If-then-else branch\n * branch('quality-check', {\n * condition: async (ctx) => {\n * const score = ctx['analyze'].output.score;\n * return score >= 0.8;\n * },\n * then: [\n * step('publish', { agent: 'publisher' }),\n * ],\n * else: [\n * step('revise', { agent: 'editor' }),\n * step('reanalyze', { agent: 'analyzer' }),\n * ],\n * })\n * ```\n */\nexport function branch(id: string, options: BranchOptions): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.condition) {\n 
throw new Error(`Branch \"${id}\" must specify a condition`);\n }\n\n if (!options.then || options.then.length === 0) {\n throw new Error(`Branch \"${id}\" must have at least one 'then' node`);\n }\n\n // Prefix child node IDs for proper namespacing\n const prefixThenNodes = options.then.map((node) => ({\n ...node,\n id: node.id.startsWith(`${id}/then/`) ? node.id : `${id}/then/${node.id}`,\n }));\n\n const prefixElseNodes = options.else?.map((node) => ({\n ...node,\n id: node.id.startsWith(`${id}/else/`) ? node.id : `${id}/else/${node.id}`,\n }));\n\n const config: BranchNodeConfig = {\n type: \"branch\",\n condition: options.condition,\n then: prefixThenNodes,\n else: prefixElseNodes,\n checkpoint: options.checkpoint ?? false,\n retry: options.retry,\n timeout: options.timeout,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Create a branch that only executes if condition is true (no else)\n * Convenience wrapper around branch()\n */\nexport function when(\n id: string,\n condition: (context: WorkflowContext) => boolean | Promise<boolean>,\n nodes: WorkflowNode[],\n): WorkflowNode {\n return branch(id, { condition, then: nodes });\n}\n\n/**\n * Create a branch that only executes if condition is false\n * Convenience wrapper around branch()\n */\nexport function unless(\n id: string,\n condition: (context: WorkflowContext) => boolean | Promise<boolean>,\n nodes: WorkflowNode[],\n): WorkflowNode {\n return branch(id, {\n condition: async (ctx) => !(await condition(ctx)),\n then: nodes,\n });\n}\n", "/**\n * Wait DSL Builder\n *\n * Creates wait nodes for approvals and external events\n */\n\nimport type {\n BaseNodeConfig,\n RetryConfig,\n WaitNodeConfig,\n WorkflowContext,\n WorkflowNode,\n} from \"../types.ts\";\n\n/**\n * Options for creating a wait-for-approval node\n */\nexport interface WaitForApprovalOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Message to display to the approver */\n message?: string;\n /** Payload to include with the approval request */\n payload?: unknown | ((context: WorkflowContext) => unknown);\n /** Timeout for the approval (e.g., \"24h\", \"7d\") */\n timeout?: string | number;\n /** Restrict approval to specific users */\n approvers?: string[];\n /** Retry configuration (for timeout/retry scenarios) */\n retry?: RetryConfig;\n /** Condition to skip this approval */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a wait-for-approval node\n *\n * This pauses the workflow until a human approves or rejects.\n * The workflow can be resumed via the approval API.\n *\n * @example\n * ```typescript\n * // Basic approval\n * waitForApproval('content-review', {\n * message: 'Please review the generated content',\n * timeout: '24h',\n * })\n *\n * // Approval with payload for context\n * waitForApproval('deployment-approval', {\n * message: 'Approve deployment to production?',\n * payload: (ctx) => ({\n * changes: ctx['summarize'].output,\n * riskLevel: ctx['analyze'].output.riskLevel,\n * }),\n * approvers: ['ops@company.com', 'lead@company.com'],\n * timeout: '48h',\n * })\n * ```\n */\nexport function waitForApproval(\n id: string,\n options: WaitForApprovalOptions = {},\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n const config: WaitNodeConfig = {\n type: \"wait\",\n waitType: \"approval\",\n message: options.message ?? 
\"Approval required\",\n payload: options.payload,\n approvers: options.approvers,\n timeout: options.timeout,\n // Always checkpoint before waiting\n checkpoint: true,\n retry: options.retry,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Options for creating a wait-for-event node\n */\nexport interface WaitForEventOptions extends Omit<BaseNodeConfig, \"checkpoint\"> {\n /** Event name to wait for */\n eventName: string;\n /** Timeout for the event (e.g., \"1h\", \"7d\") */\n timeout?: string | number;\n /** Retry configuration */\n retry?: RetryConfig;\n /** Condition to skip this wait */\n skip?: (context: WorkflowContext) => boolean | Promise<boolean>;\n}\n\n/**\n * Create a wait-for-event node\n *\n * This pauses the workflow until an external event is received.\n * Events can be sent via the workflow event API.\n *\n * @example\n * ```typescript\n * // Wait for external webhook\n * waitForEvent('payment-confirmation', {\n * eventName: 'payment.completed',\n * timeout: '30m',\n * })\n *\n * // Wait for manual trigger\n * waitForEvent('manual-continue', {\n * eventName: 'workflow.continue',\n * })\n * ```\n */\nexport function waitForEvent(\n id: string,\n options: WaitForEventOptions,\n): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n if (!options.eventName) {\n throw new Error(`waitForEvent \"${id}\" must specify an eventName`);\n }\n\n const config: WaitNodeConfig = {\n type: \"wait\",\n waitType: \"event\",\n eventName: options.eventName,\n timeout: options.timeout,\n // Always checkpoint before waiting\n checkpoint: true,\n retry: options.retry,\n skip: options.skip,\n };\n\n return {\n id,\n config,\n };\n}\n\n/**\n * Create a simple delay/sleep node\n *\n * @example\n * ```typescript\n * // Wait for 5 minutes between steps\n * delay('cooldown', '5m')\n * ```\n */\nexport function delay(id: string, duration: string | number): WorkflowNode {\n // Validate node ID\n if (!id || typeof id !== \"string\" || id.trim() === \"\") {\n throw new Error(\"Node ID must be a non-empty string\");\n }\n\n const config: WaitNodeConfig = {\n type: \"wait\",\n waitType: \"event\",\n eventName: \"__delay__\",\n timeout: duration,\n checkpoint: false, // No need to checkpoint for simple delays\n };\n\n return {\n id,\n config,\n };\n}\n", "// Conditional imports for path module\nimport nodePath from \"node:path\";\nimport type { PlatformPath } from \"node:path\";\n\n// Use node:path for Node.js or import Deno's std/path for Deno\nlet pathMod: PlatformPath | null = null;\n\n// Initialize path module synchronously for Node.js\n// @ts-ignore - Deno global\nif (typeof Deno === \"undefined\") {\n pathMod = nodePath;\n} else {\n // Deno environment - start loading asynchronously but don't await\n // @ts-ignore - Deno global\n import(\"std/path/mod.ts\").then((mod) => {\n pathMod = mod as unknown as PlatformPath;\n });\n}\n\n// Helper to get path module, ensuring it's loaded\nfunction getPathMod(): PlatformPath {\n if (pathMod) return pathMod;\n // In Deno, if pathMod is not yet loaded, use Node.js path as temporary fallback\n // This should rarely happen as the import is fast\n return nodePath;\n}\n\n// Re-export common path functions with proper types\nexport const basename = (path: string, suffix?: string): string =>\n getPathMod().basename(path, suffix);\nexport const dirname = (path: string): string => getPathMod().dirname(path);\nexport const fromFileUrl = 
(url: string | URL): string => {\n const mod = getPathMod();\n // @ts-ignore - Deno path module has fromFileUrl\n if (mod && typeof (mod as any).fromFileUrl === \"function\") {\n // @ts-ignore - Deno path module has fromFileUrl\n return (mod as any).fromFileUrl(url);\n }\n // Fallback for Node.js where fromFileUrl might not be directly available\n // This uses URL parsing which is generally cross-platform\n const urlObj = typeof url === \"string\" ? new URL(url) : url;\n return urlObj.pathname;\n};\nexport const join = (...paths: string[]): string => getPathMod().join(...paths);\nexport const relative = (from: string, to: string): string => getPathMod().relative(from, to);\nexport const resolve = (...paths: string[]): string => getPathMod().resolve(...paths);\nexport const extname = (path: string): string => getPathMod().extname(path);\nexport const isAbsolute = (path: string): boolean => getPathMod().isAbsolute(path);\n// Export sep - uses getter function to ensure pathMod is resolved\nexport const sep: string = nodePath.sep;\n", "export interface BuildContext {\n file?: string;\n line?: number;\n column?: number;\n moduleId?: string;\n phase?: \"parse\" | \"transform\" | \"bundle\" | \"optimize\";\n}\n\nexport interface APIContext {\n endpoint?: string;\n method?: string;\n statusCode?: number;\n headers?: Record<string, string>;\n}\n\nexport interface RenderContext {\n component?: string;\n route?: string;\n phase?: \"server\" | \"client\" | \"hydration\";\n props?: unknown;\n}\n\nexport interface ConfigContext {\n configFile?: string;\n field?: string;\n value?: unknown;\n expected?: string;\n}\n\nexport interface AgentContext {\n agentId?: string;\n intent?: string;\n timeout?: number;\n}\n\nexport interface FileContext {\n path?: string;\n operation?: \"read\" | \"write\" | \"delete\" | \"mkdir\";\n permissions?: string;\n}\n\nexport interface NetworkContext {\n url?: string;\n timeout?: number;\n retryCount?: number;\n}\n\nexport type VeryfrontError =\n | { type: \"build\"; message: string; context?: BuildContext }\n | { type: \"api\"; message: string; context?: APIContext }\n | { type: \"render\"; message: string; context?: RenderContext }\n | { type: \"config\"; message: string; context?: ConfigContext }\n | { type: \"agent\"; message: string; context?: AgentContext }\n | { type: \"file\"; message: string; context?: FileContext }\n | { type: \"network\"; message: string; context?: NetworkContext }\n | { type: \"permission\"; message: string; context?: FileContext }\n | { type: \"not_supported\"; message: string; feature?: string };\n\nexport function createError(error: VeryfrontError): VeryfrontError {\n return error;\n}\n\nexport function isBuildError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"build\" }> {\n return error.type === \"build\";\n}\n\nexport function isAPIError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"api\" }> {\n return error.type === \"api\";\n}\n\nexport function isRenderError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"render\" }> {\n return error.type === \"render\";\n}\n\nexport function isConfigError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"config\" }> {\n return error.type === \"config\";\n}\n\nexport function isAgentError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"agent\" }> {\n return error.type === \"agent\";\n}\n\nexport function isFileError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { 
type: \"file\" }> {\n return error.type === \"file\";\n}\n\nexport function isNetworkError(\n error: VeryfrontError,\n): error is Extract<VeryfrontError, { type: \"network\" }> {\n return error.type === \"network\";\n}\n\nexport function toError(veryfrontError: VeryfrontError): Error {\n const error = new Error(veryfrontError.message);\n error.name = `VeryfrontError[${veryfrontError.type}]`;\n Object.defineProperty(error, \"context\", {\n value: veryfrontError,\n enumerable: false,\n configurable: true,\n });\n return error;\n}\n\nexport function fromError(error: unknown): VeryfrontError | null {\n if (error && typeof error === \"object\" && \"context\" in error) {\n // Safe access after 'in' check\n const context = (error as Record<string, unknown>).context;\n if (\n context &&\n typeof context === \"object\" &&\n \"type\" in context &&\n \"message\" in context\n ) {\n return context as VeryfrontError;\n }\n }\n return null;\n}\n\nexport function logError(\n error: VeryfrontError,\n logger?: { error: (msg: string, ...args: unknown[]) => void },\n): void {\n const log = logger || console;\n const context = \"context\" in error ? error.context || {} : {};\n log.error(`[${error.type}] ${error.message}`, context);\n}\n", "export const isDeno = typeof Deno !== \"undefined\";\nexport const isNode =\n typeof (globalThis as { process?: { versions?: { node?: string } } }).process !== \"undefined\" &&\n (globalThis as { process?: { versions?: { node?: string } } }).process?.versions?.node !==\n undefined;\nexport const isBun = typeof (globalThis as { Bun?: unknown }).Bun !== \"undefined\";\nexport const isCloudflare = typeof globalThis !== \"undefined\" && \"caches\" in globalThis &&\n \"WebSocketPair\" in globalThis;\n\n/**\n * Detect if running in Node.js (vs Deno)\n * Use this function instead of the constant when runtime detection needs to happen\n * at call time (e.g., when bundled with esbuild's __esm lazy initialization pattern)\n */\nexport function isNodeRuntime(): boolean {\n // deno-lint-ignore no-explicit-any\n const _global = globalThis as any;\n return typeof Deno === \"undefined\" && typeof _global.process !== \"undefined\" &&\n !!_global.process?.versions?.node;\n}\n", "/**\n * Cross-platform filesystem abstraction for CLI commands and standalone utilities.\n *\n * This module provides a synchronous-style API for filesystem operations that works\n * across Deno, Node.js, and Bun runtimes. 
It's designed for CLI commands and scripts\n * where you don't have access to a RuntimeAdapter context.\n *\n * For server/rendering contexts where you have an adapter, prefer using adapter.fs directly:\n * ```ts\n * const adapter = await getAdapter();\n * const content = await adapter.fs.readFile(path);\n * ```\n *\n * For CLI commands and standalone utilities, use createFileSystem():\n * ```ts\n * import { createFileSystem } from \"@veryfront/platform/compat/fs.ts\";\n * const fs = createFileSystem();\n * const content = await fs.readTextFile(path);\n * ```\n *\n * @module\n */\n\nimport type { FileInfo } from \"@veryfront/platform/adapters/base.ts\";\nimport { createError, toError } from \"../../core/errors/veryfront-error.ts\";\nimport { isDeno, isNode } from \"./runtime.ts\";\n\n/**\n * Cross-platform filesystem interface for CLI commands and standalone utilities.\n * Compatible with RuntimeAdapter.fs (FileSystemAdapter) for easy interoperability.\n */\nexport interface FileSystem {\n readTextFile(path: string): Promise<string>;\n readFile(path: string): Promise<Uint8Array>; // Changed to Uint8Array for binary\n writeTextFile(path: string, data: string): Promise<void>;\n writeFile(path: string, data: Uint8Array): Promise<void>; // Changed to Uint8Array for binary\n exists(path: string): Promise<boolean>;\n stat(path: string): Promise<FileInfo>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n readDir(path: string): AsyncIterable<{ name: string; isFile: boolean; isDirectory: boolean }>;\n remove(path: string, options?: { recursive?: boolean }): Promise<void>;\n makeTempDir(options?: { prefix?: string }): Promise<string>; // New for temp dirs\n}\n\n// ============================================================================\n// Node.js Implementation\n// ============================================================================\n\ninterface NodeFsPromises {\n readFile(\n path: string,\n options?: { encoding?: string; flag?: string } | string,\n ): Promise<string | Uint8Array>;\n writeFile(\n path: string,\n data: string | Uint8Array,\n options?: { encoding?: string; flag?: string } | string,\n ): Promise<void>;\n access(path: string, mode?: number): Promise<void>;\n stat(path: string): Promise<{\n isFile(): boolean;\n isDirectory(): boolean;\n isSymbolicLink(): boolean;\n size: number;\n mtime: Date;\n }>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n readdir(path: string, options: { withFileTypes: true }): Promise<\n Array<{\n name: string;\n isFile(): boolean;\n isDirectory(): boolean;\n isSymbolicLink(): boolean;\n }>\n >;\n rm(path: string, options?: { recursive?: boolean; force?: boolean }): Promise<void>;\n}\n\nclass NodeFileSystem implements FileSystem {\n private fs: NodeFsPromises | null = null;\n private os: typeof import(\"node:os\") | null = null;\n private path: typeof import(\"node:path\") | null = null;\n private initialized = false;\n\n private async ensureInitialized(): Promise<void> {\n if (this.initialized) return;\n\n if (!isNode) {\n throw toError(createError({\n type: \"not_supported\",\n message: \"Node.js fs modules not available\",\n feature: \"Node.js\",\n }));\n }\n\n // Use dynamic ESM imports for Node.js modules\n const [fsModule, osModule, pathModule] = await Promise.all([\n import(\"node:fs/promises\"),\n import(\"node:os\"),\n import(\"node:path\"),\n ]);\n\n this.fs = fsModule as unknown as NodeFsPromises;\n this.os = osModule;\n this.path = pathModule;\n this.initialized = true;\n }\n\n async 
readTextFile(path: string): Promise<string> {\n await this.ensureInitialized();\n return await (this.fs!.readFile(path, { encoding: \"utf8\" }) as Promise<string>);\n }\n\n async readFile(path: string): Promise<Uint8Array> {\n await this.ensureInitialized();\n return await (this.fs!.readFile(path) as Promise<Uint8Array>);\n }\n\n async writeTextFile(path: string, data: string): Promise<void> {\n await this.ensureInitialized();\n await this.fs!.writeFile(path, data, { encoding: \"utf8\" });\n }\n\n async writeFile(path: string, data: Uint8Array): Promise<void> {\n await this.ensureInitialized();\n await this.fs!.writeFile(path, data);\n }\n\n async exists(path: string): Promise<boolean> {\n await this.ensureInitialized();\n try {\n await this.fs!.access(path);\n return true;\n } catch (error: any) {\n if (error.code === \"ENOENT\") {\n return false;\n }\n throw error;\n }\n }\n\n async stat(path: string): Promise<FileInfo> {\n await this.ensureInitialized();\n const stat = await this.fs!.stat(path);\n return {\n isFile: stat.isFile(),\n isDirectory: stat.isDirectory(),\n isSymlink: stat.isSymbolicLink(),\n size: stat.size,\n mtime: stat.mtime,\n };\n }\n\n async mkdir(path: string, options?: { recursive?: boolean }): Promise<void> {\n await this.ensureInitialized();\n await this.fs!.mkdir(path, { recursive: options?.recursive ?? false });\n }\n\n async *readDir(\n path: string,\n ): AsyncIterable<{ name: string; isFile: boolean; isDirectory: boolean }> {\n await this.ensureInitialized();\n const entries = await this.fs!.readdir(path, { withFileTypes: true });\n for (const entry of entries) {\n yield {\n name: entry.name,\n isFile: entry.isFile(),\n isDirectory: entry.isDirectory(),\n };\n }\n }\n\n async remove(path: string, options?: { recursive?: boolean }): Promise<void> {\n await this.ensureInitialized();\n // Node.js fs.rm requires force for recursive deletion of non-empty directories\n await this.fs!.rm(path, {\n recursive: options?.recursive ?? false,\n force: options?.recursive ?? false,\n });\n }\n\n async makeTempDir(options?: { prefix?: string }): Promise<string> {\n await this.ensureInitialized();\n const tempDir = this.path!.join(\n this.os!.tmpdir(),\n `${options?.prefix ?? 
\"tmp-\"}${Math.random().toString(36).substring(2, 8)}`,\n );\n await this.fs!.mkdir(tempDir, { recursive: true });\n return tempDir;\n }\n}\n\n// ============================================================================\n// Deno Implementation\n// ============================================================================\n\nclass DenoFileSystem implements FileSystem {\n async readTextFile(path: string): Promise<string> {\n // @ts-ignore - Deno global\n return await Deno.readTextFile(path);\n }\n\n async readFile(path: string): Promise<Uint8Array> {\n // @ts-ignore - Deno global\n return await Deno.readFile(path);\n }\n\n async writeTextFile(path: string, data: string): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.writeTextFile(path, data);\n }\n\n async writeFile(path: string, data: Uint8Array): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.writeFile(path, data);\n }\n\n async exists(path: string): Promise<boolean> {\n try {\n // @ts-ignore - Deno global\n await Deno.stat(path);\n return true;\n } catch (error: any) {\n // @ts-ignore - Deno global\n if (error instanceof Deno.errors.NotFound) {\n return false;\n }\n throw error;\n }\n }\n\n async stat(path: string): Promise<FileInfo> {\n // @ts-ignore - Deno global\n const stat = await Deno.stat(path);\n return {\n isFile: stat.isFile,\n isDirectory: stat.isDirectory,\n isSymlink: stat.isSymlink,\n size: stat.size,\n mtime: stat.mtime,\n };\n }\n\n async mkdir(path: string, options?: { recursive?: boolean }): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.mkdir(path, { recursive: options?.recursive ?? false });\n }\n\n async *readDir(\n path: string,\n ): AsyncIterable<{ name: string; isFile: boolean; isDirectory: boolean }> {\n // @ts-ignore - Deno global\n for await (const entry of Deno.readDir(path)) {\n yield {\n name: entry.name,\n isFile: entry.isFile,\n isDirectory: entry.isDirectory,\n };\n }\n }\n\n async remove(path: string, options?: { recursive?: boolean }): Promise<void> {\n // @ts-ignore - Deno global\n await Deno.remove(path, { recursive: options?.recursive ?? 
false });\n }\n\n async makeTempDir(options?: { prefix?: string }): Promise<string> {\n // @ts-ignore - Deno global\n return await Deno.makeTempDir({ prefix: options?.prefix });\n }\n}\n\n/**\n * Create a cross-platform filesystem instance for CLI commands and standalone utilities.\n *\n * Use this for CLI commands that don't have access to a RuntimeAdapter context:\n * ```ts\n * const fs = createFileSystem();\n * const content = await fs.readTextFile(path);\n * await fs.writeTextFile(outputPath, result);\n * ```\n *\n * For server/rendering contexts, prefer using adapter.fs directly.\n *\n * Note: For npm package, always uses Node.js fs APIs for cross-platform compatibility.\n */\nexport function createFileSystem(): FileSystem {\n if (isDeno) {\n return new DenoFileSystem();\n } else {\n // Node.js or Bun\n return new NodeFileSystem();\n }\n}\n", "export interface GlobalWithDeno {\n Deno?: {\n env: {\n get(key: string): string | undefined;\n };\n };\n}\n\nexport interface GlobalWithProcess {\n process?: {\n env: Record<string, string | undefined>;\n version?: string;\n versions?: Record<string, string>;\n };\n}\n\nexport interface GlobalWithBun {\n Bun?: {\n version: string;\n };\n}\n\nexport function hasDenoRuntime(global: unknown): global is GlobalWithDeno {\n return (\n typeof global === \"object\" &&\n global !== null &&\n \"Deno\" in global &&\n typeof (global as GlobalWithDeno).Deno?.env?.get === \"function\"\n );\n}\n\nexport function hasNodeProcess(global: unknown): global is GlobalWithProcess {\n return (\n typeof global === \"object\" &&\n global !== null &&\n \"process\" in global &&\n typeof (global as GlobalWithProcess).process?.env === \"object\"\n );\n}\n\nexport function hasBunRuntime(global: unknown): global is GlobalWithBun {\n return (\n typeof global === \"object\" &&\n global !== null &&\n \"Bun\" in global &&\n typeof (global as GlobalWithBun).Bun !== \"undefined\"\n );\n}\n", "import type { GlobalWithDeno, GlobalWithProcess } from \"../runtime-guards.ts\";\nimport { hasDenoRuntime, hasNodeProcess } from \"../runtime-guards.ts\";\n\nexport function getEnvironmentVariable(name: string): string | undefined {\n try {\n if (typeof Deno !== \"undefined\" && hasDenoRuntime(globalThis)) {\n const value = (globalThis as GlobalWithDeno).Deno?.env.get(name);\n return value === \"\" ? undefined : value;\n }\n if (hasNodeProcess(globalThis)) {\n const value = (globalThis as GlobalWithProcess).process?.env[name];\n return value === \"\" ? 
undefined : value;\n }\n } catch {\n return undefined;\n }\n return undefined;\n}\n\nexport function isTestEnvironment(): boolean {\n return getEnvironmentVariable(\"NODE_ENV\") === \"test\";\n}\n\nexport function isProductionEnvironment(): boolean {\n return getEnvironmentVariable(\"NODE_ENV\") === \"production\";\n}\n\nexport function isDevelopmentEnvironment(): boolean {\n const env = getEnvironmentVariable(\"NODE_ENV\");\n return env === \"development\" || env === undefined;\n}\n", "import { getEnvironmentVariable } from \"./env.ts\";\n\nexport enum LogLevel {\n DEBUG = 0,\n INFO = 1,\n WARN = 2,\n ERROR = 3,\n}\n\nexport interface Logger {\n debug(message: string, ...args: unknown[]): void;\n info(message: string, ...args: unknown[]): void;\n warn(message: string, ...args: unknown[]): void;\n error(message: string, ...args: unknown[]): void;\n time<T>(label: string, fn: () => Promise<T>): Promise<T>;\n}\n\nconst originalConsole = {\n debug: console.debug,\n log: console.log,\n warn: console.warn,\n error: console.error,\n};\n\nlet cachedLogLevel: LogLevel | undefined;\n\nfunction resolveLogLevel(force = false): LogLevel {\n if (force || cachedLogLevel === undefined) {\n cachedLogLevel = getDefaultLevel();\n }\n return cachedLogLevel;\n}\n\nclass ConsoleLogger implements Logger {\n constructor(\n private prefix: string,\n private level: LogLevel = resolveLogLevel(),\n ) {}\n\n setLevel(level: LogLevel): void {\n this.level = level;\n }\n\n getLevel(): LogLevel {\n return this.level;\n }\n\n debug(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.DEBUG) {\n console.debug(`[${this.prefix}] DEBUG: ${message}`, ...args);\n }\n }\n\n info(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.INFO) {\n console.log(`[${this.prefix}] ${message}`, ...args);\n }\n }\n\n warn(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.WARN) {\n console.warn(`[${this.prefix}] WARN: ${message}`, ...args);\n }\n }\n\n error(message: string, ...args: unknown[]): void {\n if (this.level <= LogLevel.ERROR) {\n console.error(`[${this.prefix}] ERROR: ${message}`, ...args);\n }\n }\n\n async time<T>(label: string, fn: () => Promise<T>): Promise<T> {\n const start = performance.now();\n try {\n const result = await fn();\n const end = performance.now();\n this.debug(`${label} completed in ${(end - start).toFixed(2)}ms`);\n return result;\n } catch (error) {\n const end = performance.now();\n this.error(`${label} failed after ${(end - start).toFixed(2)}ms`, error);\n throw error;\n }\n }\n}\n\nfunction parseLogLevel(levelString: string | undefined): LogLevel | undefined {\n if (!levelString) return undefined;\n const upper = levelString.toUpperCase();\n switch (upper) {\n case \"DEBUG\":\n return LogLevel.DEBUG;\n case \"WARN\":\n return LogLevel.WARN;\n case \"ERROR\":\n return LogLevel.ERROR;\n case \"INFO\":\n return LogLevel.INFO;\n default:\n return undefined;\n }\n}\n\nconst getDefaultLevel = (): LogLevel => {\n const envLevel = getEnvironmentVariable(\"LOG_LEVEL\");\n const parsedLevel = parseLogLevel(envLevel);\n if (parsedLevel !== undefined) return parsedLevel;\n\n const debugFlag = getEnvironmentVariable(\"VERYFRONT_DEBUG\");\n if (debugFlag === \"1\" || debugFlag === \"true\") return LogLevel.DEBUG;\n\n return LogLevel.INFO;\n};\n\nconst trackedLoggers = new Set<ConsoleLogger>();\n\nfunction createLogger(prefix: string): ConsoleLogger {\n const logger = new ConsoleLogger(prefix);\n trackedLoggers.add(logger);\n return logger;\n}\n\nexport 
const cliLogger = createLogger(\"CLI\");\nexport const serverLogger = createLogger(\"SERVER\");\nexport const rendererLogger = createLogger(\"RENDERER\");\nexport const bundlerLogger = createLogger(\"BUNDLER\");\nexport const agentLogger = createLogger(\"AGENT\");\n\nexport const logger = createLogger(\"VERYFRONT\");\n\ntype LoggerResetOptions = {\n restoreConsole?: boolean;\n};\n\nexport function __loggerResetForTests(options: LoggerResetOptions = {}): void {\n const updatedLevel = resolveLogLevel(true);\n for (const instance of trackedLoggers) {\n instance.setLevel(updatedLevel);\n }\n\n if (options.restoreConsole) {\n console.debug = originalConsole.debug;\n console.log = originalConsole.log;\n console.warn = originalConsole.warn;\n console.error = originalConsole.error;\n }\n}\n", "export const SECONDS_PER_MINUTE = 60;\n\nexport const MINUTES_PER_HOUR = 60;\n\nexport const HOURS_PER_DAY = 24;\n\nexport const MS_PER_SECOND = 1000;\n\nexport const DEFAULT_LRU_MAX_ENTRIES = 100;\n\nexport const COMPONENT_LOADER_MAX_ENTRIES = 100;\nexport const COMPONENT_LOADER_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const MDX_RENDERER_MAX_ENTRIES = 200;\nexport const MDX_RENDERER_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const RENDERER_CORE_MAX_ENTRIES = 100;\nexport const RENDERER_CORE_TTL_MS = 5 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const TSX_LAYOUT_MAX_ENTRIES = 50;\nexport const TSX_LAYOUT_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const DATA_FETCHING_MAX_ENTRIES = 200;\nexport const DATA_FETCHING_TTL_MS = 10 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const MDX_CACHE_TTL_PRODUCTION_MS = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE *\n MS_PER_SECOND;\nexport const MDX_CACHE_TTL_DEVELOPMENT_MS = 5 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const BUNDLE_CACHE_TTL_PRODUCTION_MS = HOURS_PER_DAY * MINUTES_PER_HOUR *\n SECONDS_PER_MINUTE * MS_PER_SECOND;\nexport const BUNDLE_CACHE_TTL_DEVELOPMENT_MS = 5 * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const BUNDLE_MANIFEST_PROD_TTL_MS = 7 * HOURS_PER_DAY * MINUTES_PER_HOUR *\n SECONDS_PER_MINUTE * MS_PER_SECOND;\nexport const BUNDLE_MANIFEST_DEV_TTL_MS = MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const RSC_MANIFEST_CACHE_TTL_MS = 5000;\n\nexport const SERVER_ACTION_DEFAULT_TTL_SEC = MINUTES_PER_HOUR * SECONDS_PER_MINUTE;\n\nexport const DENO_KV_SAFE_SIZE_LIMIT_BYTES = 64_000;\n\nexport const HTTP_CACHE_SHORT_MAX_AGE_SEC = 60;\nexport const HTTP_CACHE_MEDIUM_MAX_AGE_SEC = 3600;\nexport const HTTP_CACHE_LONG_MAX_AGE_SEC = 31536000;\n\nexport const ONE_DAY_MS = HOURS_PER_DAY * MINUTES_PER_HOUR * SECONDS_PER_MINUTE * MS_PER_SECOND;\n\nexport const CACHE_CLEANUP_INTERVAL_MS = 60000;\n\nexport const LRU_DEFAULT_MAX_ENTRIES = 1000;\n\nexport const LRU_DEFAULT_MAX_SIZE_BYTES = 50 * 1024 * 1024;\n\nexport const CLEANUP_INTERVAL_MULTIPLIER = 2;\n", "{\n \"name\": \"veryfront\",\n \"version\": \"0.0.53\",\n \"exclude\": [\n \"npm/\",\n \"dist/\",\n \"coverage/\",\n \"scripts/\",\n \"examples/\",\n \"tests/\",\n \"src/cli/templates/files/\",\n \"src/cli/templates/integrations/\"\n ],\n \"exports\": {\n \".\": \"./src/index.ts\",\n \"./cli\": \"./src/cli/main.ts\",\n \"./server\": \"./src/server/index.ts\",\n \"./middleware\": \"./src/middleware/index.ts\",\n \"./components\": \"./src/react/components/index.ts\",\n \"./data\": \"./src/data/index.ts\",\n \"./config\": \"./src/core/config/index.ts\",\n \"./platform\": \"./src/platform/index.ts\",\n \"./ai\": 
\"./src/ai/index.ts\",\n \"./ai/client\": \"./src/ai/client.ts\",\n \"./ai/react\": \"./src/ai/react/index.ts\",\n \"./ai/primitives\": \"./src/ai/react/primitives/index.ts\",\n \"./ai/components\": \"./src/ai/react/components/index.ts\",\n \"./ai/production\": \"./src/ai/production/index.ts\",\n \"./ai/dev\": \"./src/ai/dev/index.ts\",\n \"./ai/workflow\": \"./src/ai/workflow/index.ts\",\n \"./ai/workflow/react\": \"./src/ai/workflow/react/index.ts\",\n \"./oauth\": \"./src/core/oauth/index.ts\",\n \"./oauth/providers\": \"./src/core/oauth/providers/index.ts\",\n \"./oauth/handlers\": \"./src/core/oauth/handlers/index.ts\",\n \"./oauth/token-store\": \"./src/core/oauth/token-store/index.ts\"\n },\n \"imports\": {\n \"@veryfront\": \"./src/index.ts\",\n \"@veryfront/\": \"./src/\",\n \"@veryfront/ai\": \"./src/ai/index.ts\",\n \"@veryfront/ai/\": \"./src/ai/\",\n \"@veryfront/platform\": \"./src/platform/index.ts\",\n \"@veryfront/platform/\": \"./src/platform/\",\n \"@veryfront/types\": \"./src/core/types/index.ts\",\n \"@veryfront/types/\": \"./src/core/types/\",\n \"@veryfront/utils\": \"./src/core/utils/index.ts\",\n \"@veryfront/utils/\": \"./src/core/utils/\",\n \"@veryfront/middleware\": \"./src/middleware/index.ts\",\n \"@veryfront/middleware/\": \"./src/middleware/\",\n \"@veryfront/errors\": \"./src/core/errors/index.ts\",\n \"@veryfront/errors/\": \"./src/core/errors/\",\n \"@veryfront/config\": \"./src/core/config/index.ts\",\n \"@veryfront/config/\": \"./src/core/config/\",\n \"@veryfront/observability\": \"./src/observability/index.ts\",\n \"@veryfront/observability/\": \"./src/observability/\",\n \"@veryfront/routing\": \"./src/routing/index.ts\",\n \"@veryfront/routing/\": \"./src/routing/\",\n \"@veryfront/transforms\": \"./src/build/transforms/index.ts\",\n \"@veryfront/transforms/\": \"./src/build/transforms/\",\n \"@veryfront/data\": \"./src/data/index.ts\",\n \"@veryfront/data/\": \"./src/data/\",\n \"@veryfront/security\": \"./src/security/index.ts\",\n \"@veryfront/security/\": \"./src/security/\",\n \"@veryfront/components\": \"./src/react/components/index.ts\",\n \"@veryfront/react\": \"./src/react/index.ts\",\n \"@veryfront/react/\": \"./src/react/\",\n \"@veryfront/html\": \"./src/html/index.ts\",\n \"@veryfront/html/\": \"./src/html/\",\n \"@veryfront/rendering\": \"./src/rendering/index.ts\",\n \"@veryfront/rendering/\": \"./src/rendering/\",\n \"@veryfront/build\": \"./src/build/index.ts\",\n \"@veryfront/build/\": \"./src/build/\",\n \"@veryfront/server\": \"./src/server/index.ts\",\n \"@veryfront/server/\": \"./src/server/\",\n \"@veryfront/modules\": \"./src/module-system/index.ts\",\n \"@veryfront/modules/\": \"./src/module-system/\",\n \"@veryfront/compat/console\": \"./src/platform/compat/console/index.ts\",\n \"@veryfront/compat/\": \"./src/platform/compat/\",\n \"@veryfront/oauth\": \"./src/core/oauth/index.ts\",\n \"@veryfront/oauth/\": \"./src/core/oauth/\",\n \"std/\": \"https://deno.land/std@0.220.0/\",\n \"@std/path\": \"https://deno.land/std@0.220.0/path/mod.ts\",\n \"@std/testing/bdd.ts\": \"https://deno.land/std@0.220.0/testing/bdd.ts\",\n \"@std/expect\": \"https://deno.land/std@0.220.0/expect/mod.ts\",\n \"csstype\": \"https://esm.sh/csstype@3.2.3\",\n \"@types/react\": \"https://esm.sh/@types/react@18.3.27?deps=csstype@3.2.3\",\n \"@types/react-dom\": \"https://esm.sh/@types/react-dom@18.3.7?deps=csstype@3.2.3\",\n \"react\": \"https://esm.sh/react@18.3.1\",\n \"react-dom\": \"https://esm.sh/react-dom@18.3.1\",\n \"react-dom/server\": 
\"https://esm.sh/react-dom@18.3.1/server\",\n \"react-dom/client\": \"https://esm.sh/react-dom@18.3.1/client\",\n \"react/jsx-runtime\": \"https://esm.sh/react@18.3.1/jsx-runtime\",\n \"react/jsx-dev-runtime\": \"https://esm.sh/react@18.3.1/jsx-dev-runtime\",\n \"@mdx-js/mdx\": \"https://esm.sh/@mdx-js/mdx@3.0.0?deps=react@18.3.1,react-dom@18.3.1\",\n \"@mdx-js/react\": \"https://esm.sh/@mdx-js/react@3.0.0?deps=react@18.3.1,react-dom@18.3.1\",\n \"unist-util-visit\": \"https://esm.sh/unist-util-visit@5.0.0\",\n \"mdast-util-to-string\": \"https://esm.sh/mdast-util-to-string@4.0.0\",\n \"github-slugger\": \"https://esm.sh/github-slugger@2.0.0\",\n \"remark-gfm\": \"https://esm.sh/remark-gfm@4.0.1\",\n \"remark-frontmatter\": \"https://esm.sh/remark-frontmatter@5.0.0\",\n \"rehype-highlight\": \"https://esm.sh/rehype-highlight@7.0.2\",\n \"rehype-slug\": \"https://esm.sh/rehype-slug@6.0.0\",\n \"esbuild\": \"https://deno.land/x/esbuild@v0.20.1/wasm.js\",\n \"esbuild/mod.js\": \"https://deno.land/x/esbuild@v0.20.1/mod.js\",\n \"es-module-lexer\": \"https://esm.sh/es-module-lexer@1.5.0\",\n \"zod\": \"https://esm.sh/zod@3.22.0\",\n \"mime-types\": \"https://esm.sh/mime-types@2.1.35\",\n \"mdast\": \"https://esm.sh/@types/mdast@4.0.3\",\n \"hast\": \"https://esm.sh/@types/hast@3.0.3\",\n \"unist\": \"https://esm.sh/@types/unist@3.0.2\",\n \"unified\": \"https://esm.sh/unified@11.0.5?dts\",\n \"ai\": \"https://esm.sh/ai@5.0.76?deps=react@18.3.1,react-dom@18.3.1\",\n \"ai/react\": \"https://esm.sh/@ai-sdk/react@2.0.59?deps=react@18.3.1,react-dom@18.3.1\",\n \"@ai-sdk/react\": \"https://esm.sh/@ai-sdk/react@2.0.59?deps=react@18.3.1,react-dom@18.3.1\",\n \"@ai-sdk/openai\": \"https://esm.sh/@ai-sdk/openai@2.0.1\",\n \"@ai-sdk/anthropic\": \"https://esm.sh/@ai-sdk/anthropic@2.0.4\",\n \"unocss\": \"https://esm.sh/unocss@0.59.0\",\n \"@unocss/core\": \"https://esm.sh/@unocss/core@0.59.0\",\n \"@unocss/preset-wind\": \"https://esm.sh/@unocss/preset-wind@0.59.0\",\n \"redis\": \"npm:redis\",\n \"pg\": \"npm:pg\"\n },\n \"compilerOptions\": {\n \"jsx\": \"react-jsx\",\n \"jsxImportSource\": \"react\",\n \"strict\": true,\n \"noImplicitAny\": true,\n \"noUncheckedIndexedAccess\": true,\n \"types\": [],\n \"lib\": [\n \"deno.window\",\n \"dom\",\n \"dom.iterable\",\n \"dom.asynciterable\",\n \"deno.ns\"\n ]\n },\n \"tasks\": {\n \"setup\": \"deno run --allow-all scripts/setup.ts\",\n \"dev\": \"deno run --allow-all --no-lock --unstable-net --unstable-worker-options src/cli/main.ts dev\",\n \"build\": \"deno compile --allow-all --output ../../bin/veryfront src/cli/main.ts\",\n \"build:npm\": \"deno run -A scripts/build-npm.ts\",\n \"release\": \"deno run -A scripts/release.ts\",\n \"test\": \"DENO_JOBS=1 deno test --parallel --fail-fast --allow-all --unstable-worker-options --unstable-net\",\n \"test:unit\": \"DENO_JOBS=1 deno test --parallel --allow-all --v8-flags=--max-old-space-size=8192 --ignore=tests --unstable-worker-options --unstable-net\",\n \"test:integration\": \"DENO_JOBS=1 deno test --parallel --fail-fast --allow-all tests --unstable-worker-options --unstable-net\",\n \"test:coverage\": \"rm -rf coverage && DENO_JOBS=1 deno test --parallel --fail-fast --allow-all --coverage=coverage --unstable-worker-options --unstable-net || exit 1\",\n \"test:coverage:unit\": \"rm -rf coverage && DENO_JOBS=1 deno test --parallel --fail-fast --allow-all --coverage=coverage --ignore=tests --unstable-worker-options --unstable-net || exit 1\",\n \"test:coverage:integration\": \"rm -rf coverage && DENO_JOBS=1 
deno test --parallel --fail-fast --allow-all --coverage=coverage tests --unstable-worker-options --unstable-net || exit 1\",\n \"coverage:report\": \"deno coverage coverage --include=src/ --exclude=tests --exclude=src/**/*_test.ts --exclude=src/**/*_test.tsx --exclude=src/**/*.test.ts --exclude=src/**/*.test.tsx --lcov > coverage/lcov.info && deno run --allow-read scripts/check-coverage.ts 80\",\n \"coverage:html\": \"deno coverage coverage --include=src/ --exclude=tests --exclude=src/**/*_test.ts --exclude=src/**/*_test.tsx --exclude=src/**/*.test.ts --exclude=src/**/*.test.tsx --html\",\n \"lint\": \"DENO_NO_PACKAGE_JSON=1 deno lint src/\",\n \"fmt\": \"deno fmt src/\",\n \"typecheck\": \"deno check src/index.ts src/cli/main.ts src/server/index.ts src/routing/api/index.ts src/rendering/index.ts src/platform/index.ts src/platform/adapters/index.ts src/build/index.ts src/build/production-build/index.ts src/build/transforms/index.ts src/core/config/index.ts src/core/utils/index.ts src/data/index.ts src/security/index.ts src/middleware/index.ts src/server/handlers/dev/index.ts src/server/handlers/request/api/index.ts src/rendering/cache/index.ts src/rendering/cache/stores/index.ts src/rendering/rsc/actions/index.ts src/html/index.ts src/module-system/index.ts\",\n \"docs:check-links\": \"deno run -A scripts/check-doc-links.ts\",\n \"lint:ban-console\": \"deno run --allow-read scripts/ban-console.ts\",\n \"lint:ban-deep-imports\": \"deno run --allow-read scripts/ban-deep-imports.ts\",\n \"lint:ban-internal-root-imports\": \"deno run --allow-read scripts/ban-internal-root-imports.ts\",\n \"lint:check-awaits\": \"deno run --allow-read scripts/check-unawaited-promises.ts\",\n \"lint:platform\": \"deno run --allow-read scripts/lint-platform-agnostic.ts\",\n \"check:circular\": \"deno run -A jsr:@cunarist/deno-circular-deps src/index.ts\"\n },\n \"lint\": {\n \"include\": [\n \"src/**/*.ts\",\n \"src/**/*.tsx\"\n ],\n \"exclude\": [\n \"dist/\",\n \"coverage/\"\n ],\n \"rules\": {\n \"tags\": [\n \"recommended\"\n ],\n \"include\": [\n \"ban-untagged-todo\"\n ],\n \"exclude\": [\n \"no-explicit-any\",\n \"no-process-global\",\n \"no-console\"\n ]\n }\n },\n \"fmt\": {\n \"include\": [\n \"src/**/*.ts\",\n \"src/**/*.tsx\"\n ],\n \"exclude\": [\n \"dist/\",\n \"coverage/\"\n ],\n \"options\": {\n \"useTabs\": false,\n \"lineWidth\": 100,\n \"indentWidth\": 2,\n \"semiColons\": true,\n \"singleQuote\": false,\n \"proseWrap\": \"preserve\"\n }\n }\n}\n", "import { isDeno as IS_DENO } from \"./runtime.ts\";\n\nconst nodeProcess = (globalThis as { process?: typeof import(\"node:process\") }).process;\nconst hasNodeProcess = !!nodeProcess?.versions?.node;\n\nexport function getArgs(): string[] {\n if (IS_DENO) {\n return Deno.args;\n }\n if (hasNodeProcess) {\n return nodeProcess!.argv.slice(2);\n }\n return [];\n}\n\nexport function exit(code?: number): never {\n if (IS_DENO) {\n Deno.exit(code);\n }\n if (hasNodeProcess) {\n nodeProcess!.exit(code);\n }\n throw new Error(\"exit() is not supported in this runtime\");\n}\n\nexport function cwd(): string {\n if (IS_DENO) {\n return Deno.cwd();\n }\n if (hasNodeProcess) {\n return nodeProcess!.cwd();\n }\n throw new Error(\"cwd() is not supported in this runtime\");\n}\n\nexport function chdir(directory: string): void {\n if (IS_DENO) {\n Deno.chdir(directory);\n } else {\n if (hasNodeProcess) {\n nodeProcess!.chdir(directory);\n return;\n }\n throw new Error(\"chdir() is not supported in this runtime\");\n }\n}\n\nexport function env(): Record<string, 
string> {\n if (IS_DENO) {\n return Deno.env.toObject();\n }\n if (hasNodeProcess) {\n return nodeProcess!.env as Record<string, string>;\n }\n return {};\n}\n\nexport function getEnv(key: string): string | undefined {\n if (IS_DENO) {\n return Deno.env.get(key);\n }\n if (hasNodeProcess) {\n return nodeProcess!.env[key];\n }\n return undefined;\n}\n\n/**\n * Get an environment variable or throw if not set\n * @throws Error if the environment variable is not set\n */\nexport function requireEnv(key: string): string {\n const value = getEnv(key);\n if (value === undefined) {\n throw new Error(`Required environment variable \"${key}\" is not set`);\n }\n return value;\n}\n\nexport function setEnv(key: string, value: string): void {\n if (IS_DENO) {\n Deno.env.set(key, value);\n } else {\n if (hasNodeProcess) {\n nodeProcess!.env[key] = value;\n return;\n }\n throw new Error(\"setEnv() is not supported in this runtime\");\n }\n}\n\nexport function deleteEnv(key: string): void {\n if (IS_DENO) {\n Deno.env.delete(key);\n } else {\n if (hasNodeProcess) {\n delete nodeProcess!.env[key];\n return;\n }\n throw new Error(\"deleteEnv() is not supported in this runtime\");\n }\n}\n\nexport function pid(): number {\n if (IS_DENO) {\n return Deno.pid;\n }\n if (hasNodeProcess) {\n return nodeProcess!.pid;\n }\n return 0;\n}\n\nexport function ppid(): number {\n if (IS_DENO && \"ppid\" in Deno) {\n return Deno.ppid || 0;\n }\n if (hasNodeProcess) {\n return nodeProcess!.ppid || 0;\n }\n return 0;\n}\n\nexport function memoryUsage(): {\n rss: number;\n heapTotal: number;\n heapUsed: number;\n external: number;\n} {\n if (IS_DENO) {\n const usage = Deno.memoryUsage();\n return {\n rss: usage.rss,\n heapTotal: usage.heapTotal,\n heapUsed: usage.heapUsed,\n external: usage.external,\n };\n }\n\n if (!hasNodeProcess) {\n throw new Error(\"memoryUsage() is not supported in this runtime\");\n }\n\n const usage = nodeProcess!.memoryUsage();\n return {\n rss: usage.rss,\n heapTotal: usage.heapTotal,\n heapUsed: usage.heapUsed,\n external: usage.external || 0,\n };\n}\n\n/**\n * Check if stdin is a TTY (terminal)\n */\nexport function isInteractive(): boolean {\n if (IS_DENO) {\n return Deno.stdin.isTerminal();\n }\n if (hasNodeProcess) {\n return nodeProcess!.stdin.isTTY ?? 
false;\n }\n return false;\n}\n\n/**\n * Get network interfaces\n */\nexport async function getNetworkInterfaces(): Promise<\n Array<{ name: string; address: string; family: \"IPv4\" | \"IPv6\" }>\n> {\n if (IS_DENO) {\n const interfaces = Deno.networkInterfaces();\n return interfaces.map((iface) => ({\n name: iface.name,\n address: iface.address,\n family: iface.family as \"IPv4\" | \"IPv6\",\n }));\n }\n\n if (!hasNodeProcess) {\n throw new Error(\"networkInterfaces() is not supported in this runtime\");\n }\n\n const os = await import(\"node:os\");\n const interfaces = os.networkInterfaces();\n const result: Array<{ name: string; address: string; family: \"IPv4\" | \"IPv6\" }> = [];\n\n for (const [name, addrs] of Object.entries(interfaces)) {\n if (!addrs) continue;\n for (const addr of addrs) {\n result.push({\n name,\n address: addr.address,\n family: addr.family as \"IPv4\" | \"IPv6\",\n });\n }\n }\n\n return result;\n}\n\n/**\n * Get runtime version string\n */\nexport function getRuntimeVersion(): string {\n if (IS_DENO) {\n return `Deno ${Deno.version.deno}`;\n }\n if (\"Bun\" in globalThis) {\n return `Bun ${(globalThis as unknown as { Bun: { version: string } }).Bun.version}`;\n }\n if (hasNodeProcess) {\n return `Node.js ${nodeProcess!.version}`;\n }\n return \"unknown\";\n}\n\n/**\n * Register a signal handler (SIGINT, SIGTERM) for graceful shutdown\n */\nexport function onSignal(signal: \"SIGINT\" | \"SIGTERM\", handler: () => void): void {\n if (IS_DENO) {\n Deno.addSignalListener(signal, handler);\n } else if (hasNodeProcess) {\n nodeProcess!.on(signal, handler);\n }\n}\n\n/**\n * Unreference a timer to prevent it from keeping the process alive\n */\nexport function unrefTimer(timerId: ReturnType<typeof setInterval>): void {\n if (IS_DENO) {\n Deno.unrefTimer(timerId as number);\n } else if (timerId && typeof timerId === \"object\" && \"unref\" in timerId) {\n (timerId as { unref: () => void }).unref();\n }\n}\n\n/**\n * Get the executable path of the current runtime\n */\nexport function execPath(): string {\n if (IS_DENO) {\n return Deno.execPath();\n }\n if (hasNodeProcess) {\n return nodeProcess!.execPath;\n }\n return \"\";\n}\n\n/**\n * Get process uptime in seconds\n * Returns OS uptime on Deno, process uptime on Node.js\n */\nexport function uptime(): number {\n if (IS_DENO) {\n // Deno.osUptime() returns system uptime in seconds\n return Deno.osUptime?.() ?? 0;\n }\n if (hasNodeProcess) {\n // process.uptime() returns process uptime in seconds\n return nodeProcess!.uptime?.() ?? 
0;\n }\n return 0;\n}\n\n/**\n * Get stdout stream for writing\n * Returns null if not available (e.g., in browser/workers)\n */\nexport function getStdout(): { write: (data: string) => void } | null {\n if (IS_DENO) {\n const encoder = new TextEncoder();\n return {\n write: (data: string) => {\n Deno.stdout.writeSync(encoder.encode(data));\n },\n };\n }\n if (hasNodeProcess && nodeProcess!.stdout) {\n return {\n write: (data: string) => {\n nodeProcess!.stdout.write(data);\n },\n };\n }\n return null;\n}\n\n// Cached Node.js modules for synchronous prompt\nlet cachedNodeFs: typeof import(\"node:fs\") | null = null;\n\n/**\n * Synchronous prompt function that works across Deno and Node.js\n * Displays a message and reads user input from stdin\n */\nexport function promptSync(message?: string): string | null {\n if (IS_DENO) {\n // Deno has a built-in prompt() function\n return prompt(message);\n }\n\n if (hasNodeProcess) {\n // Print the message\n if (message) {\n nodeProcess!.stdout.write(message + \" \");\n }\n\n // Lazy load fs module\n if (!cachedNodeFs) {\n // Dynamic import converted to sync require for bundling\n // @ts-ignore - dynamic require for Node.js\n cachedNodeFs = globalThis.require?.(\"node:fs\") || null;\n if (!cachedNodeFs) {\n // Try alternative approach\n try {\n // @ts-ignore: __require is injected by bundlers for Node.js require\n cachedNodeFs = __require(\"node:fs\");\n } catch {\n return null;\n }\n }\n }\n\n if (!cachedNodeFs) {\n return null;\n }\n\n // Read synchronously using fs\n // This works by reading from file descriptor 0 (stdin)\n // Use Uint8Array for cross-platform compatibility\n const bufferSize = 1024;\n const uint8Array = new Uint8Array(bufferSize);\n let input = \"\";\n\n try {\n // Read from stdin (fd 0) synchronously\n const bytesRead = cachedNodeFs.readSync(0, uint8Array, 0, bufferSize, null);\n if (bytesRead > 0) {\n const decoder = new TextDecoder(\"utf-8\");\n input = decoder.decode(uint8Array.subarray(0, bytesRead)).trim();\n }\n } catch {\n // If stdin is not available or EOF, return null\n return null;\n }\n\n return input || null;\n }\n\n return null;\n}\n", "import denoConfig from \"../../../deno.json\" with { type: \"json\" };\nimport { getEnv } from \"../../platform/compat/process.ts\";\n\nexport const VERSION: string = getEnv(\"VERYFRONT_VERSION\") ||\n (typeof denoConfig.version === \"string\" ? 
denoConfig.version : \"0.0.0\");\n", "export const KB_IN_BYTES = 1024;\n\nexport const HTTP_MODULE_FETCH_TIMEOUT_MS = 2500;\n\nexport const HMR_RECONNECT_DELAY_MS = 1000;\n\nexport const HMR_RELOAD_DELAY_MS = 1000;\n\nexport const HMR_FILE_WATCHER_DEBOUNCE_MS = 100;\n\nexport const HMR_KEEP_ALIVE_INTERVAL_MS = 30000;\n\nexport const DASHBOARD_RECONNECT_DELAY_MS = 3000;\n\nexport const SERVER_FUNCTION_DEFAULT_TIMEOUT_MS = 30000;\n\nexport const PREFETCH_MAX_SIZE_BYTES = 200 * KB_IN_BYTES;\n\nexport const PREFETCH_DEFAULT_TIMEOUT_MS = 10000;\n\nexport const PREFETCH_DEFAULT_DELAY_MS = 200;\n\nexport const HTTP_OK = 200;\n\nexport const HTTP_NO_CONTENT = 204;\n\nexport const HTTP_CREATED = 201;\n\nexport const HTTP_REDIRECT_FOUND = 302;\n\nexport const HTTP_NOT_MODIFIED = 304;\n\nexport const HTTP_BAD_REQUEST = 400;\n\nexport const HTTP_UNAUTHORIZED = 401;\n\nexport const HTTP_FORBIDDEN = 403;\n\nexport const HTTP_NOT_FOUND = 404;\n\nexport const HTTP_METHOD_NOT_ALLOWED = 405;\n\nexport const HTTP_GONE = 410;\n\nexport const HTTP_PAYLOAD_TOO_LARGE = 413;\n\nexport const HTTP_URI_TOO_LONG = 414;\n\nexport const HTTP_TOO_MANY_REQUESTS = 429;\n\nexport const HTTP_REQUEST_HEADER_FIELDS_TOO_LARGE = 431;\n\nexport const HTTP_SERVER_ERROR = 500;\n\nexport const HTTP_INTERNAL_SERVER_ERROR = 500;\n\nexport const HTTP_BAD_GATEWAY = 502;\n\nexport const HTTP_NOT_IMPLEMENTED = 501;\n\nexport const HTTP_UNAVAILABLE = 503;\n\nexport const HTTP_NETWORK_CONNECT_TIMEOUT = 599;\n\nexport const HTTP_STATUS_SUCCESS_MIN = 200;\n\nexport const HTTP_STATUS_REDIRECT_MIN = 300;\n\nexport const HTTP_STATUS_CLIENT_ERROR_MIN = 400;\n\nexport const HTTP_STATUS_SERVER_ERROR_MIN = 500;\n\nexport const HTTP_CONTENT_TYPES = {\n JS: \"application/javascript; charset=utf-8\",\n JSON: \"application/json; charset=utf-8\",\n HTML: \"text/html; charset=utf-8\",\n CSS: \"text/css; charset=utf-8\",\n TEXT: \"text/plain; charset=utf-8\",\n} as const;\n\nimport { MS_PER_SECOND, SECONDS_PER_MINUTE } from \"./cache.ts\";\n\nexport const MS_PER_MINUTE = 60000;\n\nexport { MS_PER_SECOND, SECONDS_PER_MINUTE };\n\nexport const HTTP_CONTENT_TYPE_IMAGE_PNG = \"image/png\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_JPEG = \"image/jpeg\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_WEBP = \"image/webp\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_AVIF = \"image/avif\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_SVG = \"image/svg+xml\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_GIF = \"image/gif\";\n\nexport const HTTP_CONTENT_TYPE_IMAGE_ICO = \"image/x-icon\";\n", "import { KB_IN_BYTES } from \"./http.ts\";\n\nexport const HMR_MAX_MESSAGE_SIZE_BYTES = 1024 * KB_IN_BYTES;\n\nexport const HMR_MAX_MESSAGES_PER_MINUTE = 100;\n\nexport const HMR_CLIENT_RELOAD_DELAY_MS = 3000;\n\nexport const HMR_PORT_OFFSET = 1;\n\nexport const HMR_RATE_LIMIT_WINDOW_MS = 60000;\n\nexport const HMR_CLOSE_NORMAL = 1000;\n\nexport const HMR_CLOSE_RATE_LIMIT = 1008;\n\nexport const HMR_CLOSE_MESSAGE_TOO_LARGE = 1009;\n\nexport const HMR_MESSAGE_TYPES = {\n CONNECTED: \"connected\",\n UPDATE: \"update\",\n RELOAD: \"reload\",\n PING: \"ping\",\n PONG: \"pong\",\n} as const;\n\nexport function isValidHMRMessageType(type: string): type is keyof typeof HMR_MESSAGE_TYPES {\n return Object.values(HMR_MESSAGE_TYPES).includes(\n type as typeof HMR_MESSAGE_TYPES[keyof typeof HMR_MESSAGE_TYPES],\n );\n}\n", "export const DEFAULT_DEV_SERVER_PORT = 3000;\nexport const DEFAULT_REDIS_PORT = 6379;\nexport const DEFAULT_API_SERVER_PORT = 8080;\nexport const DEFAULT_PREVIEW_SERVER_PORT = 
5000;\nexport const DEFAULT_METRICS_PORT = 9000;\n\nexport const BYTES_PER_KB = 1024;\nexport const BYTES_PER_MB = 1024 * 1024;\n\nexport const DEFAULT_IMAGE_THUMBNAIL_SIZE = 256;\nexport const DEFAULT_IMAGE_SMALL_SIZE = 512;\nexport const DEFAULT_IMAGE_LARGE_SIZE = 2048;\n\nexport const RESPONSIVE_IMAGE_WIDTH_XS = 320;\nexport const RESPONSIVE_IMAGE_WIDTH_SM = 640;\nexport const RESPONSIVE_IMAGE_WIDTH_MD = 1024;\nexport const RESPONSIVE_IMAGE_WIDTH_LG = 1920;\n\nexport const RESPONSIVE_IMAGE_WIDTHS = [\n RESPONSIVE_IMAGE_WIDTH_XS,\n RESPONSIVE_IMAGE_WIDTH_SM,\n RESPONSIVE_IMAGE_WIDTH_MD,\n RESPONSIVE_IMAGE_WIDTH_LG,\n] as const;\n\nexport const MAX_CHUNK_SIZE_KB = 4096;\n\nexport const MIN_PORT = 1;\n\nexport const MAX_PORT = 65535;\n\nexport const DEFAULT_SERVER_PORT = 8000;\n", "/**\n * Centralized server endpoints and paths registry\n *\n * All internal veryfront URLs should be defined here as the single source of truth.\n * This prevents hardcoding URLs across the codebase and makes refactoring easier.\n */\n\n// Re-export DEFAULT_PORT from config/defaults.ts (the single source of truth)\nexport { DEFAULT_PORT } from \"@veryfront/config/defaults.ts\";\n\n/** Default port for development dashboard */\nexport const DEFAULT_DASHBOARD_PORT = 3002;\n\n/** Internal URL prefix for all veryfront endpoints */\nexport const INTERNAL_PREFIX = \"/_veryfront\" as const;\n\n/**\n * All internal veryfront URL path prefixes (directories)\n */\nexport const INTERNAL_PATH_PREFIXES = {\n /** React Server Components endpoints */\n RSC: `${INTERNAL_PREFIX}/rsc/`,\n /** File system access endpoints (base64 encoded paths) */\n FS: `${INTERNAL_PREFIX}/fs/`,\n /** Virtual module system */\n MODULES: `${INTERNAL_PREFIX}/modules/`,\n /** Generated page modules */\n PAGES: `${INTERNAL_PREFIX}/pages/`,\n /** Data JSON endpoints */\n DATA: `${INTERNAL_PREFIX}/data/`,\n /** Library modules (AI SDK, etc.) 
*/\n LIB: `${INTERNAL_PREFIX}/lib/`,\n /** Chunk assets */\n CHUNKS: `${INTERNAL_PREFIX}/chunks/`,\n /** Client component modules */\n CLIENT: `${INTERNAL_PREFIX}/client/`,\n} as const;\n\n/**\n * Specific internal endpoint URLs\n */\nexport const INTERNAL_ENDPOINTS = {\n // Development endpoints\n HMR_RUNTIME: `${INTERNAL_PREFIX}/hmr-runtime.js`,\n HMR: `${INTERNAL_PREFIX}/hmr.js`,\n HYDRATE: `${INTERNAL_PREFIX}/hydrate.js`,\n ERROR_OVERLAY: `${INTERNAL_PREFIX}/error-overlay.js`,\n DEV_LOADER: `${INTERNAL_PREFIX}/dev-loader.js`,\n CLIENT_LOG: `${INTERNAL_PREFIX}/log`,\n\n // Production endpoints\n CLIENT_JS: `${INTERNAL_PREFIX}/client.js`,\n ROUTER_JS: `${INTERNAL_PREFIX}/router.js`,\n PREFETCH_JS: `${INTERNAL_PREFIX}/prefetch.js`,\n MANIFEST_JSON: `${INTERNAL_PREFIX}/manifest.json`,\n APP_JS: `${INTERNAL_PREFIX}/app.js`,\n\n // RSC endpoints\n RSC_CLIENT: `${INTERNAL_PREFIX}/rsc/client.js`,\n RSC_MANIFEST: `${INTERNAL_PREFIX}/rsc/manifest`,\n RSC_STREAM: `${INTERNAL_PREFIX}/rsc/stream`,\n RSC_PAYLOAD: `${INTERNAL_PREFIX}/rsc/payload`,\n RSC_RENDER: `${INTERNAL_PREFIX}/rsc/render`,\n RSC_PAGE: `${INTERNAL_PREFIX}/rsc/page`,\n RSC_MODULE: `${INTERNAL_PREFIX}/rsc/module`,\n RSC_DOM: `${INTERNAL_PREFIX}/rsc/dom.js`,\n RSC_HYDRATOR: `${INTERNAL_PREFIX}/rsc/hydrator.js`,\n RSC_HYDRATE_CLIENT: `${INTERNAL_PREFIX}/rsc/hydrate-client.js`,\n\n // Library module endpoints\n LIB_AI_REACT: `${INTERNAL_PREFIX}/lib/ai/react.js`,\n LIB_AI_COMPONENTS: `${INTERNAL_PREFIX}/lib/ai/components.js`,\n LIB_AI_PRIMITIVES: `${INTERNAL_PREFIX}/lib/ai/primitives.js`,\n} as const;\n\n/**\n * Build output directory paths (relative)\n */\nexport const BUILD_DIRS = {\n /** Main build output directory */\n ROOT: \"_veryfront\",\n /** Chunks directory */\n CHUNKS: \"_veryfront/chunks\",\n /** Data directory */\n DATA: \"_veryfront/data\",\n /** Assets directory */\n ASSETS: \"_veryfront/assets\",\n} as const;\n\n/**\n * Local project directory paths (relative to project root)\n * These are .gitignore'd directories for caching and temporary files\n */\nexport const PROJECT_DIRS = {\n /** Base veryfront internal directory */\n ROOT: \".veryfront\",\n /** Cache directory for build artifacts, transforms, etc. 
*/\n CACHE: \".veryfront/cache\",\n /** KV store directory */\n KV: \".veryfront/kv\",\n /** Log files directory */\n LOGS: \".veryfront/logs\",\n /** Temporary files directory */\n TMP: \".veryfront/tmp\",\n} as const;\n\n/** Default cache directory path */\nexport const DEFAULT_CACHE_DIR = PROJECT_DIRS.CACHE;\n\n/**\n * Helper to check if a pathname is an internal veryfront endpoint\n */\nexport function isInternalEndpoint(pathname: string): boolean {\n return pathname.startsWith(INTERNAL_PREFIX + \"/\");\n}\n\n/**\n * Helper to check if a pathname is a static asset (has extension or is internal)\n */\nexport function isStaticAsset(pathname: string): boolean {\n return pathname.includes(\".\") || isInternalEndpoint(pathname);\n}\n\n/**\n * Normalize a chunk path to include the base prefix\n */\nexport function normalizeChunkPath(\n filename: string,\n basePath: string = INTERNAL_PATH_PREFIXES.CHUNKS,\n): string {\n if (filename.startsWith(\"/\")) {\n return filename;\n }\n return `${basePath.replace(/\\/$/, \"\")}/${filename}`;\n}\n\n// Re-export for backward compatibility\nexport const DEV_SERVER_ENDPOINTS = {\n HMR_RUNTIME: INTERNAL_ENDPOINTS.HMR_RUNTIME,\n ERROR_OVERLAY: INTERNAL_ENDPOINTS.ERROR_OVERLAY,\n} as const;\n", "/**\n * Project directory paths and file extensions\n *\n * For internal veryfront URL endpoints, see ./constants/server.ts\n */\n\nimport {\n BUILD_DIRS,\n INTERNAL_ENDPOINTS,\n INTERNAL_PATH_PREFIXES,\n INTERNAL_PREFIX,\n} from \"./constants/server.ts\";\n\nexport const PATHS = {\n PAGES_DIR: \"pages\",\n COMPONENTS_DIR: \"components\",\n PUBLIC_DIR: \"public\",\n STYLES_DIR: \"styles\",\n DIST_DIR: \"dist\",\n CONFIG_FILE: \"veryfront.config.js\",\n} as const;\n\n/**\n * @deprecated Use INTERNAL_PREFIX, INTERNAL_ENDPOINTS, INTERNAL_PATH_PREFIXES from ./constants/server.ts\n */\nexport const VERYFRONT_PATHS = {\n INTERNAL_PREFIX: INTERNAL_PREFIX,\n BUILD_DIR: BUILD_DIRS.ROOT,\n CHUNKS_DIR: BUILD_DIRS.CHUNKS,\n DATA_DIR: BUILD_DIRS.DATA,\n ASSETS_DIR: BUILD_DIRS.ASSETS,\n HMR_RUNTIME: INTERNAL_ENDPOINTS.HMR_RUNTIME,\n CLIENT_JS: INTERNAL_ENDPOINTS.CLIENT_JS,\n ROUTER_JS: INTERNAL_ENDPOINTS.ROUTER_JS,\n ERROR_OVERLAY: INTERNAL_ENDPOINTS.ERROR_OVERLAY,\n} as const;\n\nexport const FILE_EXTENSIONS = {\n MDX: [\".mdx\", \".md\"],\n SCRIPT: [\".tsx\", \".ts\", \".jsx\", \".js\"],\n STYLE: [\".css\", \".scss\", \".sass\"],\n ALL: [\".mdx\", \".md\", \".tsx\", \".ts\", \".jsx\", \".js\", \".css\"],\n} as const;\n\n// Re-export for convenience\nexport { BUILD_DIRS, INTERNAL_ENDPOINTS, INTERNAL_PATH_PREFIXES, INTERNAL_PREFIX };\n", "import { serverLogger as logger } from \"./logger/index.ts\";\n\nexport interface BundleMetadata {\n hash: string;\n codeHash: string;\n size: number;\n compiledAt: number;\n source: string;\n mode: \"development\" | \"production\";\n meta?: {\n type?: \"mdx\" | \"component\" | \"layout\" | \"provider\";\n depsHash?: string;\n reactVersion?: string;\n };\n}\n\nexport interface BundleCode {\n code: string;\n sourceMap?: string;\n css?: string;\n}\n\nexport interface BundleManifestStore {\n getBundleMetadata(key: string): Promise<BundleMetadata | undefined>;\n\n setBundleMetadata(key: string, metadata: BundleMetadata, ttlMs?: number): Promise<void>;\n\n getBundleCode(hash: string): Promise<BundleCode | undefined>;\n\n setBundleCode(hash: string, code: BundleCode, ttlMs?: number): Promise<void>;\n\n deleteBundle(key: string): Promise<void>;\n\n invalidateSource(source: string): Promise<number>;\n\n clear(): Promise<void>;\n\n isAvailable(): 
Promise<boolean>;\n\n getStats(): Promise<{\n totalBundles: number;\n totalSize: number;\n oldestBundle?: number;\n newestBundle?: number;\n }>;\n}\n\nexport class InMemoryBundleManifestStore implements BundleManifestStore {\n private metadata = new Map<string, { value: BundleMetadata; expiry?: number }>();\n private code = new Map<string, { value: BundleCode; expiry?: number }>();\n private sourceIndex = new Map<string, Set<string>>();\n\n getBundleMetadata(key: string): Promise<BundleMetadata | undefined> {\n const entry = this.metadata.get(key);\n if (!entry) return Promise.resolve(undefined);\n if (entry.expiry && Date.now() > entry.expiry) {\n this.metadata.delete(key);\n return Promise.resolve(undefined);\n }\n return Promise.resolve(entry.value);\n }\n\n setBundleMetadata(key: string, metadata: BundleMetadata, ttlMs?: number): Promise<void> {\n const expiry = ttlMs ? Date.now() + ttlMs : undefined;\n this.metadata.set(key, { value: metadata, expiry });\n\n if (!this.sourceIndex.has(metadata.source)) {\n this.sourceIndex.set(metadata.source, new Set());\n }\n this.sourceIndex.get(metadata.source)!.add(key);\n return Promise.resolve();\n }\n\n getBundleCode(hash: string): Promise<BundleCode | undefined> {\n const entry = this.code.get(hash);\n if (!entry) return Promise.resolve(undefined);\n if (entry.expiry && Date.now() > entry.expiry) {\n this.code.delete(hash);\n return Promise.resolve(undefined);\n }\n return Promise.resolve(entry.value);\n }\n\n setBundleCode(hash: string, code: BundleCode, ttlMs?: number): Promise<void> {\n const expiry = ttlMs ? Date.now() + ttlMs : undefined;\n this.code.set(hash, { value: code, expiry });\n return Promise.resolve();\n }\n\n async deleteBundle(key: string): Promise<void> {\n const metadata = await this.getBundleMetadata(key);\n this.metadata.delete(key);\n if (metadata) {\n this.code.delete(metadata.codeHash);\n const sourceKeys = this.sourceIndex.get(metadata.source);\n if (sourceKeys) {\n sourceKeys.delete(key);\n if (sourceKeys.size === 0) {\n this.sourceIndex.delete(metadata.source);\n }\n }\n }\n }\n\n async invalidateSource(source: string): Promise<number> {\n const keys = this.sourceIndex.get(source);\n if (!keys) return 0;\n\n let count = 0;\n for (const key of Array.from(keys)) {\n await this.deleteBundle(key);\n count++;\n }\n this.sourceIndex.delete(source);\n return count;\n }\n\n clear(): Promise<void> {\n this.metadata.clear();\n this.code.clear();\n this.sourceIndex.clear();\n return Promise.resolve();\n }\n\n isAvailable(): Promise<boolean> {\n return Promise.resolve(true);\n }\n\n getStats(): Promise<{\n totalBundles: number;\n totalSize: number;\n oldestBundle?: number;\n newestBundle?: number;\n }> {\n let totalSize = 0;\n let oldest: number | undefined;\n let newest: number | undefined;\n\n for (const { value } of this.metadata.values()) {\n totalSize += value.size;\n if (!oldest || value.compiledAt < oldest) oldest = value.compiledAt;\n if (!newest || value.compiledAt > newest) newest = value.compiledAt;\n }\n\n return Promise.resolve({\n totalBundles: this.metadata.size,\n totalSize,\n oldestBundle: oldest,\n newestBundle: newest,\n });\n }\n}\n\nlet manifestStore: BundleManifestStore = new InMemoryBundleManifestStore();\n\nexport function setBundleManifestStore(store: BundleManifestStore): void {\n manifestStore = store;\n logger.info(\"[bundle-manifest] Bundle manifest store configured\", {\n type: store.constructor.name,\n });\n}\n\nexport function getBundleManifestStore(): BundleManifestStore {\n return 
manifestStore;\n}\n\nexport { computeCodeHash, computeContentHash } from \"./hash-utils.ts\";\n", "/**\n * Local File System Blob Storage\n *\n * Stores blobs as files on the local disk\n */\n\nimport { dirname, join } from \"../../../platform/compat/path-helper.ts\";\nimport { createFileSystem, FileSystem } from \"../../../platform/compat/fs.ts\";\nimport type { BlobRef, BlobStorage, StoreBlobOptions } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\n\nexport class LocalBlobStorage implements BlobStorage {\n private rootDir: string;\n private baseUrl?: string;\n private fs: FileSystem;\n\n constructor(rootDir: string, baseUrl?: string) {\n this.rootDir = rootDir;\n this.baseUrl = baseUrl;\n this.fs = createFileSystem();\n }\n\n private getPath(id: string): string {\n // Partition by first 2 chars to avoid too many files in one dir\n const prefix = id.slice(0, 2);\n return join(this.rootDir, prefix, id);\n }\n\n private getMetadataPath(id: string): string {\n return this.getPath(id) + \".meta.json\";\n }\n\n async put(\n data: string | Uint8Array | Blob | ReadableStream,\n options: StoreBlobOptions = {},\n ): Promise<BlobRef> {\n const id = options.id || crypto.randomUUID();\n const filePath = this.getPath(id);\n const metaPath = this.getMetadataPath(id);\n\n await this.fs.mkdir(dirname(filePath), { recursive: true });\n\n let size = 0;\n\n if (typeof data === \"string\") {\n await this.fs.writeTextFile(filePath, data);\n size = new TextEncoder().encode(data).length;\n } else if (data instanceof Uint8Array) {\n await this.fs.writeFile(filePath, data);\n size = data.length;\n } else if (data instanceof Blob) {\n const arr = new Uint8Array(await data.arrayBuffer());\n await this.fs.writeFile(filePath, arr);\n size = data.size;\n } else if (data instanceof ReadableStream) {\n // Normalize stream to bytes for cross-runtime compatibility\n const buffer = new Uint8Array(await new Response(data).arrayBuffer());\n await this.fs.writeFile(filePath, buffer);\n size = buffer.length;\n } else {\n throw new Error(\"Unsupported data type for LocalBlobStorage\");\n }\n\n const ref: BlobRef = {\n __kind: \"blob\",\n id,\n size,\n mimeType: options.mimeType || \"application/octet-stream\",\n createdAt: new Date(),\n expiresAt: options.ttl ? new Date(Date.now() + options.ttl * 1000) : undefined,\n metadata: options.metadata,\n url: this.baseUrl ? 
`${this.baseUrl}/${id}` : undefined,\n };\n\n await this.fs.writeTextFile(metaPath, JSON.stringify(ref));\n\n return ref;\n }\n\n async getStream(id: string): Promise<ReadableStream | null> {\n try {\n const bytes = await this.getBytes(id);\n if (!bytes) return null;\n // Create a minimal cross-runtime ReadableStream from the bytes\n return new ReadableStream({\n start(controller) {\n controller.enqueue(bytes);\n controller.close();\n },\n });\n } catch {\n return null;\n }\n }\n\n async getText(id: string): Promise<string | null> {\n const filePath = this.getPath(id);\n try {\n return await this.fs.readTextFile(filePath);\n } catch {\n return null;\n }\n }\n\n async getBytes(id: string): Promise<Uint8Array | null> {\n const filePath = this.getPath(id);\n try {\n return await this.fs.readFile(filePath);\n } catch {\n return null;\n }\n }\n\n async delete(id: string): Promise<void> {\n const filePath = this.getPath(id);\n const metaPath = this.getMetadataPath(id);\n try {\n await this.fs.remove(filePath);\n await this.fs.remove(metaPath);\n } catch {\n // Ignore if not found\n }\n }\n\n async exists(id: string): Promise<boolean> {\n const filePath = this.getPath(id);\n return await this.fs.exists(filePath);\n }\n\n async stat(id: string): Promise<BlobRef | null> {\n const metaPath = this.getMetadataPath(id);\n try {\n const json = await this.fs.readTextFile(metaPath);\n const data = JSON.parse(json);\n return {\n ...data,\n createdAt: new Date(data.createdAt),\n expiresAt: data.expiresAt ? new Date(data.expiresAt) : undefined,\n };\n } catch {\n return null;\n }\n }\n\n /**\n * Cleans up all expired blobs from storage.\n * This method should typically be run periodically by an external process.\n */\n async cleanupExpiredBlobs(): Promise<void> {\n // Iterate over prefixes (00-ff)\n for (let i = 0; i < 256; i++) {\n const prefix = i.toString(16).padStart(2, \"0\");\n const prefixDir = join(this.rootDir, prefix);\n try {\n for await (const entry of this.fs.readDir(prefixDir)) {\n if (entry.isFile && entry.name.endsWith(\".meta.json\")) {\n const id = entry.name.replace(\".meta.json\", \"\");\n const blobRef = await this.stat(id);\n if (blobRef && blobRef.expiresAt && blobRef.expiresAt < new Date()) {\n logger.debug(`[LocalBlobStorage] Deleting expired blob: ${id}`);\n await this.delete(id);\n }\n }\n }\n } catch (_e) {\n // Directory not found is fine, skip\n continue;\n }\n }\n }\n}\n", "/**\n * S3 Blob Storage\n *\n * Stores blobs in AWS S3.\n *\n * NOTE: This module uses dynamic imports for @aws-sdk/client-s3 to avoid\n * requiring the AWS SDK as a mandatory dependency. 
The SDK is only loaded\n * when S3BlobStorage is instantiated.\n */\n\nimport type { BlobRef, BlobStorage, StoreBlobOptions } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\nimport { isDeno } from \"@veryfront/platform/compat/runtime.ts\";\n\n// Type definitions for AWS SDK (to avoid top-level import)\ntype S3ClientType = import(\"@aws-sdk/client-s3\").S3Client;\ntype PutObjectCommandType = import(\"@aws-sdk/client-s3\").PutObjectCommand;\ntype GetObjectCommandType = import(\"@aws-sdk/client-s3\").GetObjectCommand;\ntype DeleteObjectCommandType = import(\"@aws-sdk/client-s3\").DeleteObjectCommand;\ntype HeadObjectCommandType = import(\"@aws-sdk/client-s3\").HeadObjectCommand;\ntype CreateBucketCommandType = import(\"@aws-sdk/client-s3\").CreateBucketCommand;\n\n// Cached module reference for lazy loading\nlet s3Module: typeof import(\"@aws-sdk/client-s3\") | null = null;\n\n/**\n * Dynamically import the AWS SDK (lazy loading)\n * This allows the module to be loaded without requiring @aws-sdk/client-s3 to be installed\n * unless S3BlobStorage is actually used.\n */\nasync function getS3Module(): Promise<typeof import(\"@aws-sdk/client-s3\")> {\n if (s3Module) {\n return s3Module;\n }\n\n try {\n // Try Deno's esm.sh import first (for Deno runtime)\n if (isDeno) {\n s3Module = await import(\"https://esm.sh/@aws-sdk/client-s3@3.490.0\");\n } else {\n // For Node.js runtime, use bare specifier\n s3Module = await import(\"@aws-sdk/client-s3\");\n }\n return s3Module;\n } catch (error) {\n throw new Error(\n `Failed to load @aws-sdk/client-s3. Please install it: npm install @aws-sdk/client-s3\\n` +\n `Original error: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n}\n\nexport interface S3BlobStorageConfig {\n /** AWS Region */\n region: string;\n /** S3 Bucket name */\n bucket: string;\n /** AWS Access Key ID */\n accessKeyId: string;\n /** AWS Secret Access Key */\n secretAccessKey: string;\n /** Optional S3 endpoint (for localstack or compatible storage) */\n endpoint?: string;\n /** Force path style URLs (required for MinIO/Localstack) */\n forcePathStyle?: boolean;\n /** Key prefix for namespacing blobs */\n prefix?: string;\n /** Base URL for constructing public URLs (if bucket is public) */\n baseUrl?: string;\n /** Default TTL for blobs in seconds */\n defaultTtl?: number;\n /** Automatically create the bucket if it does not exist (useful for local development) */\n autoCreateBucket?: boolean;\n}\n\nexport class S3BlobStorage implements BlobStorage {\n private client: S3ClientType | null = null;\n private config: S3BlobStorageConfig;\n private initPromise: Promise<void> | null = null;\n\n constructor(config: S3BlobStorageConfig) {\n this.config = config;\n // Trigger initialization (but don't await in constructor)\n this.initPromise = this.initialize();\n }\n\n /**\n * Initialize the S3 client asynchronously\n */\n private async initialize(): Promise<void> {\n const { S3Client } = await getS3Module();\n this.client = new S3Client({\n region: this.config.region,\n credentials: {\n accessKeyId: this.config.accessKeyId,\n secretAccessKey: this.config.secretAccessKey,\n },\n endpoint: this.config.endpoint,\n forcePathStyle: this.config.forcePathStyle,\n });\n }\n\n /**\n * Ensure the S3 client is initialized before use\n */\n private async ensureInitialized(): Promise<S3ClientType> {\n if (this.initPromise) {\n await this.initPromise;\n this.initPromise = null;\n }\n if (!this.client) {\n throw new Error(\"S3BlobStorage: Client failed 
to initialize\");\n }\n return this.client;\n }\n\n private getKey(id: string): string {\n return this.config.prefix ? `${this.config.prefix}${id}` : id;\n }\n\n async put(\n data: string | Uint8Array | Blob | ReadableStream,\n options: StoreBlobOptions = {},\n ): Promise<BlobRef> {\n const client = await this.ensureInitialized();\n const { PutObjectCommand, CreateBucketCommand, HeadObjectCommand } = await getS3Module();\n\n const id = options.id || crypto.randomUUID();\n const key = this.getKey(id);\n const mimeType = options.mimeType || \"application/octet-stream\";\n const createdAt = new Date();\n const ttl = options.ttl ?? this.config.defaultTtl;\n const expiresAt = ttl ? new Date(createdAt.getTime() + ttl * 1000) : undefined;\n\n let body: string | Uint8Array | Blob | ReadableStream;\n let contentLength: number | undefined;\n\n if (typeof data === \"string\") {\n body = new TextEncoder().encode(data);\n contentLength = body.byteLength;\n } else if (data instanceof Uint8Array) {\n body = data;\n contentLength = data.byteLength;\n } else if (data instanceof Blob) {\n body = data;\n contentLength = data.size;\n } else if (data instanceof ReadableStream) {\n // For ReadableStream, S3 PutObjectCommand can directly accept it.\n // Content-Length is often required for streams, but sometimes S3 can infer it.\n // If it consistently fails, we might need to buffer the stream or require content-length in options.\n body = data;\n // Cannot determine contentLength easily from ReadableStream without consuming it.\n // If backend requires, user must provide via options.\n } else {\n throw new Error(\"Unsupported data type for S3BlobStorage\");\n }\n\n const putCommand = new PutObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n Body: body,\n ContentType: mimeType,\n ContentLength: contentLength, // Pass if known\n Expires: expiresAt, // S3 uses Expires header for HTTP caches, not lifecycle rules directly\n Metadata: options.metadata, // Custom metadata\n });\n\n try {\n await client.send(putCommand);\n } catch (e: any) {\n if (e.name === \"NoSuchBucket\" && this.config.autoCreateBucket) {\n // Bucket doesn't exist, try to create it\n try {\n await client.send(new CreateBucketCommand({ Bucket: this.config.bucket }));\n // Retry the put operation\n await client.send(putCommand);\n } catch (createError) {\n // If creation fails (e.g., race condition), throw the original error or the new one\n logger.error(\"Failed to auto-create bucket:\", createError);\n throw e;\n }\n } else {\n throw e;\n }\n }\n\n // S3 does not return size directly on PutObject. We can do a HeadObject to get it.\n // Or, for simplicity, use the contentLength we determined or was passed.\n let size = contentLength || 0; // Fallback if stream length is unknown\n if (size === 0) {\n try {\n const headCommand = new HeadObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n });\n const headResult = await client.send(headCommand);\n size = headResult.ContentLength || 0;\n } catch (e) {\n logger.warn(`Could not get size for S3 blob ${key} after put:`, e);\n }\n }\n\n return {\n __kind: \"blob\",\n id,\n size,\n mimeType,\n createdAt,\n expiresAt,\n metadata: options.metadata,\n url: this.config.baseUrl ? 
`${this.config.baseUrl}/${key}` : undefined,\n };\n }\n\n async getStream(id: string): Promise<ReadableStream | null> {\n const client = await this.ensureInitialized();\n const { GetObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n try {\n const getCommand = new GetObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n });\n const response = await client.send(getCommand);\n if (response.Body) {\n // The S3 SDK returns an AsyncIterable (which is also a ReadableStream in Deno)\n return response.Body as ReadableStream;\n }\n return null;\n } catch (e) {\n if (e instanceof Error && e.name === \"NoSuchKey\") {\n return null;\n }\n throw e;\n }\n }\n\n async getText(id: string): Promise<string | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n // @ts-ignore - Deno's ReadableStream vs Web ReadableStream type mismatch\n const response = new Response(stream);\n return await response.text();\n }\n\n async getBytes(id: string): Promise<Uint8Array | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n // @ts-ignore - Deno's ReadableStream vs Web ReadableStream type mismatch\n const response = new Response(stream);\n const buffer = await response.arrayBuffer();\n return new Uint8Array(buffer);\n }\n\n async delete(id: string): Promise<void> {\n const client = await this.ensureInitialized();\n const { DeleteObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n const deleteCommand = new DeleteObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n });\n try {\n await client.send(deleteCommand);\n } catch (e) {\n if (e instanceof Error && e.name === \"NoSuchKey\") {\n // Ignore if trying to delete a non-existent key\n return;\n }\n throw e;\n }\n }\n\n async exists(id: string): Promise<boolean> {\n const client = await this.ensureInitialized();\n const { HeadObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n try {\n await client.send(\n new HeadObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n }),\n );\n return true;\n } catch (e) {\n if (e instanceof Error && e.name === \"NotFound\") {\n return false;\n }\n throw e;\n }\n }\n\n async stat(id: string): Promise<BlobRef | null> {\n const client = await this.ensureInitialized();\n const { HeadObjectCommand } = await getS3Module();\n\n const key = this.getKey(id);\n try {\n const headResult = await client.send(\n new HeadObjectCommand({\n Bucket: this.config.bucket,\n Key: key,\n }),\n );\n\n if (!headResult.LastModified) return null; // Should always be present for existing objects\n\n // Custom metadata is returned as all lowercase keys by S3\n const metadata: Record<string, string> = {};\n const rawMetadata = headResult.Metadata as Record<string, string> | undefined;\n for (const [k, v] of Object.entries(rawMetadata || {})) {\n if (v != null) {\n metadata[k] = v;\n }\n }\n\n let expiresAt: Date | undefined;\n if (headResult.Expires) {\n expiresAt = new Date(headResult.Expires);\n } else if (headResult.Metadata && headResult.Metadata[\"expiresat\"]) {\n // Check for custom expiresAt if stored in metadata\n expiresAt = new Date(headResult.Metadata[\"expiresat\"]!);\n }\n\n // S3 Lifecycle rules or object TTLs are not exposed directly via HeadObject.\n // If `options.ttl` was used in `put`, that TTL is not natively handled by S3 `Expires` header\n // for object lifecycle management (it's for caching).\n // To support TTL, user must configure S3 bucket lifecycle rules separately based on object tags/prefix\n // OR we 
store expiresAt in metadata and rely on cleanup logic (if any) or user to manage.\n // For now, we only populate expiresAt if S3 provides an Expires header (HTTP caching).\n\n return {\n __kind: \"blob\",\n id,\n size: headResult.ContentLength || 0,\n mimeType: headResult.ContentType || \"application/octet-stream\",\n createdAt: headResult.LastModified,\n expiresAt: expiresAt,\n metadata: metadata,\n url: this.config.baseUrl ? `${this.config.baseUrl}/${key}` : undefined,\n };\n } catch (e) {\n if (e instanceof Error && e.name === \"NotFound\") {\n return null;\n }\n throw e;\n }\n }\n}\n", "/**\n * Google Cloud Storage Blob Storage\n *\n * Stores blobs in Google Cloud Storage.\n */\n\nimport type { BlobRef, BlobStorage, StoreBlobOptions } from \"./types.ts\";\n\nexport interface GCSBlobStorageConfig {\n /** Google Cloud Project ID */\n projectId: string;\n /** GCS Bucket name */\n bucket: string;\n /** Google Cloud Service Account Key (JSON string) */\n serviceAccountKey: string;\n /** Key prefix for namespacing blobs */\n prefix?: string;\n /** Base URL for constructing public URLs (if bucket is public) */\n baseUrl?: string;\n /** Default TTL for blobs in seconds */\n defaultTtl?: number;\n}\n\nexport class GCSBlobStorage implements BlobStorage {\n private config: GCSBlobStorageConfig;\n private tokenCache: { accessToken: string; expiresAt: Date } | null = null;\n\n constructor(config: GCSBlobStorageConfig) {\n this.config = config;\n try {\n JSON.parse(this.config.serviceAccountKey);\n } catch {\n throw new Error(\"GCSBlobStorage: serviceAccountKey must be a valid JSON string.\");\n }\n }\n\n private getKey(id: string): string {\n return this.config.prefix ? `${this.config.prefix}${id}` : id;\n }\n\n private async getAccessToken(): Promise<string> {\n if (this.tokenCache && this.tokenCache.expiresAt > new Date()) {\n return this.tokenCache.accessToken;\n }\n\n const sa = JSON.parse(this.config.serviceAccountKey);\n const tokenEndpoint = \"https://oauth2.googleapis.com/token\";\n const scope = \"https://www.googleapis.com/auth/devstorage.full_control\";\n\n const now = Date.now();\n const jwtHeader = btoa(JSON.stringify({ alg: \"RS256\", typ: \"JWT\" }));\n const jwtClaimSet = btoa(JSON.stringify({\n iss: sa.client_email,\n scope: scope,\n aud: tokenEndpoint,\n exp: Math.floor(now / 1000) + 3600, // 1 hour expiration\n iat: Math.floor(now / 1000),\n }));\n\n // This part requires a proper JWT signing library.\n // Deno's native crypto.subtle can sign, but creating the RS256 private key from PKCS8 (PEM)\n // is non-trivial without a dedicated library.\n // For a quick implementation, we will use a placeholder or assume a pre-signed JWT.\n // In a real-world Deno project, you'd use `djwt` or a similar library.\n console.warn(\n \"[GCSBlobStorage] JWT signing for service account requires a library like `djwt`. 
\" +\n \"Proceeding with a placeholder/manual approach, which is not suitable for production.\",\n );\n\n // Placeholder for actual JWT signing\n const signature = \"PLACEHOLDER_SIGNATURE\";\n const jwt = `${jwtHeader}.${jwtClaimSet}.${signature}`;\n\n // This is a simplified approach, a real implementation would correctly sign the JWT\n // and handle key loading from the service account JSON.\n\n const response = await fetch(tokenEndpoint, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n },\n body: new URLSearchParams({\n grant_type: \"urn:ietf:params:oauth:grant-type:jwt-bearer\",\n assertion: jwt,\n }),\n });\n\n if (!response.ok) {\n const error = await response.text();\n throw new Error(`Failed to get GCS access token: ${response.status} - ${error}`);\n }\n\n const data = await response.json();\n const accessToken = data.access_token;\n const expiresIn = data.expires_in; // in seconds\n\n this.tokenCache = {\n accessToken,\n expiresAt: new Date(Date.now() + (expiresIn - 60) * 1000), // Refresh 1 min before actual expiry\n };\n\n return accessToken;\n }\n\n async put(\n data: string | Uint8Array | Blob | ReadableStream,\n options: StoreBlobOptions = {},\n ): Promise<BlobRef> {\n const id = options.id || crypto.randomUUID();\n const key = this.getKey(id);\n const mimeType = options.mimeType || \"application/octet-stream\";\n const createdAt = new Date();\n const ttl = options.ttl ?? this.config.defaultTtl;\n const expiresAt = ttl ? new Date(createdAt.getTime() + ttl * 1000) : undefined;\n\n let body: string | Uint8Array | ReadableStream | Blob;\n let contentLength: number | undefined;\n\n if (typeof data === \"string\") {\n body = new TextEncoder().encode(data);\n contentLength = body.byteLength;\n } else if (data instanceof Uint8Array) {\n body = data;\n contentLength = data.byteLength;\n } else if (data instanceof Blob) {\n body = data;\n contentLength = data.size;\n } else if (data instanceof ReadableStream) {\n body = data;\n // ContentLength cannot be easily determined for ReadableStream without consuming it.\n // GCS can handle chunked uploads without Content-Length, but specifying it is better.\n } else {\n throw new Error(\"Unsupported data type for GCSBlobStorage\");\n }\n\n const token = await this.getAccessToken();\n const uploadUrl =\n `https://storage.googleapis.com/upload/storage/v1/b/${this.config.bucket}/o?uploadType=media&name=${key}`;\n\n const headers: Record<string, string> = {\n \"Authorization\": `Bearer ${token}`,\n \"Content-Type\": mimeType,\n };\n if (contentLength !== undefined) {\n headers[\"Content-Length\"] = String(contentLength);\n }\n\n // Add custom metadata. GCS accepts x-goog-meta- prefix.\n const gcsMetadata: Record<string, string> = {};\n if (options.metadata) {\n for (const [k, v] of Object.entries(options.metadata)) {\n gcsMetadata[`x-goog-meta-${k.toLowerCase()}`] = v;\n }\n }\n if (expiresAt) {\n // Store expiresAt in metadata for stat retrieval, GCS native TTL is via object lifecycle rules\n gcsMetadata[\"x-goog-meta-expiresat\"] = expiresAt.toISOString();\n }\n Object.assign(headers, gcsMetadata);\n\n const response = await fetch(uploadUrl, {\n method: \"POST\",\n headers,\n body: body,\n });\n\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to upload to GCS: ${response.status} - ${response.statusText}. 
Body: ${errorBody}`,\n );\n }\n\n const gcsObject = await response.json();\n\n return {\n __kind: \"blob\",\n id,\n size: Number(gcsObject.size),\n mimeType: gcsObject.contentType,\n createdAt: new Date(gcsObject.timeCreated),\n expiresAt: expiresAt, // Derived from TTL passed or default\n metadata: options.metadata,\n url: this.config.baseUrl ? `${this.config.baseUrl}/${key}` : gcsObject.mediaLink, // mediaLink is the direct download URL\n };\n }\n\n async getStream(id: string): Promise<ReadableStream | null> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const downloadUrl =\n `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}?alt=media`;\n\n try {\n const response = await fetch(downloadUrl, {\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 404) {\n return null;\n }\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to download from GCS: ${response.status} - ${response.statusText}. Body: ${errorBody}`,\n );\n }\n return response.body; // Deno's fetch body is a ReadableStream\n } catch (e) {\n console.error(\"GCS getStream error:\", e);\n throw e;\n }\n }\n\n async getText(id: string): Promise<string | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n const reader = stream.getReader();\n let text = \"\";\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n text += new TextDecoder().decode(value);\n }\n return text;\n }\n\n async getBytes(id: string): Promise<Uint8Array | null> {\n const stream = await this.getStream(id);\n if (!stream) return null;\n const chunks: Uint8Array[] = [];\n const reader = stream.getReader();\n while (true) {\n const { done, value } = await reader.read();\n if (done) break;\n chunks.push(value);\n }\n const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);\n const result = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n }\n\n async delete(id: string): Promise<void> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const deleteUrl = `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}`;\n\n const response = await fetch(deleteUrl, {\n method: \"DELETE\",\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 404) {\n // Object not found, consider it deleted\n return;\n }\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to delete from GCS: ${response.status} - ${response.statusText}. Body: ${errorBody}`,\n );\n }\n }\n\n async exists(id: string): Promise<boolean> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const getUrl =\n `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}?fields=id`;\n\n const response = await fetch(getUrl, {\n method: \"GET\",\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 200) {\n return true;\n }\n if (response.status === 404) {\n return false;\n }\n const errorBody = await response.text();\n throw new Error(\n `Failed to check existence in GCS: ${response.status} - ${response.statusText}. 
Body: ${errorBody}`,\n );\n }\n\n async stat(id: string): Promise<BlobRef | null> {\n const key = this.getKey(id);\n const token = await this.getAccessToken();\n const getUrl = `https://storage.googleapis.com/storage/v1/b/${this.config.bucket}/o/${key}`;\n\n const response = await fetch(getUrl, {\n method: \"GET\",\n headers: {\n \"Authorization\": `Bearer ${token}`,\n },\n });\n\n if (response.status === 404) {\n return null;\n }\n if (!response.ok) {\n const errorBody = await response.text();\n throw new Error(\n `Failed to get metadata from GCS: ${response.status} - ${response.statusText}. Body: ${errorBody}`,\n );\n }\n\n const gcsObject = await response.json();\n\n // Custom metadata is stored with `x-goog-meta-` prefix and is all lowercase\n const metadata: Record<string, string> = {};\n if (gcsObject.metadata) {\n for (const [k, v] of Object.entries(gcsObject.metadata as Record<string, string>)) {\n if (k.startsWith(\"x-goog-meta-\")) {\n metadata[k.replace(\"x-goog-meta-\", \"\")] = v;\n } else {\n metadata[k] = v;\n }\n }\n }\n\n let expiresAt: Date | undefined;\n if (metadata[\"expiresat\"]) {\n expiresAt = new Date(metadata[\"expiresat\"]!); // Retrieve custom expiresAt from metadata\n }\n\n return {\n __kind: \"blob\",\n id,\n size: Number(gcsObject.size),\n mimeType: gcsObject.contentType,\n createdAt: new Date(gcsObject.timeCreated),\n expiresAt: expiresAt, // Populated from custom metadata if available\n metadata: metadata,\n url: gcsObject.mediaLink, // mediaLink is the direct download URL\n };\n }\n}\n", "/**\n * Workflow Backend Interface\n *\n * Defines the contract for workflow persistence and execution backends.\n * Implementations can be:\n * - MemoryBackend (development)\n * - RedisBackend (production)\n * - TemporalAdapter (enterprise)\n * - InngestAdapter (serverless)\n * - CloudflareAdapter (edge)\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n WorkflowStatus as _WorkflowStatus,\n} from \"../types.ts\";\n\n/**\n * Backend configuration options\n */\nexport interface BackendConfig {\n /** Connection URL (for Redis, Postgres, etc.) */\n url?: string;\n /** Key prefix for namespacing */\n prefix?: string;\n /** Default TTL for runs (in milliseconds) */\n defaultTtl?: number;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Lock information for distributed execution\n */\nexport interface Lock {\n /** Lock identifier */\n lockId: string;\n /** Run ID that owns the lock */\n runId: string;\n /** When lock was acquired */\n acquiredAt: Date;\n /** When lock expires */\n expiresAt: Date;\n}\n\n/**\n * Workflow backend interface\n *\n * All backend implementations must implement this interface.\n * Optional methods (marked with ?) 
can be omitted for simpler backends.\n */\nexport interface WorkflowBackend {\n // =========================================================================\n // Run Management\n // =========================================================================\n\n /**\n * Create a new workflow run\n */\n createRun(run: WorkflowRun): Promise<void>;\n\n /**\n * Get a workflow run by ID\n */\n getRun(runId: string): Promise<WorkflowRun | null>;\n\n /**\n * Update a workflow run\n */\n updateRun(runId: string, patch: Partial<WorkflowRun>): Promise<void>;\n\n /**\n * Delete a workflow run\n */\n deleteRun?(runId: string): Promise<void>;\n\n /**\n * List workflow runs with optional filters\n */\n listRuns(filter: RunFilter): Promise<WorkflowRun[]>;\n\n /**\n * Count workflow runs matching filter\n */\n countRuns?(filter: RunFilter): Promise<number>;\n\n // =========================================================================\n // Checkpointing\n // =========================================================================\n\n /**\n * Save a checkpoint for a workflow run\n */\n saveCheckpoint(runId: string, checkpoint: Checkpoint): Promise<void>;\n\n /**\n * Get the latest checkpoint for a workflow run\n */\n getLatestCheckpoint(runId: string): Promise<Checkpoint | null>;\n\n /**\n * Get all checkpoints for a workflow run\n */\n getCheckpoints?(runId: string): Promise<Checkpoint[]>;\n\n /**\n * Delete a specific checkpoint\n */\n deleteCheckpoint?(runId: string, checkpointId: string): Promise<void>;\n\n /**\n * Delete multiple checkpoints by ID\n */\n deleteCheckpoints?(runId: string, checkpointIds: string[]): Promise<void>;\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n /**\n * Save a pending approval request\n */\n savePendingApproval(\n runId: string,\n approval: PendingApproval,\n ): Promise<void>;\n\n /**\n * Get all pending approvals for a workflow run\n */\n getPendingApprovals(runId: string): Promise<PendingApproval[]>;\n\n /**\n * Get a specific pending approval\n */\n getPendingApproval?(\n runId: string,\n approvalId: string,\n ): Promise<PendingApproval | null>;\n\n /**\n * Update an approval with a decision\n */\n updateApproval(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void>;\n\n /**\n * List all pending approvals across workflows\n */\n listPendingApprovals?(filter?: {\n workflowId?: string;\n approver?: string;\n status?: \"pending\" | \"expired\";\n }): Promise<Array<{ runId: string; approval: PendingApproval }>>;\n\n // =========================================================================\n // Queue Operations (optional - for distributed execution)\n // =========================================================================\n\n /**\n * Enqueue a workflow job for processing\n */\n enqueue?(job: WorkflowJob): Promise<void>;\n\n /**\n * Dequeue the next workflow job\n */\n dequeue?(): Promise<WorkflowJob | null>;\n\n /**\n * Acknowledge job completion\n */\n acknowledge?(runId: string): Promise<void>;\n\n /**\n * Negative acknowledge - return job to queue\n */\n nack?(runId: string): Promise<void>;\n\n // =========================================================================\n // Distributed Locking (optional - for distributed execution)\n // =========================================================================\n\n /**\n * Acquire a lock for exclusive workflow execution\n * Returns true if lock was 
acquired, false if already locked\n */\n acquireLock?(runId: string, duration: number): Promise<boolean>;\n\n /**\n * Release a lock\n */\n releaseLock?(runId: string): Promise<void>;\n\n /**\n * Extend lock duration\n */\n extendLock?(runId: string, duration: number): Promise<boolean>;\n\n /**\n * Check if a lock is held\n */\n isLocked?(runId: string): Promise<boolean>;\n\n // =========================================================================\n // Events (optional - for event-driven workflows)\n // =========================================================================\n\n /**\n * Publish an event that waiting workflows can receive\n */\n publishEvent?(\n eventName: string,\n payload: unknown,\n options?: {\n runId?: string; // Target specific run\n workflowId?: string; // Target specific workflow type\n },\n ): Promise<void>;\n\n /**\n * Subscribe to events for a workflow run\n * Returns an async iterator of events\n */\n subscribeEvents?(runId: string): AsyncIterable<{\n eventName: string;\n payload: unknown;\n timestamp: Date;\n }>;\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n /**\n * Initialize the backend (connect to database, etc.)\n */\n initialize?(): Promise<void>;\n\n /**\n * Check if the backend is healthy\n */\n healthCheck?(): Promise<boolean>;\n\n /**\n * Cleanup and close connections\n */\n destroy(): Promise<void>;\n}\n\n/**\n * Backend with queue capabilities\n * Type guard for checking if backend supports queueing\n */\nexport function hasQueueSupport(\n backend: WorkflowBackend,\n): backend is\n & WorkflowBackend\n & Required<Pick<WorkflowBackend, \"enqueue\" | \"dequeue\" | \"acknowledge\">> {\n return (\n typeof backend.enqueue === \"function\" &&\n typeof backend.dequeue === \"function\" &&\n typeof backend.acknowledge === \"function\"\n );\n}\n\n/**\n * Backend with locking capabilities\n * Type guard for checking if backend supports distributed locking\n */\nexport function hasLockSupport(\n backend: WorkflowBackend,\n): backend is WorkflowBackend & Required<Pick<WorkflowBackend, \"acquireLock\" | \"releaseLock\">> {\n return (\n typeof backend.acquireLock === \"function\" &&\n typeof backend.releaseLock === \"function\"\n );\n}\n\n/**\n * Backend with event capabilities\n * Type guard for checking if backend supports events\n */\nexport function hasEventSupport(\n backend: WorkflowBackend,\n): backend is\n & WorkflowBackend\n & Required<Pick<WorkflowBackend, \"publishEvent\" | \"subscribeEvents\">> {\n return (\n typeof backend.publishEvent === \"function\" &&\n typeof backend.subscribeEvents === \"function\"\n );\n}\n", "/**\n * Memory Workflow Backend\n *\n * In-memory implementation of WorkflowBackend for development and testing.\n * Data is NOT persisted across restarts.\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\n\n/**\n * Memory backend configuration\n */\nexport interface MemoryBackendConfig extends BackendConfig {\n /** Maximum queue size (default: 10000) */\n maxQueueSize?: number;\n}\n\n/** Default max queue size */\nconst DEFAULT_MAX_QUEUE_SIZE = 10000;\n\n/**\n * In-memory workflow backend\n *\n * @example\n * ```typescript\n * import { MemoryBackend } from 'veryfront/ai/workflow/backends/memory';\n *\n * const backend = new MemoryBackend();\n * 
```\n */\nexport class MemoryBackend implements WorkflowBackend {\n private runs = new Map<string, WorkflowRun>();\n private checkpoints = new Map<string, Checkpoint[]>();\n private approvals = new Map<string, PendingApproval[]>();\n private queue: WorkflowJob[] = [];\n private locks = new Map<string, { lockId: string; expiresAt: number }>();\n private config: MemoryBackendConfig;\n\n constructor(config: MemoryBackendConfig = {}) {\n this.config = {\n prefix: \"wf:\",\n debug: false,\n maxQueueSize: DEFAULT_MAX_QUEUE_SIZE,\n ...config,\n };\n }\n\n // =========================================================================\n // Run Management\n // =========================================================================\n\n createRun(run: WorkflowRun): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Creating run: ${run.id}`);\n }\n this.runs.set(run.id, structuredClone(run));\n return Promise.resolve();\n }\n\n getRun(runId: string): Promise<WorkflowRun | null> {\n const run = this.runs.get(runId);\n return Promise.resolve(run ? structuredClone(run) : null);\n }\n\n updateRun(runId: string, patch: Partial<WorkflowRun>): Promise<void> {\n const run = this.runs.get(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Updating run: ${runId}`, patch);\n }\n\n // Deep merge the patch\n const updated = {\n ...run,\n ...patch,\n // Deep merge specific fields\n nodeStates: { ...run.nodeStates, ...patch.nodeStates },\n context: { ...run.context, ...patch.context },\n };\n\n this.runs.set(runId, updated);\n return Promise.resolve();\n }\n\n deleteRun(runId: string): Promise<void> {\n this.runs.delete(runId);\n this.checkpoints.delete(runId);\n this.approvals.delete(runId);\n return Promise.resolve();\n }\n\n listRuns(filter: RunFilter): Promise<WorkflowRun[]> {\n let runs = Array.from(this.runs.values());\n\n // Apply filters\n if (filter.workflowId) {\n runs = runs.filter((r) => r.workflowId === filter.workflowId);\n }\n\n if (filter.status) {\n const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];\n runs = runs.filter((r) => statuses.includes(r.status));\n }\n\n if (filter.createdAfter) {\n runs = runs.filter((r) => r.createdAt >= filter.createdAfter!);\n }\n\n if (filter.createdBefore) {\n runs = runs.filter((r) => r.createdAt <= filter.createdBefore!);\n }\n\n // Sort by creation date (newest first)\n runs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());\n\n // Apply pagination (offset and limit together)\n const start = filter.offset ?? 0;\n const end = filter.limit ? 
start + filter.limit : undefined;\n runs = runs.slice(start, end);\n\n return Promise.resolve(runs.map((r) => structuredClone(r)));\n }\n\n async countRuns(filter: RunFilter): Promise<number> {\n const runs = await this.listRuns({ ...filter, limit: undefined, offset: undefined });\n return runs.length;\n }\n\n // =========================================================================\n // Checkpointing\n // =========================================================================\n\n saveCheckpoint(runId: string, checkpoint: Checkpoint): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Saving checkpoint: ${checkpoint.id} for run ${runId}`);\n }\n\n const existing = this.checkpoints.get(runId) || [];\n existing.push(structuredClone(checkpoint));\n this.checkpoints.set(runId, existing);\n return Promise.resolve();\n }\n\n getLatestCheckpoint(runId: string): Promise<Checkpoint | null> {\n const checkpoints = this.checkpoints.get(runId);\n if (!checkpoints || checkpoints.length === 0) {\n return Promise.resolve(null);\n }\n\n // Return the most recent checkpoint\n const latest = checkpoints[checkpoints.length - 1];\n return Promise.resolve(latest ? structuredClone(latest) : null);\n }\n\n getCheckpoints(runId: string): Promise<Checkpoint[]> {\n const checkpoints = this.checkpoints.get(runId) || [];\n return Promise.resolve(checkpoints.map((c) => structuredClone(c)));\n }\n\n deleteCheckpoint(runId: string, checkpointId: string): Promise<void> {\n const checkpoints = this.checkpoints.get(runId);\n if (!checkpoints) {\n return Promise.resolve();\n }\n\n const index = checkpoints.findIndex((c) => c.id === checkpointId);\n if (index !== -1) {\n checkpoints.splice(index, 1);\n if (this.config.debug) {\n console.log(`[MemoryBackend] Deleted checkpoint: ${checkpointId}`);\n }\n }\n return Promise.resolve();\n }\n\n deleteCheckpoints(runId: string, checkpointIds: string[]): Promise<void> {\n const checkpoints = this.checkpoints.get(runId);\n if (!checkpoints) {\n return Promise.resolve();\n }\n\n const idsToDelete = new Set(checkpointIds);\n const filtered = checkpoints.filter((c) => !idsToDelete.has(c.id));\n this.checkpoints.set(runId, filtered);\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Deleted ${checkpointIds.length} checkpoints`);\n }\n return Promise.resolve();\n }\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n savePendingApproval(\n runId: string,\n approval: PendingApproval,\n ): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Saving approval: ${approval.id} for run ${runId}`);\n }\n\n const existing = this.approvals.get(runId) || [];\n existing.push(structuredClone(approval));\n this.approvals.set(runId, existing);\n return Promise.resolve();\n }\n\n getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n const approvals = this.approvals.get(runId) || [];\n return Promise.resolve(\n approvals\n .filter((a) => a.status === \"pending\")\n .map((a) => structuredClone(a)),\n );\n }\n\n getPendingApproval(\n runId: string,\n approvalId: string,\n ): Promise<PendingApproval | null> {\n const approvals = this.approvals.get(runId) || [];\n const approval = approvals.find((a) => a.id === approvalId);\n return Promise.resolve(approval ? 
structuredClone(approval) : null);\n }\n\n updateApproval(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void> {\n const approvals = this.approvals.get(runId);\n if (!approvals) {\n throw new Error(`No approvals found for run: ${runId}`);\n }\n\n const approval = approvals.find((a) => a.id === approvalId);\n if (!approval) {\n throw new Error(`Approval not found: ${approvalId}`);\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Updating approval: ${approvalId}`, decision);\n }\n\n approval.status = decision.approved ? \"approved\" : \"rejected\";\n approval.decidedBy = decision.approver;\n approval.decidedAt = new Date();\n approval.comment = decision.comment;\n return Promise.resolve();\n }\n\n listPendingApprovals(filter?: {\n workflowId?: string;\n approver?: string;\n status?: \"pending\" | \"expired\";\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n const result: Array<{ runId: string; approval: PendingApproval }> = [];\n\n for (const [runId, approvals] of this.approvals) {\n const run = this.runs.get(runId);\n if (!run) continue;\n\n if (filter?.workflowId && run.workflowId !== filter.workflowId) {\n continue;\n }\n\n for (const approval of approvals) {\n // Check status\n if (filter?.status === \"pending\" && approval.status !== \"pending\") {\n continue;\n }\n\n if (filter?.status === \"expired\") {\n const isExpired = approval.expiresAt && new Date() > approval.expiresAt;\n if (!isExpired) continue;\n }\n\n // Check approver\n if (\n filter?.approver &&\n approval.approvers &&\n !approval.approvers.includes(filter.approver)\n ) {\n continue;\n }\n\n result.push({ runId, approval: structuredClone(approval) });\n }\n }\n\n return Promise.resolve(result);\n }\n\n // =========================================================================\n // Queue Operations\n // =========================================================================\n\n enqueue(job: WorkflowJob): Promise<void> {\n // Check queue size limit\n const maxSize = this.config.maxQueueSize ?? DEFAULT_MAX_QUEUE_SIZE;\n if (this.queue.length >= maxSize) {\n return Promise.reject(\n new Error(`Queue full (max: ${maxSize}). Cannot enqueue job: ${job.runId}`),\n );\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Enqueueing job: ${job.runId}`);\n }\n\n // Insert based on priority (higher priority first)\n const priority = job.priority ?? 0;\n const insertIndex = this.queue.findIndex((j) => (j.priority ?? 0) < priority);\n\n if (insertIndex === -1) {\n this.queue.push(structuredClone(job));\n } else {\n this.queue.splice(insertIndex, 0, structuredClone(job));\n }\n return Promise.resolve();\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n const job = this.queue.shift();\n return Promise.resolve(job ? 
structuredClone(job) : null);\n }\n\n acknowledge(runId: string): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Acknowledging job: ${runId}`);\n }\n // For memory backend, acknowledgment is a no-op\n // The job is already removed from queue on dequeue\n return Promise.resolve();\n }\n\n async nack(runId: string): Promise<void> {\n // Re-enqueue the job\n const run = await this.getRun(runId);\n if (run) {\n await this.enqueue({\n runId: run.id,\n workflowId: run.workflowId,\n input: run.input,\n createdAt: new Date(),\n });\n }\n }\n\n // =========================================================================\n // Distributed Locking\n // =========================================================================\n\n acquireLock(runId: string, duration: number): Promise<boolean> {\n const existing = this.locks.get(runId);\n const now = Date.now();\n\n // If lock exists and hasn't expired, fail to acquire\n if (existing && existing.expiresAt > now) {\n return Promise.resolve(false);\n }\n\n if (this.config.debug) {\n console.log(`[MemoryBackend] Acquiring lock for: ${runId}`);\n }\n\n this.locks.set(runId, {\n lockId: crypto.randomUUID(),\n expiresAt: now + duration,\n });\n\n return Promise.resolve(true);\n }\n\n releaseLock(runId: string): Promise<void> {\n if (this.config.debug) {\n console.log(`[MemoryBackend] Releasing lock for: ${runId}`);\n }\n this.locks.delete(runId);\n return Promise.resolve();\n }\n\n extendLock(runId: string, duration: number): Promise<boolean> {\n const existing = this.locks.get(runId);\n const now = Date.now();\n\n if (!existing || existing.expiresAt <= now) {\n return Promise.resolve(false);\n }\n\n existing.expiresAt = now + duration;\n return Promise.resolve(true);\n }\n\n isLocked(runId: string): Promise<boolean> {\n const existing = this.locks.get(runId);\n return Promise.resolve(!!existing && existing.expiresAt > Date.now());\n }\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n initialize(): Promise<void> {\n if (this.config.debug) {\n console.log(\"[MemoryBackend] Initialized\");\n }\n return Promise.resolve();\n }\n\n healthCheck(): Promise<boolean> {\n return Promise.resolve(true);\n }\n\n destroy(): Promise<void> {\n this.runs.clear();\n this.checkpoints.clear();\n this.approvals.clear();\n this.queue = [];\n this.locks.clear();\n\n if (this.config.debug) {\n console.log(\"[MemoryBackend] Destroyed\");\n }\n return Promise.resolve();\n }\n\n // =========================================================================\n // Development Helpers\n // =========================================================================\n\n /**\n * Get statistics about the backend (for debugging)\n */\n getStats(): {\n runs: number;\n checkpoints: number;\n approvals: number;\n queueLength: number;\n locks: number;\n } {\n let totalCheckpoints = 0;\n let totalApprovals = 0;\n\n for (const checkpoints of this.checkpoints.values()) {\n totalCheckpoints += checkpoints.length;\n }\n\n for (const approvals of this.approvals.values()) {\n totalApprovals += approvals.length;\n }\n\n return {\n runs: this.runs.size,\n checkpoints: totalCheckpoints,\n approvals: totalApprovals,\n queueLength: this.queue.length,\n locks: this.locks.size,\n };\n }\n\n /**\n * Clear all data (for testing)\n */\n clear(): Promise<void> {\n this.runs.clear();\n this.checkpoints.clear();\n this.approvals.clear();\n this.queue = [];\n 
this.locks.clear();\n return Promise.resolve();\n }\n}\n", "/**\n * Redis Workflow Backend\n *\n * Production-grade Redis implementation of WorkflowBackend.\n * Uses Redis hashes for state storage and Redis Streams for job queuing.\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n WorkflowStatus,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\nimport { isDeno } from \"@veryfront/platform/compat/runtime.ts\";\n\n// Lazy-loaded Redis client modules (loaded only when Redis backend is used)\n// @ts-ignore - Deno global\nlet DenoRedis: any = null;\nlet NodeRedis: any = null;\n\n/**\n * Lazily load the Redis module for the current runtime.\n * This ensures the redis package is only required when the Redis backend is actually used.\n *\n * NOTE: We construct module names dynamically to prevent Deno's static analyzer\n * from pre-fetching these optional dependencies during lint/check tasks.\n */\nasync function getRedisModule(): Promise<{ DenoRedis: any; NodeRedis: any }> {\n // Return cached modules if already loaded\n if (DenoRedis || NodeRedis) {\n return { DenoRedis, NodeRedis };\n }\n\n if (isDeno) {\n try {\n // Construct URL dynamically to prevent static analysis from pre-fetching\n const denoRedisUrl = [\"https://deno.land/x/redis\", \"@v0.32.1/mod.ts\"].join(\"\");\n // @ts-ignore - Deno global\n DenoRedis = await import(denoRedisUrl);\n } catch (error) {\n throw new Error(\n `Failed to load Deno Redis module. Error: ${\n error instanceof Error ? error.message : String(error)\n }`,\n );\n }\n } else {\n try {\n // Construct module name dynamically to prevent Deno static analyzer\n // from trying to resolve this npm package during lint/check\n const redisModuleName = [\"re\", \"dis\"].join(\"\");\n NodeRedis = await import(redisModuleName);\n } catch (error) {\n throw new Error(\n `Failed to load 'redis' package. Please install it with: npm install redis\\n` +\n `Error: ${error instanceof Error ? 
error.message : String(error)}`,\n );\n }\n }\n\n return { DenoRedis, NodeRedis };\n}\n\n/**\n * Standardized Redis Adapter Interface\n * Normalizes differences between Deno and Node Redis clients\n */\nexport interface RedisAdapter {\n // Hash operations\n hset(key: string, fields: Record<string, string>): Promise<number | string>;\n hgetall(key: string): Promise<Record<string, string>>;\n hdel(key: string, ...fields: string[]): Promise<number>;\n del(...keys: string[]): Promise<number>;\n\n // Set operations (for indexing)\n sadd(key: string, ...members: string[]): Promise<number>;\n srem(key: string, ...members: string[]): Promise<number>;\n smembers(key: string): Promise<string[]>;\n\n // List operations (for checkpoints)\n rpush(key: string, ...values: string[]): Promise<number>;\n lrange(key: string, start: number, stop: number): Promise<string[]>;\n lindex(key: string, index: number): Promise<string | null>;\n lset(key: string, index: number, value: string): Promise<string | \"OK\">;\n llen(key: string): Promise<number>;\n\n // Stream operations\n xadd(key: string, id: string, fields: Record<string, string>): Promise<string>;\n xgroupCreate(key: string, group: string, id: string, mkstream?: boolean): Promise<string>;\n xreadgroup(\n streams: Array<{ key: string; xid: string }>,\n options: { group: string; consumer: string; block?: number; count?: number },\n ): Promise<Array<{ key: string; messages: Array<{ id: string; data: Record<string, string> }> }>>;\n xack(key: string, group: string, ...ids: string[]): Promise<number>;\n\n // Key operations\n keys(pattern: string): Promise<string[]>;\n exists(...keys: string[]): Promise<number>;\n expire(key: string, seconds: number): Promise<number>;\n\n // Lock operations (using SET with NX and PX)\n set(\n key: string,\n value: string,\n options?: { nx?: boolean; px?: number; ex?: number },\n ): Promise<string | null>;\n get(key: string): Promise<string | null>;\n\n // Connection\n quit(): Promise<void>;\n disconnect(): Promise<void>;\n}\n\n// Helper to convert array [k1, v1, k2, v2] to object\nfunction arrayToObject(arr: string[]): Record<string, string> {\n const obj: Record<string, string> = {};\n for (let i = 0; i < arr.length; i += 2) {\n const key = arr[i];\n const value = arr[i + 1];\n if (key && value !== undefined) {\n obj[key] = value;\n }\n }\n return obj;\n}\n\n/**\n * Adapter for Node.js 'redis' package\n */\nclass NodeRedisAdapter implements RedisAdapter {\n constructor(private client: any) {}\n\n async hset(key: string, fields: Record<string, string>): Promise<number | string> {\n return await this.client.hSet(key, fields);\n }\n\n async hgetall(key: string): Promise<Record<string, string>> {\n return await this.client.hGetAll(key);\n }\n\n async hdel(key: string, ...fields: string[]): Promise<number> {\n return await this.client.hDel(key, fields);\n }\n\n async del(...keys: string[]): Promise<number> {\n return await this.client.del(keys);\n }\n\n async sadd(key: string, ...members: string[]): Promise<number> {\n return await this.client.sAdd(key, members);\n }\n\n async srem(key: string, ...members: string[]): Promise<number> {\n return await this.client.sRem(key, members);\n }\n\n async smembers(key: string): Promise<string[]> {\n return await this.client.sMembers(key);\n }\n\n async rpush(key: string, ...values: string[]): Promise<number> {\n return await this.client.rPush(key, values);\n }\n\n async lrange(key: string, start: number, stop: number): Promise<string[]> {\n return await this.client.lRange(key, start, 
stop);\n }\n\n async lindex(key: string, index: number): Promise<string | null> {\n return await this.client.lIndex(key, index);\n }\n\n async lset(key: string, index: number, value: string): Promise<string | \"OK\"> {\n return await this.client.lSet(key, index, value);\n }\n\n async llen(key: string): Promise<number> {\n return await this.client.lLen(key);\n }\n\n async xadd(key: string, id: string, fields: Record<string, string>): Promise<string> {\n return await this.client.xAdd(key, id, fields);\n }\n\n async xgroupCreate(key: string, group: string, id: string, mkstream?: boolean): Promise<string> {\n return await this.client.xGroupCreate(key, group, id, { MKSTREAM: mkstream });\n }\n\n async xreadgroup(\n streams: Array<{ key: string; xid: string }>,\n options: { group: string; consumer: string; block?: number; count?: number },\n ): Promise<\n Array<{ key: string; messages: Array<{ id: string; data: Record<string, string> }> }>\n > {\n // Node redis format: { key: string, messages: Array<{ id: string, message: Record<string, string> }> }\n // OR if single stream: Array<{ id: string, message: Record<string, string> }> ??\n // The node-redis v4 API is slightly different.\n // Assuming commandOptions style:\n const result = await this.client.xReadGroup(\n options.group,\n options.consumer,\n streams.map((s) => ({ key: s.key, id: s.xid })),\n {\n BLOCK: options.block,\n COUNT: options.count,\n },\n );\n\n if (!result) return [];\n\n // Normalize output\n // node-redis v4 returns: Array<{ name: string, messages: Array<{ id: string, message: Record<string, string> }> }>\n return (result as any[]).map((stream: any) => ({\n key: stream.name,\n messages: stream.messages.map((msg: any) => ({\n id: msg.id,\n data: msg.message,\n })),\n }));\n }\n\n async xack(key: string, group: string, ...ids: string[]): Promise<number> {\n return await this.client.xAck(key, group, ids);\n }\n\n async keys(pattern: string): Promise<string[]> {\n return await this.client.keys(pattern);\n }\n\n async exists(...keys: string[]): Promise<number> {\n return await this.client.exists(keys);\n }\n\n async expire(key: string, seconds: number): Promise<number> {\n return await this.client.expire(key, seconds);\n }\n\n async set(\n key: string,\n value: string,\n options?: { nx?: boolean; px?: number; ex?: number },\n ): Promise<string | null> {\n const opts: any = {};\n if (options?.nx) opts.NX = true;\n if (options?.px) opts.PX = options.px;\n if (options?.ex) opts.EX = options.ex;\n return await this.client.set(key, value, opts);\n }\n\n async get(key: string): Promise<string | null> {\n return await this.client.get(key);\n }\n\n async quit(): Promise<void> {\n await this.client.quit();\n }\n\n async disconnect(): Promise<void> {\n await this.client.disconnect();\n }\n}\n\n/**\n * Adapter for Deno 'redis' module\n */\nclass DenoRedisAdapter implements RedisAdapter {\n constructor(private client: any) {}\n\n async hset(key: string, fields: Record<string, string>): Promise<number | string> {\n return await this.client.hset(key, fields);\n }\n\n async hgetall(key: string): Promise<Record<string, string>> {\n const res = await this.client.hgetall(key);\n // Deno redis returns array [k1, v1, k2, v2]\n return arrayToObject(res);\n }\n\n async hdel(key: string, ...fields: string[]): Promise<number> {\n return await this.client.hdel(key, ...fields);\n }\n\n async del(...keys: string[]): Promise<number> {\n return await this.client.del(...keys);\n }\n\n async sadd(key: string, ...members: string[]): Promise<number> {\n return 
await this.client.sadd(key, ...members);\n }\n\n async srem(key: string, ...members: string[]): Promise<number> {\n return await this.client.srem(key, ...members);\n }\n\n async smembers(key: string): Promise<string[]> {\n return await this.client.smembers(key);\n }\n\n async rpush(key: string, ...values: string[]): Promise<number> {\n return await this.client.rpush(key, ...values);\n }\n\n async lrange(key: string, start: number, stop: number): Promise<string[]> {\n return await this.client.lrange(key, start, stop);\n }\n\n async lindex(key: string, index: number): Promise<string | null> {\n return await this.client.lindex(key, index);\n }\n\n async lset(key: string, index: number, value: string): Promise<string | \"OK\"> {\n return await this.client.lset(key, index, value);\n }\n\n async llen(key: string): Promise<number> {\n return await this.client.llen(key);\n }\n\n async xadd(key: string, id: string, fields: Record<string, string>): Promise<string> {\n return await this.client.xadd(key, id, fields);\n }\n\n async xgroupCreate(key: string, group: string, id: string, mkstream?: boolean): Promise<string> {\n return await this.client.xgroupCreate(key, group, id, mkstream);\n }\n\n async xreadgroup(\n streams: Array<{ key: string; xid: string }>,\n options: { group: string; consumer: string; block?: number; count?: number },\n ): Promise<\n Array<{ key: string; messages: Array<{ id: string; data: Record<string, string> }> }>\n > {\n if (streams.length === 0) return [];\n\n // Deno redis returns: Array<{ key: string, messages: Array<{ id: string, fieldValues: string[] }> }>\n const res = await this.client.xreadgroup(\n streams.map((s) => ({ key: s.key, xid: s.xid })),\n options,\n );\n\n if (!res) return [];\n\n return (res as any[]).map((stream: any) => ({\n key: stream.key,\n messages: stream.messages.map((msg: any) => ({\n id: msg.id,\n data: arrayToObject(msg.fieldValues),\n })),\n }));\n }\n\n async xack(key: string, group: string, ...ids: string[]): Promise<number> {\n return await this.client.xack(key, group, ...ids);\n }\n\n async keys(pattern: string): Promise<string[]> {\n return await this.client.keys(pattern);\n }\n\n async exists(...keys: string[]): Promise<number> {\n return await this.client.exists(...keys);\n }\n\n async expire(key: string, seconds: number): Promise<number> {\n return await this.client.expire(key, seconds);\n }\n\n async set(\n key: string,\n value: string,\n options?: { nx?: boolean; px?: number; ex?: number },\n ): Promise<string | null> {\n return await this.client.set(key, value, options);\n }\n\n async get(key: string): Promise<string | null> {\n return await this.client.get(key);\n }\n\n async quit(): Promise<void> {\n await this.client.close(); // Deno redis uses close\n }\n\n async disconnect(): Promise<void> {\n await this.client.close();\n }\n}\n\n/**\n * Redis backend configuration\n */\nexport interface RedisBackendConfig extends BackendConfig {\n /** Redis connection URL or config */\n url?: string;\n /** Redis hostname */\n hostname?: string;\n /** Redis port */\n port?: number;\n /** Key prefix for namespacing */\n prefix?: string;\n /** Stream name for job queue */\n streamKey?: string;\n /** Consumer group name */\n groupName?: string;\n /** Consumer name (unique per worker) */\n consumerName?: string;\n /** Default TTL for runs (in seconds) */\n runTtl?: number;\n /** Enable debug logging */\n debug?: boolean;\n /** Existing Redis client (optional) */\n client?: RedisAdapter;\n}\n\n/**\n * Redis Workflow Backend\n */\nexport class 
RedisBackend implements WorkflowBackend {\n private client: RedisAdapter | null = null;\n private connectionPromise: Promise<RedisAdapter> | null = null;\n private config:\n & Required<\n Pick<RedisBackendConfig, \"prefix\" | \"streamKey\" | \"groupName\" | \"consumerName\" | \"debug\">\n >\n & RedisBackendConfig;\n private initialized = false;\n\n constructor(config: RedisBackendConfig = {}) {\n this.config = {\n prefix: \"vf:workflow:\",\n streamKey: \"vf:workflow:stream\",\n groupName: \"vf:workflow:workers\",\n consumerName: `worker-${crypto.randomUUID().slice(0, 8)}`,\n debug: false,\n ...config,\n };\n\n // Use provided client if available\n if (config.client) {\n this.client = config.client;\n }\n }\n\n // =========================================================================\n // Key Generation\n // =========================================================================\n\n private runKey(runId: string): string {\n return `${this.config.prefix}run:${runId}`;\n }\n\n private checkpointsKey(runId: string): string {\n return `${this.config.prefix}checkpoints:${runId}`;\n }\n\n private approvalsKey(runId: string): string {\n return `${this.config.prefix}approvals:${runId}`;\n }\n\n private statusIndexKey(status: WorkflowStatus): string {\n return `${this.config.prefix}index:status:${status}`;\n }\n\n private workflowIndexKey(workflowId: string): string {\n return `${this.config.prefix}index:workflow:${workflowId}`;\n }\n\n private lockKey(runId: string): string {\n return `${this.config.prefix}lock:${runId}`;\n }\n\n // =========================================================================\n // Serialization\n // =========================================================================\n\n private serializeRun(run: WorkflowRun): Record<string, string> {\n return {\n id: run.id,\n workflowId: run.workflowId,\n version: run.version || \"\",\n status: run.status,\n input: JSON.stringify(run.input),\n output: run.output !== undefined ? JSON.stringify(run.output) : \"\",\n nodeStates: JSON.stringify(run.nodeStates),\n currentNodes: JSON.stringify(run.currentNodes),\n context: JSON.stringify(run.context),\n error: run.error ? JSON.stringify(run.error) : \"\",\n createdAt: run.createdAt.toISOString(),\n startedAt: run.startedAt?.toISOString() || \"\",\n completedAt: run.completedAt?.toISOString() || \"\",\n };\n }\n\n private deserializeRun(data: Record<string, string>): WorkflowRun {\n // Validate required fields\n if (!data.id) {\n throw new Error(\"Invalid workflow run data: missing 'id' field\");\n }\n if (!data.workflowId) {\n throw new Error(`Invalid workflow run data for run \"${data.id}\": missing 'workflowId' field`);\n }\n\n // Validate status is a known value\n const validStatuses: WorkflowStatus[] = [\n \"pending\",\n \"running\",\n \"completed\",\n \"failed\",\n \"cancelled\",\n \"waiting\",\n ];\n const status = data.status as WorkflowStatus;\n if (data.status && !validStatuses.includes(status)) {\n throw new Error(\n `Invalid workflow run data for run \"${data.id}\": unknown status \"${data.status}\". ` +\n `Expected one of: ${validStatuses.join(\", \")}`,\n );\n }\n\n // Safely parse JSON fields with error context\n const safeJsonParse = <T>(field: string, value: string | undefined, defaultValue: T): T => {\n if (!value) return defaultValue;\n try {\n return JSON.parse(value) as T;\n } catch (e) {\n throw new Error(\n `Invalid workflow run data for run \"${data.id}\": failed to parse '${field}' as JSON. ` +\n `Error: ${e instanceof Error ? 
e.message : String(e)}`,\n );\n }\n };\n\n return {\n id: data.id,\n workflowId: data.workflowId,\n version: data.version || undefined,\n status: status ?? \"pending\",\n input: safeJsonParse(\"input\", data.input, undefined),\n output: safeJsonParse(\"output\", data.output, undefined),\n nodeStates: safeJsonParse(\"nodeStates\", data.nodeStates, {}),\n currentNodes: safeJsonParse(\"currentNodes\", data.currentNodes, []),\n context: safeJsonParse(\"context\", data.context, { input: undefined }),\n checkpoints: [], // Loaded separately\n pendingApprovals: [], // Loaded separately\n error: safeJsonParse(\"error\", data.error, undefined),\n createdAt: data.createdAt ? new Date(data.createdAt) : new Date(),\n startedAt: data.startedAt ? new Date(data.startedAt) : undefined,\n completedAt: data.completedAt ? new Date(data.completedAt) : undefined,\n };\n }\n\n // =========================================================================\n // Connection Management\n // =========================================================================\n\n private ensureClient(): Promise<RedisAdapter> {\n // Return existing client if available\n if (this.client) {\n return Promise.resolve(this.client);\n }\n\n // Use existing connection promise to prevent race conditions\n // Multiple concurrent calls will share the same connection promise\n if (!this.connectionPromise) {\n this.connectionPromise = this.createConnection();\n }\n\n return this.connectionPromise;\n }\n\n /**\n * Create a new Redis connection\n */\n private async createConnection(): Promise<RedisAdapter> {\n // Lazily load the Redis module for the current runtime\n const { DenoRedis: denoRedis, NodeRedis: nodeRedis } = await getRedisModule();\n\n if (nodeRedis) {\n const client = nodeRedis.createClient({\n url: this.config.url,\n socket: {\n host: this.config.hostname,\n port: this.config.port,\n },\n });\n await client.connect();\n this.client = new NodeRedisAdapter(client);\n } else if (denoRedis) {\n const client = await denoRedis.connect({\n hostname: this.config.hostname,\n port: this.config.port,\n });\n this.client = new DenoRedisAdapter(client);\n } else {\n throw new Error(\"No Redis client available for this runtime.\");\n }\n\n const hostname = this.config.hostname || \"127.0.0.1\";\n const port = this.config.port || 6379;\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Connecting to ${hostname}:${port}`);\n }\n\n // Ensure client is not null for TS\n return this.client!;\n }\n\n async initialize(): Promise<void> {\n if (this.initialized) return;\n\n const client = await this.ensureClient();\n\n // Create consumer group for stream\n try {\n await client.xgroupCreate(\n this.config.streamKey,\n this.config.groupName,\n \"0\",\n true,\n );\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Created consumer group: ${this.config.groupName}`);\n }\n } catch (e) {\n const msg = String(e instanceof Error ? 
e.message : e);\n if (!msg.includes(\"BUSYGROUP\")) {\n logger.error(\"[RedisBackend] Error creating consumer group:\", e);\n }\n }\n\n this.initialized = true;\n }\n\n // =========================================================================\n // Run Management\n // =========================================================================\n\n async createRun(run: WorkflowRun): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Creating run: ${run.id}`);\n }\n\n // Store run in hash\n await client.hset(this.runKey(run.id), this.serializeRun(run));\n\n // Add to indexes\n await client.sadd(this.statusIndexKey(run.status), run.id);\n await client.sadd(this.workflowIndexKey(run.workflowId), run.id);\n\n // Set TTL if configured\n if (this.config.runTtl) {\n await client.expire(this.runKey(run.id), this.config.runTtl);\n }\n }\n\n async getRun(runId: string): Promise<WorkflowRun | null> {\n const client = await this.ensureClient();\n const data = await client.hgetall(this.runKey(runId));\n\n if (!data || Object.keys(data).length === 0) {\n return null;\n }\n\n const run = this.deserializeRun(data);\n\n // Load approvals\n run.pendingApprovals = await this.getPendingApprovals(runId);\n\n return run;\n }\n\n async updateRun(runId: string, patch: Partial<WorkflowRun>): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Updating run: ${runId}`);\n }\n\n // Get current status for index update\n const currentRun = await this.getRun(runId);\n const oldStatus = currentRun?.status;\n\n // Build fields to update\n const fields: Record<string, string> = {};\n\n if (patch.status !== undefined) fields.status = patch.status;\n if (patch.output !== undefined) fields.output = JSON.stringify(patch.output);\n if (patch.nodeStates !== undefined) fields.nodeStates = JSON.stringify(patch.nodeStates);\n if (patch.currentNodes !== undefined) fields.currentNodes = JSON.stringify(patch.currentNodes);\n if (patch.context !== undefined) fields.context = JSON.stringify(patch.context);\n if (patch.error !== undefined) fields.error = JSON.stringify(patch.error);\n if (patch.startedAt !== undefined) fields.startedAt = patch.startedAt.toISOString();\n if (patch.completedAt !== undefined) fields.completedAt = patch.completedAt.toISOString();\n\n if (Object.keys(fields).length > 0) {\n await client.hset(this.runKey(runId), fields);\n }\n\n // Update status index\n if (patch.status && oldStatus && patch.status !== oldStatus) {\n await client.srem(this.statusIndexKey(oldStatus), runId);\n await client.sadd(this.statusIndexKey(patch.status), runId);\n }\n }\n\n async deleteRun(runId: string): Promise<void> {\n const client = await this.ensureClient();\n\n // Get run for index cleanup\n const run = await this.getRun(runId);\n if (!run) return;\n\n // Delete run data\n await client.del(\n this.runKey(runId),\n this.checkpointsKey(runId),\n this.approvalsKey(runId),\n );\n\n // Remove from indexes\n await client.srem(this.statusIndexKey(run.status), runId);\n await client.srem(this.workflowIndexKey(run.workflowId), runId);\n }\n\n async listRuns(filter: RunFilter): Promise<WorkflowRun[]> {\n const client = await this.ensureClient();\n let runIds: string[] = [];\n\n // Get run IDs from indexes\n if (filter.workflowId) {\n runIds = await client.smembers(this.workflowIndexKey(filter.workflowId));\n } else if (filter.status) {\n const statuses = Array.isArray(filter.status) ? 
filter.status : [filter.status];\n for (const status of statuses) {\n const ids = await client.smembers(this.statusIndexKey(status));\n runIds.push(...ids);\n }\n // Deduplicate\n runIds = [...new Set(runIds)];\n } else {\n // Get all runs (expensive - should use cursor in production)\n const keys = await client.keys(`${this.config.prefix}run:*`);\n runIds = keys.map((k) => k.replace(`${this.config.prefix}run:`, \"\"));\n }\n\n // Load runs\n const runs: WorkflowRun[] = [];\n for (const runId of runIds) {\n const run = await this.getRun(runId);\n if (!run) continue;\n\n // Apply filters\n if (filter.status) {\n const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];\n if (!statuses.includes(run.status)) continue;\n }\n\n if (filter.createdAfter && run.createdAt < filter.createdAfter) continue;\n if (filter.createdBefore && run.createdAt > filter.createdBefore) continue;\n\n runs.push(run);\n }\n\n // Sort by creation date (newest first)\n runs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());\n\n // Apply pagination\n let result = runs;\n if (filter.offset) {\n result = result.slice(filter.offset);\n }\n if (filter.limit) {\n result = result.slice(0, filter.limit);\n }\n\n return result;\n }\n\n async countRuns(filter: RunFilter): Promise<number> {\n const runs = await this.listRuns({ ...filter, limit: undefined, offset: undefined });\n return runs.length;\n }\n\n // =========================================================================\n // Checkpointing\n // =========================================================================\n\n async saveCheckpoint(runId: string, checkpoint: Checkpoint): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Saving checkpoint: ${checkpoint.id}`);\n }\n\n const serialized = JSON.stringify({\n ...checkpoint,\n timestamp: checkpoint.timestamp.toISOString(),\n });\n\n await client.rpush(this.checkpointsKey(runId), serialized);\n }\n\n async getLatestCheckpoint(runId: string): Promise<Checkpoint | null> {\n const client = await this.ensureClient();\n\n // Get last element\n const raw = await client.lindex(this.checkpointsKey(runId), -1);\n if (!raw) return null;\n\n const data = JSON.parse(raw);\n return {\n ...data,\n timestamp: new Date(data.timestamp),\n };\n }\n\n async getCheckpoints(runId: string): Promise<Checkpoint[]> {\n const client = await this.ensureClient();\n\n const rawList = await client.lrange(this.checkpointsKey(runId), 0, -1);\n\n return rawList.map((raw) => {\n const data = JSON.parse(raw);\n return {\n ...data,\n timestamp: new Date(data.timestamp),\n };\n });\n }\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n async savePendingApproval(runId: string, approval: PendingApproval): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Saving approval: ${approval.id}`);\n }\n\n const serialized = JSON.stringify({\n ...approval,\n requestedAt: approval.requestedAt.toISOString(),\n expiresAt: approval.expiresAt?.toISOString(),\n decidedAt: approval.decidedAt?.toISOString(),\n });\n\n await client.rpush(this.approvalsKey(runId), serialized);\n }\n\n async getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n const client = await this.ensureClient();\n\n const rawList = await client.lrange(this.approvalsKey(runId), 0, -1);\n\n 
return rawList\n .map((raw) => {\n const data = JSON.parse(raw);\n return {\n ...data,\n requestedAt: new Date(data.requestedAt),\n expiresAt: data.expiresAt ? new Date(data.expiresAt) : undefined,\n decidedAt: data.decidedAt ? new Date(data.decidedAt) : undefined,\n } as PendingApproval;\n })\n .filter((a) => a.status === \"pending\");\n }\n\n async getPendingApproval(runId: string, approvalId: string): Promise<PendingApproval | null> {\n const approvals = await this.getPendingApprovals(runId);\n return approvals.find((a) => a.id === approvalId) || null;\n }\n\n async updateApproval(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void> {\n const client = await this.ensureClient();\n const key = this.approvalsKey(runId);\n\n // Get all approvals to find the index\n const rawList = await client.lrange(key, 0, -1);\n\n // Find the index of the approval to update\n let targetIndex = -1;\n for (let i = 0; i < rawList.length; i++) {\n const data = JSON.parse(rawList[i]!);\n if (data.id === approvalId) {\n targetIndex = i;\n break;\n }\n }\n\n if (targetIndex === -1) {\n throw new Error(`Approval not found: ${approvalId}`);\n }\n\n // Parse and update the approval data\n const data = JSON.parse(rawList[targetIndex]!);\n data.status = decision.approved ? \"approved\" : \"rejected\";\n data.decidedBy = decision.approver;\n data.decidedAt = new Date().toISOString();\n data.comment = decision.comment;\n\n // Use LSET to atomically update the specific index\n // This is more atomic than del + rpush as it only modifies one element\n await client.lset(key, targetIndex, JSON.stringify(data));\n }\n\n async listPendingApprovals(filter?: {\n workflowId?: string;\n approver?: string;\n status?: \"pending\" | \"expired\";\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n const client = await this.ensureClient();\n const result: Array<{ runId: string; approval: PendingApproval }> = [];\n\n // Get all approval keys\n const keys = await client.keys(`${this.config.prefix}approvals:*`);\n\n for (const key of keys) {\n const runId = key.replace(`${this.config.prefix}approvals:`, \"\");\n\n // Check workflow filter\n if (filter?.workflowId) {\n const run = await this.getRun(runId);\n if (!run || run.workflowId !== filter.workflowId) continue;\n }\n\n const rawList = await client.lrange(key, 0, -1);\n\n for (const raw of rawList) {\n const data = JSON.parse(raw);\n const approval: PendingApproval = {\n ...data,\n requestedAt: new Date(data.requestedAt),\n expiresAt: data.expiresAt ? new Date(data.expiresAt) : undefined,\n decidedAt: data.decidedAt ? 
new Date(data.decidedAt) : undefined,\n };\n\n // Check status filter\n if (filter?.status === \"pending\" && approval.status !== \"pending\") continue;\n if (filter?.status === \"expired\") {\n const isExpired = approval.expiresAt && new Date() > approval.expiresAt;\n if (!isExpired) continue;\n }\n\n // Check approver filter\n if (\n filter?.approver && approval.approvers && !approval.approvers.includes(filter.approver)\n ) {\n continue;\n }\n\n result.push({ runId, approval });\n }\n }\n\n return result;\n }\n\n // =========================================================================\n // Queue Operations\n // =========================================================================\n\n async enqueue(job: WorkflowJob): Promise<void> {\n const client = await this.ensureClient();\n\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Enqueueing job: ${job.runId}`);\n }\n\n await client.xadd(this.config.streamKey, \"*\", {\n runId: job.runId,\n workflowId: job.workflowId,\n input: JSON.stringify(job.input),\n priority: String(job.priority || 0),\n createdAt: job.createdAt.toISOString(),\n });\n }\n\n async dequeue(): Promise<WorkflowJob | null> {\n const client = await this.ensureClient();\n\n const streams = await client.xreadgroup(\n [{ key: this.config.streamKey, xid: \">\" }],\n {\n group: this.config.groupName,\n consumer: this.config.consumerName,\n block: 5000, // 5 second timeout\n count: 1,\n },\n );\n\n if (!streams || streams.length === 0) {\n return null;\n }\n\n // Now streams is strongly typed due to Adapter\n const stream = streams[0];\n if (!stream || !stream.messages || stream.messages.length === 0) {\n return null;\n }\n\n const message = stream.messages[0];\n if (!message) {\n return null;\n }\n\n const data = message.data;\n\n return {\n runId: data.runId ?? \"\",\n workflowId: data.workflowId ?? \"\",\n input: data.input ? JSON.parse(data.input) : undefined,\n priority: data.priority ? parseInt(data.priority) : undefined,\n createdAt: data.createdAt ? 
new Date(data.createdAt) : new Date(),\n };\n }\n\n acknowledge(runId: string): Promise<void> {\n // Note: In a full implementation, we'd need to track the message ID\n // For now, this is a placeholder\n if (this.config.debug) {\n logger.debug(`[RedisBackend] Acknowledged: ${runId}`);\n }\n return Promise.resolve();\n }\n\n async nack(runId: string): Promise<void> {\n // Re-enqueue the job\n const run = await this.getRun(runId);\n if (run) {\n await this.enqueue({\n runId: run.id,\n workflowId: run.workflowId,\n input: run.input,\n createdAt: new Date(),\n });\n }\n }\n\n // =========================================================================\n // Distributed Locking\n // =========================================================================\n\n async acquireLock(runId: string, duration: number): Promise<boolean> {\n const client = await this.ensureClient();\n const lockValue = crypto.randomUUID();\n\n const result = await client.set(this.lockKey(runId), lockValue, {\n nx: true,\n px: duration,\n });\n\n return result === \"OK\";\n }\n\n async releaseLock(runId: string): Promise<void> {\n const client = await this.ensureClient();\n await client.del(this.lockKey(runId));\n }\n\n async extendLock(runId: string, duration: number): Promise<boolean> {\n const client = await this.ensureClient();\n const exists = await client.exists(this.lockKey(runId));\n\n if (exists === 0) return false;\n\n await client.expire(this.lockKey(runId), Math.ceil(duration / 1000));\n return true;\n }\n\n async isLocked(runId: string): Promise<boolean> {\n const client = await this.ensureClient();\n const exists = await client.exists(this.lockKey(runId));\n return exists > 0;\n }\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n async healthCheck(): Promise<boolean> {\n try {\n const client = await this.ensureClient();\n await client.set(\"__health_check__\", \"ok\", { ex: 1 });\n return true;\n } catch {\n return false;\n }\n }\n\n destroy(): Promise<void> {\n if (this.client) {\n if (typeof this.client.quit === \"function\") {\n this.client.quit();\n } else if (typeof this.client.disconnect === \"function\") {\n this.client.disconnect();\n }\n this.client = null;\n }\n this.connectionPromise = null;\n this.initialized = false;\n\n if (this.config.debug) {\n logger.debug(\"[RedisBackend] Destroyed\");\n }\n return Promise.resolve();\n }\n}\n", "/**\n * DAG Executor\n *\n * Executes workflow DAGs with proper dependency ordering and parallel execution\n */\n\nimport type {\n BranchNodeConfig,\n Checkpoint,\n MapNodeConfig,\n NodeState,\n ParallelNodeConfig,\n SubWorkflowNodeConfig,\n WaitNodeConfig,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n WorkflowNodeConfig,\n WorkflowRun,\n} from \"../types.ts\";\nimport { generateId } from \"../types.ts\";\nimport type { StepExecutor } from \"./step-executor.ts\";\nimport type { CheckpointManager } from \"./checkpoint-manager.ts\";\n\n/**\n * DAG executor configuration\n */\nexport interface DAGExecutorConfig {\n /** Step executor for running individual steps */\n stepExecutor: StepExecutor;\n /** Checkpoint manager for durability */\n checkpointManager?: CheckpointManager;\n /** Maximum concurrent parallel executions */\n maxConcurrency?: number;\n /** Callback when node execution starts */\n onNodeStart?: (nodeId: string) => void;\n /** Callback when node execution completes */\n onNodeComplete?: (nodeId: string, state: 
NodeState) => void;\n /** Callback when waiting for approval/event */\n onWaiting?: (nodeId: string, waitConfig: WaitNodeConfig) => void;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Result of DAG execution\n */\nexport interface DAGExecutionResult {\n /** Whether the DAG completed successfully */\n completed: boolean;\n /** Whether the DAG is waiting (for approval/event) */\n waiting: boolean;\n /** Node that is waiting (if waiting) */\n waitingNode?: string;\n /** Final context after execution */\n context: WorkflowContext;\n /** Final node states */\n nodeStates: Record<string, NodeState>;\n /** Error if failed */\n error?: string;\n}\n\n/**\n * DAG Executor class\n *\n * Responsible for executing workflow DAGs with:\n * - Topological ordering for dependencies\n * - Parallel execution of independent nodes\n * - Support for branching and conditional logic\n * - Checkpointing for durability\n */\nexport class DAGExecutor {\n private config: DAGExecutorConfig;\n\n constructor(config: DAGExecutorConfig) {\n this.config = {\n maxConcurrency: 10,\n debug: false,\n ...config,\n };\n }\n\n /**\n * Execute a workflow DAG\n */\n async execute(\n nodes: WorkflowNode[],\n run: WorkflowRun,\n startFromNode?: string,\n ): Promise<DAGExecutionResult> {\n const context = { ...run.context };\n const nodeStates = { ...run.nodeStates };\n\n // Build dependency graph\n const { adjList, inDegree, nodeMap } = this.buildGraph(nodes);\n\n // Update in-degrees for nodes whose dependencies are already completed\n // This handles resuming from checkpoints\n for (const [nodeId, state] of Object.entries(nodeStates)) {\n if (state.status === \"completed\" || state.status === \"skipped\") {\n // Decrement in-degree for all dependents of this completed node\n for (const dependent of adjList.get(nodeId) || []) {\n const currentDegree = inDegree.get(dependent) ?? 
0;\n if (currentDegree > 0) {\n inDegree.set(dependent, currentDegree - 1);\n }\n }\n }\n }\n\n // Validate DAG (no cycles)\n if (this.hasCycle(nodes, adjList)) {\n return {\n completed: false,\n waiting: false,\n context,\n nodeStates,\n error: \"Workflow DAG contains cycles\",\n };\n }\n\n // Find starting nodes\n let ready: string[];\n if (startFromNode) {\n // Resume from specific node\n ready = [startFromNode];\n } else {\n // Start from nodes with no dependencies that haven't been completed\n ready = this.getReadyNodes(inDegree, nodeStates);\n }\n\n // Execute nodes in topological order\n while (ready.length > 0) {\n // Execute ready nodes in parallel (respecting max concurrency)\n const batch = ready.slice(0, this.config.maxConcurrency);\n ready = ready.slice(this.config.maxConcurrency);\n\n const results = await Promise.allSettled(\n batch.map((nodeId) => this.executeNode(nodeMap.get(nodeId)!, context, nodeStates)),\n );\n\n // Process results\n for (let i = 0; i < batch.length; i++) {\n const nodeId = batch[i]!;\n const result = results[i]!;\n\n if (result.status === \"fulfilled\") {\n const nodeResult = result.value;\n\n // Update node state\n nodeStates[nodeId] = nodeResult.state;\n Object.assign(context, nodeResult.contextUpdates);\n\n // Handle waiting state\n if (nodeResult.waiting) {\n return {\n completed: false,\n waiting: true,\n waitingNode: nodeId,\n context,\n nodeStates,\n };\n }\n\n // Checkpoint if configured\n const nodeConfig = nodeMap.get(nodeId);\n if (\n nodeResult.state.status === \"completed\" &&\n nodeConfig && this.shouldCheckpoint(nodeConfig)\n ) {\n await this.checkpoint(run.id, nodeId, context, nodeStates);\n }\n\n // Check if node failed (step returned success: false)\n if (nodeResult.state.status === \"failed\") {\n return {\n completed: false,\n waiting: false,\n context,\n nodeStates,\n error: `Node \"${nodeId}\" failed: ${nodeResult.state.error || \"Unknown error\"}`,\n };\n }\n\n // Update ready nodes based on completed dependencies\n if (nodeResult.state.status === \"completed\" || nodeResult.state.status === \"skipped\") {\n for (const dependent of adjList.get(nodeId) || []) {\n const newDegree = inDegree.get(dependent)! - 1;\n inDegree.set(dependent, newDegree);\n }\n }\n } else {\n // Node execution failed\n const error = result.reason instanceof Error\n ? 
result.reason.message\n : String(result.reason);\n\n nodeStates[nodeId] = {\n nodeId,\n status: \"failed\",\n error,\n attempt: (nodeStates[nodeId]?.attempt || 0) + 1,\n completedAt: new Date(),\n };\n\n // Fail fast - don't continue with other nodes\n return {\n completed: false,\n waiting: false,\n context,\n nodeStates,\n error: `Node \"${nodeId}\" failed: ${error}`,\n };\n }\n }\n\n // Get newly ready nodes\n const newReady = this.getReadyNodes(inDegree, nodeStates);\n ready = [...ready, ...newReady];\n }\n\n return {\n completed: true,\n waiting: false,\n context,\n nodeStates,\n };\n }\n\n /**\n * Execute a single node\n */\n private async executeNode(\n node: WorkflowNode,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const nodeId = node.id;\n\n // Check if node is already completed (resuming from checkpoint)\n const existingState = nodeStates[nodeId];\n if (existingState?.status === \"completed\") {\n return { state: existingState, contextUpdates: {}, waiting: false };\n }\n\n this.config.onNodeStart?.(nodeId);\n\n // Check if should skip\n if (node.config.skip && (await node.config.skip(context))) {\n const state = this.config.stepExecutor.createSkippedState(nodeId);\n this.config.onNodeComplete?.(nodeId, state);\n return { state, contextUpdates: {}, waiting: false };\n }\n\n // Execute based on node type\n const config = node.config;\n\n switch (config.type) {\n case \"step\":\n return await this.executeStepNode(node, context);\n\n case \"parallel\":\n return await this.executeParallelNode(node, config, context, nodeStates);\n\n case \"map\":\n return await this.executeMapNode(node, config as MapNodeConfig, context, nodeStates);\n\n case \"branch\":\n return await this.executeBranchNode(node, config as BranchNodeConfig, context, nodeStates);\n\n case \"wait\":\n return await this.executeWaitNode(node, config as WaitNodeConfig, context);\n\n case \"subWorkflow\":\n return await this.executeSubWorkflowNode(\n node,\n config as SubWorkflowNodeConfig,\n context,\n nodeStates,\n );\n\n default:\n throw new Error(\n `Unknown node type \"${(config as WorkflowNodeConfig).type}\" for node \"${node.id}\". ` +\n `Valid types are: step, parallel, map, branch, wait, subWorkflow`,\n );\n }\n }\n\n /**\n * Execute a map node (dynamic fan-out)\n */\n private async executeMapNode(\n node: WorkflowNode,\n config: MapNodeConfig,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // 1. Resolve items collection\n const items = typeof config.items === \"function\" ? await config.items(context) : config.items;\n\n if (!Array.isArray(items)) {\n throw new Error(`Map node \"${node.id}\" items must be an array`);\n }\n\n if (items.length === 0) {\n // Empty collection, done immediately\n const state: NodeState = {\n nodeId: node.id,\n status: \"completed\",\n output: [],\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: new Date(),\n };\n return { state, contextUpdates: { [node.id]: [] }, waiting: false };\n }\n\n // 2. 
Generate child nodes for each item\n const childNodes: WorkflowNode[] = [];\n\n // Check if processor is a WorkflowDefinition or a single node\n const isWorkflowDef = (p: any): p is WorkflowDefinition => !!p.steps;\n\n // We'll map each item to a set of nodes\n // For simplicity in this implementation, if processor is a single node, we clone it.\n // If it's a workflow def, we'd need to expand it (similar to subworkflow).\n // Here we assume it's a single node structure for the \"map\" pattern or a simple chain.\n // To support complex subworkflows per item, best to wrap in a SubWorkflowNode.\n\n for (let i = 0; i < items.length; i++) {\n const item = items[i];\n const childId = `${node.id}_${i}`;\n\n let childNode: WorkflowNode;\n\n if (isWorkflowDef(config.processor)) {\n // Create a SubWorkflow node for this item\n childNode = {\n id: childId,\n config: {\n type: \"subWorkflow\",\n workflow: config.processor,\n input: item,\n retry: config.retry,\n checkpoint: false, // Don't checkpoint individual map items by default\n } as SubWorkflowNodeConfig,\n };\n } else {\n // Clone the single processor node\n // We must override the input to be the current item\n const processorConfig = { ...config.processor.config } as any;\n\n // If it's a step node, ensure input receives the item\n if (processorConfig.type === \"step\") {\n processorConfig.input = item;\n }\n\n childNode = {\n id: childId,\n config: processorConfig,\n };\n }\n\n childNodes.push(childNode);\n }\n\n // 3. Execute child nodes\n // We use a temporary DAG execution for these nodes\n // The maxConcurrency from config overrides default\n const originalConcurrency = this.config.maxConcurrency;\n if (config.concurrency) {\n this.config.maxConcurrency = config.concurrency;\n }\n\n try {\n const result = await this.execute(childNodes, {\n id: `${node.id}_map`,\n workflowId: \"\",\n status: \"running\",\n input: context.input,\n nodeStates: {}, // Start fresh for map iteration\n currentNodes: [],\n context: { ...context }, // Pass copy of context so they can read global state\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // Merge child node states into parent for visibility\n Object.assign(nodeStates, result.nodeStates);\n\n // Collect outputs in order\n const outputs = childNodes.map((child) => {\n const childState = result.nodeStates[child.id];\n return childState?.output;\n });\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? \"running\" : \"failed\"),\n output: outputs,\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.completed ? { [node.id]: outputs } : {},\n waiting: result.waiting,\n };\n } finally {\n // Restore concurrency setting\n this.config.maxConcurrency = originalConcurrency!;\n }\n }\n\n /**\n * Execute a sub-workflow node\n */\n private async executeSubWorkflowNode(\n node: WorkflowNode,\n config: SubWorkflowNodeConfig,\n context: WorkflowContext,\n _nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // 1. Resolve workflow definition\n let workflowDef: WorkflowDefinition;\n if (typeof config.workflow === \"string\") {\n throw new Error(\n \"Resolving workflow by ID is not yet supported in this execution context. 
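A sketch of how a map node might be declared so that the fan-out logic above can clone the processor once per item; the node ids, the `summarizer` agent, the shape of the workflow input, and the import path are assumptions used only for illustration.

```typescript
// Hypothetical map node: summarize every URL from the workflow input with the
// same agent, three at a time.
import type { WorkflowNode } from "./types.ts";

const summarizeAll: WorkflowNode = {
  id: "summarizeAll",
  config: {
    type: "map",
    // Items may be a static array or derived from the workflow context.
    items: (ctx) => (ctx.input as { urls?: string[] }).urls ?? [],
    // A single step node: it is cloned per item with `input` set to that item.
    processor: {
      id: "summarizeOne",
      config: { type: "step", agent: "summarizer" },
    },
    // Overrides the executor's default concurrency while the map runs.
    concurrency: 3,
  },
};
```

Per the executor code above, the collected outputs are stored on the context under the map node's id, in item order.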
Pass the WorkflowDefinition object.\",\n );\n } else {\n workflowDef = config.workflow;\n }\n\n // 2. Resolve input\n const input = typeof config.input === \"function\"\n ? await config.input(context)\n : (config.input ?? context.input);\n\n // 3. Expand steps (handle dynamic steps builder)\n let steps: WorkflowNode[];\n if (typeof workflowDef.steps === \"function\") {\n steps = workflowDef.steps({\n input,\n context,\n });\n } else {\n steps = workflowDef.steps;\n }\n\n // 4. Execute sub-workflow\n // We create a new isolated run context for the subworkflow\n const subRunId = `${node.id}_sub_${generateId()}`;\n\n // Execute recursively\n const result = await this.execute(steps, {\n id: subRunId,\n workflowId: workflowDef.id,\n status: \"running\",\n input,\n nodeStates: {},\n currentNodes: [],\n context: {\n input, // Subworkflow starts with fresh context scoped to its input\n // We do NOT inherit parent context to ensure isolation,\n // unless explicitly passed via input.\n },\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // 5. Process result\n let finalOutput = result.context; // Default output is the final context\n\n // If sub-workflow has explicit output transformation\n if (result.completed && config.output) {\n finalOutput = config.output(result.context) as any;\n }\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? \"running\" : \"failed\"),\n output: finalOutput,\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.completed ? { [node.id]: finalOutput } : {},\n waiting: result.waiting,\n };\n }\n\n /**\n * Execute a step node\n */\n private async executeStepNode(\n node: WorkflowNode,\n context: WorkflowContext,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const result = await this.config.stepExecutor.execute(node, context);\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.success ? \"completed\" : \"failed\",\n input: context.input,\n output: result.output,\n error: result.error,\n attempt: 1,\n startedAt: new Date(Date.now() - result.executionTime),\n completedAt: new Date(),\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.success ? { [node.id]: result.output } : {},\n waiting: false,\n };\n }\n\n /**\n * Execute a parallel node\n */\n private async executeParallelNode(\n node: WorkflowNode,\n config: ParallelNodeConfig,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // Execute child nodes using DAG executor recursively\n const result = await this.execute(config.nodes, {\n id: `${node.id}_parallel`,\n workflowId: \"\",\n status: \"running\",\n input: context.input,\n nodeStates: {},\n currentNodes: [],\n context,\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // Merge child node states\n Object.assign(nodeStates, result.nodeStates);\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? 
\"running\" : \"failed\"),\n output: result.context,\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.context,\n waiting: result.waiting,\n };\n }\n\n /**\n * Execute a branch node\n */\n private async executeBranchNode(\n node: WorkflowNode,\n config: BranchNodeConfig,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n const startTime = Date.now();\n\n // Evaluate condition\n const conditionResult = await config.condition(context);\n\n // Select branch to execute\n const branchNodes = conditionResult ? config.then : (config.else || []);\n\n if (branchNodes.length === 0) {\n // No nodes to execute\n const state: NodeState = {\n nodeId: node.id,\n status: \"completed\",\n output: { branch: conditionResult ? \"then\" : \"else\", skipped: true },\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: new Date(),\n };\n\n return { state, contextUpdates: {}, waiting: false };\n }\n\n // Execute branch nodes\n const result = await this.execute(branchNodes, {\n id: `${node.id}_branch`,\n workflowId: \"\",\n status: \"running\",\n input: context.input,\n nodeStates: {},\n currentNodes: [],\n context,\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n });\n\n // Merge child node states\n Object.assign(nodeStates, result.nodeStates);\n\n const state: NodeState = {\n nodeId: node.id,\n status: result.completed ? \"completed\" : (result.waiting ? \"running\" : \"failed\"),\n output: {\n branch: conditionResult ? \"then\" : \"else\",\n result: result.context,\n },\n error: result.error,\n attempt: 1,\n startedAt: new Date(startTime),\n completedAt: result.completed ? new Date() : undefined,\n };\n\n this.config.onNodeComplete?.(node.id, state);\n\n return {\n state,\n contextUpdates: result.context,\n waiting: result.waiting,\n };\n }\n\n /**\n * Execute a wait node (approval or event)\n */\n private async executeWaitNode(\n node: WorkflowNode,\n config: WaitNodeConfig,\n context: WorkflowContext,\n ): Promise<{\n state: NodeState;\n contextUpdates: Record<string, unknown>;\n waiting: boolean;\n }> {\n // Notify that we're waiting\n this.config.onWaiting?.(node.id, config);\n\n const state: NodeState = {\n nodeId: node.id,\n status: \"running\",\n input: {\n type: config.waitType,\n message: config.message,\n payload: typeof config.payload === \"function\"\n ? 
await config.payload(context)\n : config.payload,\n },\n attempt: 1,\n startedAt: new Date(),\n };\n\n // Signal that workflow is now waiting\n return {\n state,\n contextUpdates: {},\n waiting: true,\n };\n }\n\n /**\n * Build dependency graph from nodes\n */\n private buildGraph(nodes: WorkflowNode[]): {\n adjList: Map<string, string[]>;\n inDegree: Map<string, number>;\n nodeMap: Map<string, WorkflowNode>;\n } {\n const adjList = new Map<string, string[]>();\n const inDegree = new Map<string, number>();\n const nodeMap = new Map<string, WorkflowNode>();\n\n // Initialize\n for (const node of nodes) {\n adjList.set(node.id, []);\n inDegree.set(node.id, 0);\n nodeMap.set(node.id, node);\n }\n\n // Build edges from dependencies\n for (const node of nodes) {\n for (const dep of node.dependsOn || []) {\n if (!adjList.has(dep)) {\n throw new Error(\n `Node \"${node.id}\" depends on unknown node \"${dep}\"`,\n );\n }\n adjList.get(dep)!.push(node.id);\n inDegree.set(node.id, inDegree.get(node.id)! + 1);\n }\n }\n\n // Also handle implicit sequential dependencies (nodes without explicit deps)\n // If no dependencies specified (undefined), assume sequential order\n // If dependsOn is explicitly set (even to []), respect that choice\n let prevNodeId: string | null = null;\n for (const node of nodes) {\n // Only add implicit deps if:\n // 1. dependsOn is undefined (not explicitly set)\n // 2. No other node explicitly depends on this node\n // 3. This node has no incoming edges yet\n if (node.dependsOn === undefined && prevNodeId) {\n const isDependent = this.hasAnyDependents(nodes, node.id);\n const currentInDegree = inDegree.get(node.id) ?? 0;\n\n if (!isDependent && currentInDegree === 0) {\n // This node is \"floating\" - no explicit deps and nothing depends on it\n // Create implicit dependency on previous node\n adjList.get(prevNodeId)!.push(node.id);\n inDegree.set(node.id, inDegree.get(node.id)! + 1);\n }\n }\n prevNodeId = node.id;\n }\n\n return { adjList, inDegree, nodeMap };\n }\n\n /**\n * Check if any node explicitly depends on the given node\n */\n private hasAnyDependents(nodes: WorkflowNode[], nodeId: string): boolean {\n return nodes.some((n) => n.dependsOn?.includes(nodeId));\n }\n\n /**\n * Get nodes that are ready to execute\n */\n private getReadyNodes(\n inDegree: Map<string, number>,\n nodeStates: Record<string, NodeState>,\n ): string[] {\n const ready: string[] = [];\n\n for (const [nodeId, degree] of inDegree) {\n // Node is ready if:\n // 1. No remaining dependencies (in-degree = 0)\n // 2. 
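Because `buildGraph()` above adds an implicit edge from each node to the next when `dependsOn` is omitted, plain node lists run sequentially, while explicit `dependsOn` enables fan-out. A small sketch with illustrative ids and agent/tool names:

```typescript
// Hypothetical node lists showing implicit vs. explicit ordering.
import type { WorkflowNode } from "./types.ts";

// No dependsOn anywhere: research -> draft -> review, strictly in sequence.
const sequential: WorkflowNode[] = [
  { id: "research", config: { type: "step", agent: "researcher" } },
  { id: "draft", config: { type: "step", agent: "writer" } },
  { id: "review", config: { type: "step", agent: "reviewer" } },
];

// Explicit dependsOn: both summaries wait for "fetch", then run in parallel
// within the executor's concurrency limit.
const fanOut: WorkflowNode[] = [
  { id: "fetch", config: { type: "step", tool: "http-fetch" } },
  { id: "summaryA", dependsOn: ["fetch"], config: { type: "step", agent: "summarizer" } },
  { id: "summaryB", dependsOn: ["fetch"], config: { type: "step", agent: "summarizer" } },
];
```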
Not already completed/running/failed\n const state = nodeStates[nodeId];\n const isReady = degree === 0 &&\n (!state || state.status === \"pending\");\n\n if (isReady) {\n ready.push(nodeId);\n }\n }\n\n return ready;\n }\n\n /**\n * Check if DAG has cycles (using DFS)\n */\n private hasCycle(\n nodes: WorkflowNode[],\n adjList: Map<string, string[]>,\n ): boolean {\n const visited = new Set<string>();\n const recursionStack = new Set<string>();\n\n const dfs = (nodeId: string): boolean => {\n visited.add(nodeId);\n recursionStack.add(nodeId);\n\n for (const neighbor of adjList.get(nodeId) || []) {\n if (!visited.has(neighbor)) {\n if (dfs(neighbor)) return true;\n } else if (recursionStack.has(neighbor)) {\n return true;\n }\n }\n\n recursionStack.delete(nodeId);\n return false;\n };\n\n for (const node of nodes) {\n if (!visited.has(node.id)) {\n if (dfs(node.id)) return true;\n }\n }\n\n return false;\n }\n\n /**\n * Check if node should be checkpointed\n */\n private shouldCheckpoint(node: WorkflowNode): boolean {\n return node.config.checkpoint ?? false;\n }\n\n /**\n * Create a checkpoint\n */\n private async checkpoint(\n runId: string,\n nodeId: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<void> {\n if (!this.config.checkpointManager) {\n return;\n }\n\n const checkpoint: Checkpoint = {\n id: generateId(\"cp\"),\n nodeId,\n timestamp: new Date(),\n context: structuredClone(context),\n nodeStates: structuredClone(nodeStates),\n };\n\n await this.config.checkpointManager.save(runId, checkpoint);\n }\n}\n", "/**\n * Checkpoint Manager\n *\n * Handles workflow state checkpointing for durability and resume\n */\n\nimport type { Checkpoint, NodeState, WorkflowContext, WorkflowNode } from \"../types.ts\";\nimport { generateId } from \"../types.ts\";\nimport type { WorkflowBackend } from \"../backends/types.ts\";\n\n/**\n * Checkpoint manager configuration\n */\nexport interface CheckpointManagerConfig {\n /** Backend for persisting checkpoints */\n backend: WorkflowBackend;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Resume information returned when resuming from checkpoint\n */\nexport interface ResumeInfo {\n /** Checkpoint to resume from */\n checkpoint: Checkpoint;\n /** Node to start execution from */\n startFromNode: string;\n /** Restored context */\n context: WorkflowContext;\n /** Restored node states */\n nodeStates: Record<string, NodeState>;\n}\n\n/**\n * Checkpoint Manager class\n *\n * Responsible for:\n * - Saving checkpoints after step completion\n * - Loading checkpoints for resume\n * - Determining resume points\n */\nexport class CheckpointManager {\n private config: CheckpointManagerConfig;\n\n constructor(config: CheckpointManagerConfig) {\n this.config = {\n debug: false,\n ...config,\n };\n }\n\n /**\n * Save a checkpoint for a workflow run\n */\n async save(runId: string, checkpoint: Checkpoint): Promise<void> {\n if (this.config.debug) {\n console.log(`[CheckpointManager] Saving checkpoint ${checkpoint.id} for run ${runId}`);\n }\n\n await this.config.backend.saveCheckpoint(runId, checkpoint);\n }\n\n /**\n * Create and save a checkpoint\n */\n async createCheckpoint(\n runId: string,\n nodeId: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<Checkpoint> {\n const checkpoint: Checkpoint = {\n id: generateId(\"cp\"),\n nodeId,\n timestamp: new Date(),\n context: structuredClone(context),\n nodeStates: structuredClone(nodeStates),\n };\n\n await this.save(runId, 
checkpoint);\n\n return checkpoint;\n }\n\n /**\n * Get the latest checkpoint for a workflow run\n */\n async getLatest(runId: string): Promise<Checkpoint | null> {\n return await this.config.backend.getLatestCheckpoint(runId);\n }\n\n /**\n * Get all checkpoints for a workflow run\n */\n async getAll(runId: string): Promise<Checkpoint[]> {\n if (this.config.backend.getCheckpoints) {\n return await this.config.backend.getCheckpoints(runId);\n }\n\n // Fallback: just return latest if getCheckpoints not implemented\n const latest = await this.getLatest(runId);\n return latest ? [latest] : [];\n }\n\n /**\n * Prepare resume information from a checkpoint\n */\n async prepareResume(\n runId: string,\n nodes: WorkflowNode[],\n fromCheckpoint?: string,\n ): Promise<ResumeInfo | null> {\n let checkpoint: Checkpoint | null;\n\n if (fromCheckpoint) {\n // Find specific checkpoint\n const all = await this.getAll(runId);\n checkpoint = all.find((c) => c.id === fromCheckpoint) || null;\n } else {\n // Use latest checkpoint\n checkpoint = await this.getLatest(runId);\n }\n\n if (!checkpoint) {\n return null;\n }\n\n // Find next node to execute after checkpoint\n const startFromNode = this.findNextNode(nodes, checkpoint);\n\n if (!startFromNode) {\n // No more nodes to execute\n return null;\n }\n\n return {\n checkpoint,\n startFromNode,\n context: structuredClone(checkpoint.context),\n nodeStates: structuredClone(checkpoint.nodeStates),\n };\n }\n\n /**\n * Find the next node to execute after a checkpoint\n */\n private findNextNode(\n nodes: WorkflowNode[],\n checkpoint: Checkpoint,\n ): string | null {\n const completedNodeId = checkpoint.nodeId;\n const nodeStates = checkpoint.nodeStates;\n\n // Build node lookup\n const nodeIndex = new Map<string, number>();\n nodes.forEach((node, index) => nodeIndex.set(node.id, index));\n\n // Find the checkpoint node's position\n const checkpointIndex = nodeIndex.get(completedNodeId);\n if (checkpointIndex === undefined) {\n // Checkpoint node not found, start from beginning\n const firstNode = nodes[0];\n return firstNode?.id ?? 
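A sketch of reading a resume point back out of the checkpoint manager described above; `backend`, `runId`, and `nodes` are assumed to exist in scope and the import paths are illustrative.

```typescript
// Hypothetical lookup of where a run should resume from its latest checkpoint.
import { CheckpointManager } from "./executor/checkpoint-manager.ts";
import type { WorkflowBackend } from "./backends/types.ts";
import type { WorkflowNode } from "./types.ts";

async function resumePoint(
  backend: WorkflowBackend,
  runId: string,
  nodes: WorkflowNode[],
) {
  const checkpoints = new CheckpointManager({ backend, debug: true });

  // Returns null when there is no checkpoint or nothing left to execute.
  const resume = await checkpoints.prepareResume(runId, nodes);
  if (!resume) return null;

  return {
    startFromNode: resume.startFromNode, // first incomplete node after the checkpoint
    context: resume.context, // deep-cloned context snapshot
    nodeStates: resume.nodeStates, // deep-cloned node states
  };
}
```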
null;\n }\n\n // Look for the first incomplete node after the checkpoint\n for (let i = checkpointIndex + 1; i < nodes.length; i++) {\n const node = nodes[i];\n if (!node) continue;\n\n const state = nodeStates[node.id];\n\n // Find first node that hasn't completed\n if (!state || state.status === \"pending\") {\n return node.id;\n }\n }\n\n // Also check nodes that depend on the checkpoint node\n for (const node of nodes) {\n if (node.dependsOn?.includes(completedNodeId)) {\n const state = nodeStates[node.id];\n if (!state || state.status === \"pending\") {\n return node.id;\n }\n }\n }\n\n // No incomplete nodes found\n return null;\n }\n\n /**\n * Determine if a node should be checkpointed\n */\n shouldCheckpoint(node: WorkflowNode): boolean {\n const config = node.config;\n\n // Explicit checkpoint configuration\n if (config.checkpoint !== undefined) {\n return config.checkpoint;\n }\n\n // Default checkpointing rules:\n // - Always checkpoint after agent steps\n // - Always checkpoint before wait/approval\n // - Checkpoint after parallel completion\n switch (config.type) {\n case \"step\":\n // Checkpoint agent steps, but not tool steps by default\n return \"agent\" in config && !!config.agent;\n\n case \"wait\":\n // Always checkpoint before waiting\n return true;\n\n case \"parallel\":\n // Checkpoint after all parallel steps complete\n return true;\n\n case \"branch\":\n // Don't checkpoint branches by default\n return false;\n\n case \"subWorkflow\":\n // Always checkpoint after sub-workflow\n return true;\n\n default:\n return false;\n }\n }\n\n /**\n * Clean up old checkpoints (keep only the most recent N)\n */\n async cleanup(runId: string, keepCount: number = 5): Promise<void> {\n const all = await this.getAll(runId);\n\n if (all.length <= keepCount) {\n return;\n }\n\n // Sort by timestamp (newest first)\n all.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());\n\n // Get checkpoints to delete (all except the newest keepCount)\n const toDelete = all.slice(keepCount);\n const idsToDelete = toDelete.map((c) => c.id);\n\n if (idsToDelete.length === 0) {\n return;\n }\n\n if (this.config.debug) {\n console.log(\n `[CheckpointManager] Cleaning up ${idsToDelete.length} old checkpoints for run ${runId}`,\n );\n }\n\n // Use batch delete if available, otherwise delete one by one\n if (this.config.backend.deleteCheckpoints) {\n await this.config.backend.deleteCheckpoints(runId, idsToDelete);\n } else if (this.config.backend.deleteCheckpoint) {\n for (const id of idsToDelete) {\n await this.config.backend.deleteCheckpoint(runId, id);\n }\n }\n // If neither method is available, cleanup is a no-op\n }\n}\n", "/**\n * Step Executor\n *\n * Executes individual workflow steps (agents and tools)\n */\n\nimport type { Agent, AgentResponse } from \"../../types/agent.ts\";\nimport type { Tool } from \"../../types/tool.ts\";\nimport type { NodeState, StepNodeConfig, WorkflowContext, WorkflowNode } from \"../types.ts\";\nimport { parseDuration } from \"../types.ts\";\nimport type { BlobStorage } from \"../blob/types.ts\";\n\n/** Default timeout for workflow steps (5 minutes) */\nconst DEFAULT_STEP_TIMEOUT_MS = 5 * 60 * 1000;\n\n/**\n * Agent registry for looking up agents by ID\n */\nexport interface AgentRegistry {\n get(id: string): Agent | undefined;\n /** Optional: List all registered agent IDs (for error messages) */\n list?(): string[];\n}\n\n/**\n * Tool registry for looking up tools by ID\n */\nexport interface ToolRegistry {\n get(id: string): Tool | undefined;\n /** 
Optional: List all registered tool IDs (for error messages) */\n list?(): string[];\n}\n\n/**\n * Step executor configuration\n */\nexport interface StepExecutorConfig {\n /** Agent registry for looking up agents */\n agentRegistry?: AgentRegistry;\n /** Tool registry for looking up tools */\n toolRegistry?: ToolRegistry;\n /** Default timeout for steps (in milliseconds) */\n defaultTimeout?: number;\n /** Blob storage access */\n blobStorage?: BlobStorage;\n /** Callback when step starts */\n onStepStart?: (nodeId: string, input: unknown) => void;\n /** Callback when step completes */\n onStepComplete?: (nodeId: string, output: unknown) => void;\n /** Callback when step fails */\n onStepError?: (nodeId: string, error: Error) => void;\n}\n\n/**\n * Result of executing a step\n */\nexport interface StepResult {\n /** Whether the step succeeded */\n success: boolean;\n /** Output from the step (if successful) */\n output?: unknown;\n /** Error message (if failed) */\n error?: string;\n /** Execution time in milliseconds */\n executionTime: number;\n}\n\n/**\n * Step Executor class\n *\n * Responsible for executing individual workflow steps by invoking\n * the appropriate agent or tool.\n */\nexport class StepExecutor {\n private config: StepExecutorConfig;\n\n constructor(config: StepExecutorConfig = {}) {\n this.config = {\n defaultTimeout: DEFAULT_STEP_TIMEOUT_MS,\n ...config,\n };\n }\n\n /**\n * Execute a step node\n */\n async execute(\n node: WorkflowNode,\n context: WorkflowContext,\n ): Promise<StepResult> {\n const startTime = Date.now();\n const config = node.config as StepNodeConfig;\n\n if (config.type !== \"step\") {\n throw new Error(\n `StepExecutor can only execute 'step' nodes, but node \"${node.id}\" has type '${config.type}'. ` +\n `This is likely a bug in the DAG executor routing.`,\n );\n }\n\n try {\n // Notify start\n const resolvedInput = await this.resolveInput(config.input, context);\n this.config.onStepStart?.(node.id, resolvedInput);\n\n // Execute with timeout\n const timeout = config.timeout ? parseDuration(config.timeout) : this.config.defaultTimeout!;\n\n const output = await this.executeWithTimeout(\n () => this.executeStep(config, resolvedInput, context),\n timeout,\n node.id,\n );\n\n // Notify completion\n this.config.onStepComplete?.(node.id, output);\n\n return {\n success: true,\n output,\n executionTime: Date.now() - startTime,\n };\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n\n // Notify error\n this.config.onStepError?.(node.id, error as Error);\n\n return {\n success: false,\n error: errorMessage,\n executionTime: Date.now() - startTime,\n };\n }\n }\n\n /**\n * Resolve step input from context\n */\n private async resolveInput(\n input: StepNodeConfig[\"input\"],\n context: WorkflowContext,\n ): Promise<unknown> {\n if (input === undefined) {\n // Default to the original workflow input\n return context.input;\n }\n\n if (typeof input === \"function\") {\n return await input(context);\n }\n\n return input;\n }\n\n /**\n * Execute step with timeout\n *\n * Uses Promise.race() to properly handle timeout cleanup.\n * The timeout is always cleared in the finally block to prevent memory leaks.\n */\n private async executeWithTimeout<T>(\n fn: () => Promise<T>,\n timeout: number,\n nodeId: string,\n ): Promise<T> {\n let timeoutId: ReturnType<typeof setTimeout> | undefined;\n\n const timeoutPromise = new Promise<never>((_, reject) => {\n timeoutId = setTimeout(() => {\n reject(new Error(`Step \"${nodeId}\" timed out after ${timeout}ms`));\n }, timeout);\n });\n\n try {\n return await Promise.race([fn(), timeoutPromise]);\n } finally {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n }\n }\n\n /**\n * Execute the actual step (agent or tool)\n */\n private async executeStep(\n config: StepNodeConfig,\n input: unknown,\n context: WorkflowContext,\n ): Promise<unknown> {\n if (config.agent) {\n return await this.executeAgent(config.agent, input, context);\n }\n\n if (config.tool) {\n return await this.executeTool(config.tool, input);\n }\n\n throw new Error(\"Step must have either 'agent' or 'tool' specified\");\n }\n\n /**\n * Execute an agent\n */\n private async executeAgent(\n agent: string | Agent,\n input: unknown,\n context: WorkflowContext,\n ): Promise<unknown> {\n // Resolve agent from registry if string\n const resolvedAgent = typeof agent === \"string\" ? this.getAgent(agent) : agent;\n\n // Prepare input for agent\n const agentInput = typeof input === \"string\" ? input : JSON.stringify(input);\n\n // Execute agent\n const response: AgentResponse = await resolvedAgent.generate({\n input: agentInput,\n context,\n });\n\n // Return the agent's response\n return {\n text: response.text,\n toolCalls: response.toolCalls,\n status: response.status,\n usage: response.usage,\n };\n }\n\n /**\n * Execute a tool\n */\n private async executeTool(\n tool: string | Tool,\n input: unknown,\n ): Promise<unknown> {\n // Resolve tool from registry if string\n const resolvedTool = typeof tool === \"string\" ? this.getTool(tool) : tool;\n\n // Execute tool\n const result = await resolvedTool.execute(\n input as Record<string, unknown>,\n {\n agentId: \"workflow\",\n blobStorage: this.config.blobStorage,\n },\n );\n\n return result;\n }\n\n /**\n * Get agent from registry\n */\n private getAgent(id: string): Agent {\n if (!this.config.agentRegistry) {\n throw new Error(\n `Agent registry not configured. Cannot resolve agent \"${id}\"`,\n );\n }\n\n const agent = this.config.agentRegistry.get(id);\n if (!agent) {\n const available = this.config.agentRegistry.list?.() ?? [];\n const suggestion = available.length > 0\n ? ` Available agents: ${available.slice(0, 5).join(\", \")}${\n available.length > 5 ? 
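A sketch of exercising the `StepExecutor` directly with an inline tool registry; the `uppercase` tool, the loose cast to `Tool` (whose full interface lives outside this file), and the import paths are all assumptions for illustration.

```typescript
// Hypothetical direct use of StepExecutor with a one-tool registry.
import { StepExecutor } from "./executor/step-executor.ts";
import type { WorkflowNode } from "./types.ts";
import type { Tool } from "./types/tool.ts";

const uppercase = {
  // Only `execute` is exercised here; the rest of the Tool interface is
  // omitted, hence the loose cast below.
  execute: (input: Record<string, unknown>) =>
    Promise.resolve(String(input.text).toUpperCase()),
} as unknown as Tool;

const executor = new StepExecutor({
  toolRegistry: {
    get: (id) => (id === "uppercase" ? uppercase : undefined),
    list: () => ["uppercase"],
  },
});

const node: WorkflowNode = {
  id: "shout",
  config: { type: "step", tool: "uppercase", input: { text: "hello" } },
};

const result = await executor.execute(node, { input: { text: "hello" } });
// result ~ { success: true, output: "HELLO", executionTime: <ms> }
```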
\"...\" : \"\"\n }`\n : \" No agents are registered.\";\n throw new Error(`Agent not found: \"${id}\".${suggestion}`);\n }\n\n return agent;\n }\n\n /**\n * Get tool from registry\n */\n private getTool(id: string): Tool {\n if (!this.config.toolRegistry) {\n throw new Error(\n `Tool registry not configured. Cannot resolve tool \"${id}\"`,\n );\n }\n\n const tool = this.config.toolRegistry.get(id);\n if (!tool) {\n const available = this.config.toolRegistry.list?.() ?? [];\n const suggestion = available.length > 0\n ? ` Available tools: ${available.slice(0, 5).join(\", \")}${\n available.length > 5 ? \"...\" : \"\"\n }`\n : \" No tools are registered.\";\n throw new Error(`Tool not found: \"${id}\".${suggestion}`);\n }\n\n return tool;\n }\n\n /**\n * Check if a step should be skipped\n */\n async shouldSkip(\n node: WorkflowNode,\n context: WorkflowContext,\n ): Promise<boolean> {\n const config = node.config;\n\n if (!config.skip) {\n return false;\n }\n\n return await config.skip(context);\n }\n\n /**\n * Create initial node state\n */\n createInitialState(nodeId: string): NodeState {\n return {\n nodeId,\n status: \"pending\",\n attempt: 0,\n };\n }\n\n /**\n * Update node state for running\n */\n createRunningState(nodeId: string, input: unknown, attempt: number): NodeState {\n return {\n nodeId,\n status: \"running\",\n input,\n attempt,\n startedAt: new Date(),\n };\n }\n\n /**\n * Update node state for completion\n *\n * @param result - The step execution result\n * @param previousState - The previous node state (contains nodeId)\n */\n createCompletedState(\n result: StepResult,\n previousState: NodeState,\n ): NodeState {\n if (result.success) {\n return {\n ...previousState,\n status: \"completed\",\n output: result.output,\n completedAt: new Date(),\n };\n }\n\n return {\n ...previousState,\n status: \"failed\",\n error: result.error,\n completedAt: new Date(),\n };\n }\n\n /**\n * Update node state for skip\n */\n createSkippedState(nodeId: string): NodeState {\n return {\n nodeId,\n status: \"skipped\",\n attempt: 0,\n completedAt: new Date(),\n };\n }\n}\n", "/**\n * Workflow Executor\n *\n * Main orchestrator for executing durable workflows\n */\n\nimport type {\n BlobResolver,\n NodeState,\n StepBuilderContext,\n WorkflowContext,\n WorkflowDefinition,\n WorkflowNode,\n WorkflowRun,\n WorkflowStatus,\n} from \"../types.ts\";\nimport { generateId, parseDuration } from \"../types.ts\";\nimport { hasLockSupport, type WorkflowBackend } from \"../backends/types.ts\";\nimport { DAGExecutor } from \"./dag-executor.ts\";\nimport { CheckpointManager } from \"./checkpoint-manager.ts\";\nimport { StepExecutor, type StepExecutorConfig } from \"./step-executor.ts\";\nimport type { BlobStorage } from \"../blob/types.ts\";\n\n/**\n * Workflow executor configuration\n */\nexport interface WorkflowExecutorConfig {\n /** Backend for persistence */\n backend: WorkflowBackend;\n /** Blob storage for large data */\n blobStorage?: BlobStorage;\n /** Step executor configuration */\n stepExecutor?: StepExecutorConfig;\n /** Maximum concurrent parallel executions */\n maxConcurrency?: number;\n /** Enable debug logging */\n debug?: boolean;\n /** Lock duration in milliseconds for distributed execution (default: 30000) */\n lockDuration?: number;\n /** Enable distributed locking (default: true if backend supports it) */\n enableLocking?: boolean;\n /** Callback when workflow starts */\n onStart?: (run: WorkflowRun) => void;\n /** Callback when workflow completes */\n onComplete?: (run: WorkflowRun) 
=> void;\n /** Callback when workflow fails */\n onError?: (run: WorkflowRun, error: Error) => void;\n /** Callback when workflow is waiting */\n onWaiting?: (run: WorkflowRun, nodeId: string) => void;\n}\n\n/**\n * Handle for a running workflow\n */\nexport interface WorkflowHandle<TOutput = unknown> {\n /** Run ID */\n runId: string;\n /** Get current status */\n status(): Promise<WorkflowRun>;\n /** Wait for completion and get result */\n result(): Promise<TOutput>;\n /** Cancel the workflow */\n cancel(): Promise<void>;\n}\n\n/**\n * Workflow Executor class\n *\n * Main entry point for executing workflows. Handles:\n * - Starting new workflow runs\n * - Resuming from checkpoints\n * - Coordinating DAG execution\n * - Managing workflow lifecycle\n */\nexport class WorkflowExecutor {\n private config: WorkflowExecutorConfig;\n private stepExecutor: StepExecutor;\n private checkpointManager: CheckpointManager;\n private dagExecutor: DAGExecutor;\n private workflows = new Map<string, WorkflowDefinition<any, any>>();\n private blobResolver?: BlobResolver;\n\n /** Default lock duration: 30 seconds */\n private static readonly DEFAULT_LOCK_DURATION = 30000;\n\n constructor(config: WorkflowExecutorConfig) {\n this.config = {\n maxConcurrency: 10,\n debug: false,\n lockDuration: WorkflowExecutor.DEFAULT_LOCK_DURATION,\n ...config,\n };\n\n // Initialize components\n this.stepExecutor = new StepExecutor({\n ...this.config.stepExecutor,\n blobStorage: this.config.blobStorage,\n });\n\n this.checkpointManager = new CheckpointManager({\n backend: this.config.backend,\n debug: this.config.debug,\n });\n\n this.dagExecutor = new DAGExecutor({\n stepExecutor: this.stepExecutor,\n checkpointManager: this.checkpointManager,\n maxConcurrency: this.config.maxConcurrency,\n debug: this.config.debug,\n // onWaiting is intentionally a no-op here - waiting state is handled\n // by executeAsync() after DAG execution returns with waiting: true\n onWaiting: () => {},\n });\n\n if (this.config.blobStorage) {\n const bs = this.config.blobStorage;\n this.blobResolver = {\n getText: (ref) => ref.__kind === \"blob\" ? bs.getText(ref.id) : Promise.resolve(null),\n getBytes: (ref) => ref.__kind === \"blob\" ? bs.getBytes(ref.id) : Promise.resolve(null),\n getStream: (ref) => ref.__kind === \"blob\" ? bs.getStream(ref.id) : Promise.resolve(null),\n stat: (ref) => ref.__kind === \"blob\" ? bs.stat(ref.id) : Promise.resolve(null),\n delete: (ref) => ref.__kind === \"blob\" ? 
bs.delete(ref.id) : Promise.resolve(undefined),\n };\n }\n }\n\n /**\n * Register a workflow definition\n */\n register<TInput, TOutput>(workflow: WorkflowDefinition<TInput, TOutput>): void {\n this.workflows.set(workflow.id, workflow);\n }\n\n /**\n * Get a registered workflow\n */\n getWorkflow(id: string): WorkflowDefinition<any, any> | undefined {\n return this.workflows.get(id);\n }\n\n /**\n * Start a new workflow run\n */\n async start<TInput, TOutput>(\n workflowId: string,\n input: TInput,\n options?: { runId?: string },\n ): Promise<WorkflowHandle<TOutput>> {\n const workflow = this.workflows.get(workflowId);\n if (!workflow) {\n throw new Error(`Workflow not found: ${workflowId}`);\n }\n\n // Validate input if schema provided\n if (workflow.inputSchema) {\n workflow.inputSchema.parse(input);\n }\n\n // Create run\n const run: WorkflowRun<TInput, TOutput> = {\n id: options?.runId || generateId(\"run\"),\n workflowId,\n version: workflow.version,\n status: \"pending\",\n input,\n nodeStates: {},\n currentNodes: [],\n context: { input },\n checkpoints: [],\n pendingApprovals: [],\n createdAt: new Date(),\n };\n\n // Persist run\n await this.config.backend.createRun(run);\n\n // Start execution asynchronously\n this.executeAsync(run.id).catch((error) => {\n console.error(`Workflow ${run.id} failed:`, error);\n });\n\n return this.createHandle<TOutput>(run.id);\n }\n\n /**\n * Resume a paused/waiting workflow\n */\n async resume(runId: string, fromCheckpoint?: string): Promise<void> {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (run.status !== \"waiting\" && run.status !== \"pending\") {\n throw new Error(\n `Cannot resume workflow run \"${runId}\": current status is \"${run.status}\". ` +\n `Only runs in \"waiting\" or \"pending\" status can be resumed.`,\n );\n }\n\n // Get workflow definition\n const workflow = this.workflows.get(run.workflowId);\n if (!workflow) {\n throw new Error(`Workflow not found: ${run.workflowId}`);\n }\n\n // Get nodes\n const nodes = this.resolveNodes(workflow, run.context);\n\n // Get resume point\n const resumeInfo = await this.checkpointManager.prepareResume(\n runId,\n nodes,\n fromCheckpoint,\n );\n\n // If an explicit checkpoint was requested but not found, throw error\n if (fromCheckpoint && !resumeInfo) {\n throw new Error(\n `Checkpoint \"${fromCheckpoint}\" not found for run \"${runId}\". 
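A sketch of registering and starting a workflow through the executor above; the backend is left abstract (`declare const`), the workflow id and agent ids are illustrative, and only the definition fields exercised by this executor are shown.

```typescript
// Hypothetical end-to-end start: register a definition, start a run, await it.
import { WorkflowExecutor } from "./executor/workflow-executor.ts";
import type { WorkflowBackend } from "./backends/types.ts";
import type { WorkflowDefinition } from "./types.ts";

declare const backend: WorkflowBackend; // any persistence backend (memory, Redis, ...)

const executor = new WorkflowExecutor({
  backend,
  debug: true,
  // The agent ids below must resolve through stepExecutor.agentRegistry
  // (see StepExecutorConfig above); omitted here for brevity.
});

const researchAndWrite: WorkflowDefinition = {
  id: "research-and-write",
  steps: [
    { id: "research", config: { type: "step", agent: "researcher" } },
    { id: "write", config: { type: "step", agent: "writer" } },
  ],
};

executor.register(researchAndWrite);

const handle = await executor.start("research-and-write", {
  topic: "durable workflows",
});

console.log(handle.runId); // e.g. "run_..."
const output = await handle.result(); // polls until completed/failed/cancelled
console.log(output); // accumulated step outputs keyed by node id
```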
` +\n `Cannot resume from non-existent checkpoint.`,\n );\n }\n\n if (resumeInfo) {\n // Update run state from checkpoint\n await this.config.backend.updateRun(runId, {\n status: \"running\",\n context: resumeInfo.context,\n nodeStates: resumeInfo.nodeStates,\n });\n }\n\n // Resume execution\n await this.executeAsync(runId, resumeInfo?.startFromNode);\n }\n\n /**\n * Execute a workflow run asynchronously\n *\n * Uses distributed locking (when backend supports it) to prevent\n * concurrent execution of the same workflow run.\n */\n async executeAsync(runId: string, startFromNode?: string): Promise<void> {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n // Get workflow definition\n const workflow = this.workflows.get(run.workflowId);\n if (!workflow) {\n throw new Error(`Workflow not found: ${run.workflowId}`);\n }\n\n // Try to acquire lock if backend supports it and locking is enabled\n const useLocking = this.config.enableLocking !== false &&\n hasLockSupport(this.config.backend);\n const lockDuration = this.config.lockDuration!;\n\n if (useLocking) {\n const acquired = await this.config.backend.acquireLock!(runId, lockDuration);\n if (!acquired) {\n throw new Error(\n `Cannot execute workflow run \"${runId}\": another worker is already executing it. ` +\n `This can happen when multiple workers try to execute the same run concurrently.`,\n );\n }\n\n if (this.config.debug) {\n console.log(`[WorkflowExecutor] Acquired lock for run: ${runId}`);\n }\n }\n\n try {\n // Update status to running\n await this.config.backend.updateRun(runId, {\n status: \"running\",\n startedAt: run.startedAt || new Date(),\n });\n\n // Notify start\n const updatedRun = await this.config.backend.getRun(runId);\n this.config.onStart?.(updatedRun!);\n\n // Resolve workflow nodes\n const nodes = this.resolveNodes(workflow, run.context);\n\n // Execute with timeout if configured\n const result = await this.executeWithTimeout(\n () => this.dagExecutor.execute(nodes, run as WorkflowRun, startFromNode),\n workflow.timeout,\n );\n\n // Update run based on result\n if (result.completed) {\n // Workflow completed successfully\n const finalRun = await this.completeRun(\n runId,\n result.context,\n result.nodeStates,\n );\n\n // Validate output if schema provided\n if (workflow.outputSchema) {\n workflow.outputSchema.parse(finalRun.output);\n }\n\n // Call completion handler\n await workflow.onComplete?.(finalRun.output, finalRun.context);\n this.config.onComplete?.(finalRun);\n } else if (result.waiting) {\n // Workflow is waiting for approval/event\n await this.pauseRun(\n runId,\n result.waitingNode!,\n result.context,\n result.nodeStates,\n );\n\n const pausedRun = await this.config.backend.getRun(runId);\n this.config.onWaiting?.(pausedRun!, result.waitingNode!);\n } else {\n // Workflow failed\n const error = new Error(result.error || \"Unknown error\");\n await this.failRun(runId, error, result.context, result.nodeStates);\n\n await workflow.onError?.(error, result.context);\n this.config.onError?.(run, error);\n }\n } catch (error) {\n // Unexpected error during execution\n const err = error instanceof Error ? 
error : new Error(String(error));\n await this.failRun(runId, err, run.context, run.nodeStates);\n\n await workflow.onError?.(err, run.context);\n this.config.onError?.(run, err);\n\n throw error;\n } finally {\n // Always release lock when done\n if (useLocking) {\n await this.config.backend.releaseLock!(runId);\n\n if (this.config.debug) {\n console.log(`[WorkflowExecutor] Released lock for run: ${runId}`);\n }\n }\n }\n }\n\n /**\n * Resolve workflow nodes from definition\n */\n private resolveNodes(\n workflow: WorkflowDefinition,\n context: WorkflowContext,\n ): WorkflowNode[] {\n let nodes: WorkflowNode[];\n\n if (Array.isArray(workflow.steps)) {\n nodes = workflow.steps;\n } else {\n // Dynamic steps - call the function\n if (!this.config.blobStorage) {\n // Warn if blobStorage is missing but dynamic steps might need it?\n // For now, we allow it to be undefined if user doesn't use it.\n }\n\n const builderContext: StepBuilderContext = {\n input: context.input,\n context,\n blobStorage: this.config.blobStorage,\n blob: this.blobResolver,\n };\n nodes = workflow.steps(builderContext);\n }\n\n // Validate resolved nodes\n this.validateNodes(nodes, workflow.id);\n\n return nodes;\n }\n\n /**\n * Validate workflow nodes\n */\n private validateNodes(nodes: WorkflowNode[], workflowId: string): void {\n if (!Array.isArray(nodes)) {\n throw new Error(`Workflow \"${workflowId}\" steps must resolve to an array`);\n }\n\n if (nodes.length === 0) {\n throw new Error(`Workflow \"${workflowId}\" must have at least one step`);\n }\n\n const seenIds = new Set<string>();\n\n for (let i = 0; i < nodes.length; i++) {\n const node = nodes[i];\n\n if (!node) {\n throw new Error(`Workflow \"${workflowId}\" has undefined node at index ${i}`);\n }\n\n if (!node.id || typeof node.id !== \"string\") {\n throw new Error(`Workflow \"${workflowId}\" node at index ${i} has invalid ID`);\n }\n\n if (seenIds.has(node.id)) {\n throw new Error(`Workflow \"${workflowId}\" has duplicate node ID: \"${node.id}\"`);\n }\n seenIds.add(node.id);\n\n if (!node.config || typeof node.config !== \"object\") {\n throw new Error(`Workflow \"${workflowId}\" node \"${node.id}\" has invalid config`);\n }\n\n if (!node.config.type) {\n throw new Error(`Workflow \"${workflowId}\" node \"${node.id}\" config missing type`);\n }\n }\n }\n\n /**\n * Execute with optional timeout\n *\n * Uses Promise.race() to properly handle timeout cleanup.\n * The timeout is always cleared in the finally block to prevent memory leaks.\n */\n private async executeWithTimeout<T>(\n fn: () => Promise<T>,\n timeout?: string | number,\n ): Promise<T> {\n if (!timeout) {\n return fn();\n }\n\n const timeoutMs = parseDuration(timeout);\n let timeoutId: ReturnType<typeof setTimeout> | undefined;\n\n const timeoutPromise = new Promise<never>((_, reject) => {\n timeoutId = setTimeout(() => {\n reject(new Error(`Workflow timed out after ${timeoutMs}ms`));\n }, timeoutMs);\n });\n\n try {\n return await Promise.race([fn(), timeoutPromise]);\n } finally {\n if (timeoutId !== undefined) {\n clearTimeout(timeoutId);\n }\n }\n }\n\n /**\n * Mark run as completed\n */\n private async completeRun(\n runId: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<WorkflowRun> {\n // Determine output (last node's output or accumulated context)\n const output = this.determineOutput(context);\n\n await this.config.backend.updateRun(runId, {\n status: \"completed\" as WorkflowStatus,\n output,\n context,\n nodeStates,\n completedAt: new 
Date(),\n });\n\n return (await this.config.backend.getRun(runId))!;\n }\n\n /**\n * Mark run as failed\n */\n private async failRun(\n runId: string,\n error: Error,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<void> {\n await this.config.backend.updateRun(runId, {\n status: \"failed\" as WorkflowStatus,\n context,\n nodeStates,\n error: {\n message: error.message,\n stack: error.stack,\n },\n completedAt: new Date(),\n });\n }\n\n /**\n * Mark run as waiting\n */\n private async pauseRun(\n runId: string,\n waitingNode: string,\n context: WorkflowContext,\n nodeStates: Record<string, NodeState>,\n ): Promise<void> {\n await this.config.backend.updateRun(runId, {\n status: \"waiting\" as WorkflowStatus,\n currentNodes: [waitingNode],\n context,\n nodeStates,\n });\n }\n\n /**\n * Determine workflow output from context\n */\n private determineOutput(context: WorkflowContext): unknown {\n // Remove 'input' and return the rest as output\n const { input: _input, ...rest } = context;\n return rest;\n }\n\n /**\n * Create a handle for a workflow run\n */\n private createHandle<TOutput>(runId: string): WorkflowHandle<TOutput> {\n return {\n runId,\n status: () => this.config.backend.getRun(runId) as Promise<WorkflowRun>,\n result: () => this.waitForResult<TOutput>(runId),\n cancel: () => this.cancel(runId),\n };\n }\n\n /**\n * Wait for workflow result\n */\n private async waitForResult<TOutput>(\n runId: string,\n pollInterval: number = 1000,\n ): Promise<TOutput> {\n while (true) {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (run.status === \"completed\") {\n return run.output as TOutput;\n }\n\n if (run.status === \"failed\") {\n throw new Error(run.error?.message || \"Workflow failed\");\n }\n\n if (run.status === \"cancelled\") {\n throw new Error(\"Workflow was cancelled\");\n }\n\n // Wait before polling again\n await new Promise((resolve) => setTimeout(resolve, pollInterval));\n }\n }\n\n /**\n * Cancel a workflow run\n */\n async cancel(runId: string): Promise<void> {\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n if (run.status === \"completed\" || run.status === \"failed\") {\n throw new Error(\n `Cannot cancel workflow run \"${runId}\": run has already ${run.status}. 
` +\n `Only active runs (pending, running, waiting) can be cancelled.`,\n );\n }\n\n await this.config.backend.updateRun(runId, {\n status: \"cancelled\" as WorkflowStatus,\n completedAt: new Date(),\n });\n }\n\n /**\n * Get workflow run status\n */\n getStatus(runId: string): Promise<WorkflowRun | null> {\n return this.config.backend.getRun(runId);\n }\n\n /**\n * List workflow runs\n */\n listRuns(options?: {\n workflowId?: string;\n status?: WorkflowStatus | WorkflowStatus[];\n limit?: number;\n }): Promise<WorkflowRun[]> {\n return this.config.backend.listRuns({\n workflowId: options?.workflowId,\n status: options?.status,\n limit: options?.limit,\n });\n }\n}\n", "/**\n * Approval Manager\n *\n * Handles human-in-the-loop approval workflows\n */\n\nimport type {\n ApprovalDecision,\n PendingApproval,\n WaitNodeConfig,\n WorkflowContext,\n WorkflowRun,\n} from \"../types.ts\";\nimport { generateId, parseDuration } from \"../types.ts\";\nimport type { WorkflowBackend } from \"../backends/types.ts\";\nimport type { WorkflowExecutor } from \"../executor/workflow-executor.ts\";\n\n/**\n * Approval notification callback\n */\nexport type ApprovalNotifier = (\n approval: PendingApproval,\n run: WorkflowRun,\n) => Promise<void>;\n\n/**\n * Approval manager configuration\n */\nexport interface ApprovalManagerConfig {\n /** Backend for persistence */\n backend: WorkflowBackend;\n /** Workflow executor for resuming after approval */\n executor?: WorkflowExecutor;\n /** Notification callback */\n notifier?: ApprovalNotifier;\n /** Check expired approvals interval (ms) */\n expirationCheckInterval?: number;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Approval request result\n */\nexport interface ApprovalRequest {\n /** Approval ID */\n approvalId: string;\n /** Run ID */\n runId: string;\n /** Node ID */\n nodeId: string;\n /** Message for approver */\n message: string;\n /** Payload with context */\n payload: unknown;\n /** When approval expires */\n expiresAt?: Date;\n}\n\n/**\n * Approval Manager class\n *\n * Responsible for:\n * - Creating pending approvals\n * - Processing approval decisions\n * - Resuming workflows after approval\n * - Handling approval timeouts\n */\nexport class ApprovalManager {\n private config: ApprovalManagerConfig;\n private expirationTimer?: ReturnType<typeof setInterval>;\n private destroyed = false;\n\n constructor(config: ApprovalManagerConfig) {\n this.config = {\n expirationCheckInterval: 60000, // Check every minute\n debug: false,\n ...config,\n };\n\n // Start expiration checker if interval is set\n if (this.config.expirationCheckInterval && this.config.expirationCheckInterval > 0) {\n this.startExpirationChecker();\n }\n }\n\n /**\n * Create a pending approval request\n */\n async createApproval(\n run: WorkflowRun,\n nodeId: string,\n waitConfig: WaitNodeConfig,\n context: WorkflowContext,\n ): Promise<ApprovalRequest> {\n // Resolve payload if it's a function\n const payload = typeof waitConfig.payload === \"function\"\n ? await waitConfig.payload(context)\n : waitConfig.payload;\n\n // Calculate expiration\n const expiresAt = waitConfig.timeout\n ? 
new Date(Date.now() + parseDuration(waitConfig.timeout))\n : undefined;\n\n const approval: PendingApproval = {\n id: generateId(\"apr\"),\n nodeId,\n message: waitConfig.message || \"Approval required\",\n payload,\n approvers: waitConfig.approvers,\n requestedAt: new Date(),\n expiresAt,\n status: \"pending\",\n };\n\n if (this.config.debug) {\n console.log(`[ApprovalManager] Creating approval ${approval.id} for run ${run.id}`);\n }\n\n // Save to backend\n await this.config.backend.savePendingApproval(run.id, approval);\n\n // Notify approvers\n if (this.config.notifier) {\n try {\n await this.config.notifier(approval, run);\n } catch (error) {\n console.error(`[ApprovalManager] Failed to notify approvers:`, error);\n }\n }\n\n return {\n approvalId: approval.id,\n runId: run.id,\n nodeId,\n message: approval.message,\n payload: approval.payload,\n expiresAt: approval.expiresAt,\n };\n }\n\n /**\n * Get pending approval by ID\n */\n async getApproval(\n runId: string,\n approvalId: string,\n ): Promise<PendingApproval | null> {\n if (this.config.backend.getPendingApproval) {\n return this.config.backend.getPendingApproval(runId, approvalId);\n }\n\n // Fallback: get all and find\n const all = await this.config.backend.getPendingApprovals(runId);\n return all.find((a) => a.id === approvalId) || null;\n }\n\n /**\n * Get all pending approvals for a run\n */\n getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n return this.config.backend.getPendingApprovals(runId);\n }\n\n /**\n * Process an approval decision\n */\n async processDecision(\n runId: string,\n approvalId: string,\n decision: ApprovalDecision,\n ): Promise<void> {\n if (this.config.debug) {\n console.log(\n `[ApprovalManager] Processing decision for ${approvalId}: ${\n decision.approved ? 
\"approved\" : \"rejected\"\n }`,\n );\n }\n\n // Get the approval\n const approval = await this.getApproval(runId, approvalId);\n if (!approval) {\n throw new Error(`Approval not found: ${approvalId}`);\n }\n\n // Check if already decided\n if (approval.status !== \"pending\") {\n throw new Error(`Approval already processed: ${approval.status}`);\n }\n\n // Check if expired\n if (approval.expiresAt && new Date() > approval.expiresAt) {\n throw new Error(\"Approval has expired\");\n }\n\n // Check if approver is authorized\n if (\n approval.approvers &&\n approval.approvers.length > 0 &&\n !approval.approvers.includes(decision.approver)\n ) {\n throw new Error(\"Not authorized to approve this request\");\n }\n\n // Update the approval\n await this.config.backend.updateApproval(runId, approvalId, decision);\n\n // Get the run\n const run = await this.config.backend.getRun(runId);\n if (!run) {\n throw new Error(`Run not found: ${runId}`);\n }\n\n // Update run context with approval result\n const updatedContext = {\n ...run.context,\n [approval.nodeId]: {\n approved: decision.approved,\n approver: decision.approver,\n comment: decision.comment,\n decidedAt: new Date().toISOString(),\n },\n };\n\n // Update node state\n const updatedNodeStates = {\n ...run.nodeStates,\n [approval.nodeId]: {\n nodeId: approval.nodeId,\n status: \"completed\" as const,\n output: {\n approved: decision.approved,\n approver: decision.approver,\n comment: decision.comment,\n },\n attempt: 1,\n completedAt: new Date(),\n },\n };\n\n await this.config.backend.updateRun(runId, {\n context: updatedContext,\n nodeStates: updatedNodeStates,\n });\n\n // Resume workflow if approved and executor is available\n if (decision.approved && this.config.executor) {\n try {\n await this.config.executor.resume(runId);\n } catch (error) {\n console.error(`[ApprovalManager] Failed to resume workflow:`, error);\n throw error;\n }\n } else if (!decision.approved) {\n // If rejected, fail the workflow\n await this.config.backend.updateRun(runId, {\n status: \"failed\",\n error: {\n message: `Approval \"${approvalId}\" was rejected${\n decision.comment ? 
`: ${decision.comment}` : \"\"\n }`,\n },\n completedAt: new Date(),\n });\n }\n }\n\n /**\n * Approve an approval request\n */\n async approve(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n await this.processDecision(runId, approvalId, {\n approved: true,\n approver,\n comment,\n });\n }\n\n /**\n * Reject an approval request\n */\n async reject(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n await this.processDecision(runId, approvalId, {\n approved: false,\n approver,\n comment,\n });\n }\n\n /**\n * List all pending approvals across workflows\n */\n listAllPending(filter?: {\n workflowId?: string;\n approver?: string;\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n if (this.config.backend.listPendingApprovals) {\n return this.config.backend.listPendingApprovals({\n ...filter,\n status: \"pending\",\n });\n }\n\n // Fallback: not supported by backend\n console.warn(\n \"[ApprovalManager] listPendingApprovals not supported by backend\",\n );\n return Promise.resolve([]);\n }\n\n /**\n * Check and expire stale approvals\n */\n async checkExpiredApprovals(): Promise<void> {\n // Guard against post-stop execution\n if (this.destroyed) {\n return;\n }\n\n if (!this.config.backend.listPendingApprovals) {\n return;\n }\n\n const pending = await this.config.backend.listPendingApprovals({\n status: \"pending\",\n });\n\n const now = new Date();\n\n for (const { runId, approval } of pending) {\n if (approval.expiresAt && now > approval.expiresAt) {\n if (this.config.debug) {\n console.log(`[ApprovalManager] Expiring approval ${approval.id}`);\n }\n\n // Mark as expired\n await this.config.backend.updateApproval(runId, approval.id, {\n approved: false,\n approver: \"system\",\n comment: \"Approval expired\",\n });\n\n // Fail the workflow\n await this.config.backend.updateRun(runId, {\n status: \"failed\",\n error: {\n message: `Approval \"${approval.id}\" expired`,\n },\n completedAt: new Date(),\n });\n }\n }\n }\n\n /**\n * Start the expiration checker timer\n */\n private startExpirationChecker(): void {\n this.expirationTimer = setInterval(() => {\n this.checkExpiredApprovals().catch((error) => {\n console.error(`[ApprovalManager] Expiration check failed:`, error);\n });\n }, this.config.expirationCheckInterval);\n }\n\n /**\n * Stop the approval manager\n */\n stop(): void {\n this.destroyed = true;\n if (this.expirationTimer) {\n clearInterval(this.expirationTimer);\n this.expirationTimer = undefined;\n }\n }\n}\n", "/**\n * Agent Registry\n *\n * Registry for managing and looking up agents in workflow execution\n */\n\nimport type { Agent } from \"../../types/agent.ts\";\nimport type { Tool } from \"../../types/tool.ts\";\nimport type { AgentRegistry, ToolRegistry } from \"../executor/step-executor.ts\";\n\n/**\n * Default agent registry implementation\n *\n * Provides in-memory storage for agents that can be used in workflow steps.\n *\n * @example\n * ```typescript\n * import { DefaultAgentRegistry } from 'veryfront/ai/workflow/runtime/agent-registry';\n *\n * const registry = new DefaultAgentRegistry();\n *\n * // Register agents\n * registry.registerAgent(researchAgent);\n * registry.registerAgent(writerAgent);\n *\n * // Use with workflow client\n * const client = createWorkflowClient({\n * executor: {\n * agentRegistry: registry,\n * },\n * });\n * ```\n */\nexport class DefaultAgentRegistry implements AgentRegistry {\n private agents = new Map<string, 
Agent>();\n\n /**\n * Register an agent\n */\n registerAgent(agent: Agent): void {\n this.agents.set(agent.id, agent);\n }\n\n /**\n * Register multiple agents\n */\n registerAgents(agents: Agent[]): void {\n for (const agent of agents) {\n this.registerAgent(agent);\n }\n }\n\n /**\n * Get an agent by ID (implements AgentRegistry.get)\n */\n get(id: string): Agent | undefined {\n return this.agents.get(id);\n }\n\n /**\n * Check if an agent exists\n */\n hasAgent(id: string): boolean {\n return this.agents.has(id);\n }\n\n /**\n * List all registered agent IDs\n */\n listAgentIds(): string[] {\n return Array.from(this.agents.keys());\n }\n\n /**\n * Remove an agent\n */\n removeAgent(id: string): boolean {\n return this.agents.delete(id);\n }\n\n /**\n * Clear all registrations\n */\n clear(): void {\n this.agents.clear();\n }\n}\n\n/**\n * Default tool registry implementation\n *\n * Provides in-memory storage for tools that can be used in workflow steps.\n */\nexport class DefaultToolRegistry implements ToolRegistry {\n private tools = new Map<string, Tool>();\n\n /**\n * Register a tool\n */\n registerTool(tool: Tool): void {\n this.tools.set(tool.id, tool);\n }\n\n /**\n * Register multiple tools\n */\n registerTools(tools: Tool[]): void {\n for (const tool of tools) {\n this.registerTool(tool);\n }\n }\n\n /**\n * Get a tool by name (implements ToolRegistry.get)\n */\n get(name: string): Tool | undefined {\n return this.tools.get(name);\n }\n\n /**\n * Check if a tool exists\n */\n hasTool(name: string): boolean {\n return this.tools.has(name);\n }\n\n /**\n * List all registered tool names\n */\n listToolNames(): string[] {\n return Array.from(this.tools.keys());\n }\n\n /**\n * Remove a tool\n */\n removeTool(name: string): boolean {\n return this.tools.delete(name);\n }\n\n /**\n * Clear all tools\n */\n clear(): void {\n this.tools.clear();\n }\n}\n\n/**\n * Create a mock agent for testing\n *\n * Creates an agent that returns a predictable response without\n * making actual API calls.\n *\n * @example\n * ```typescript\n * const mockAgent = createMockAgent('test-agent', {\n * response: 'This is the mock response',\n * });\n *\n * registry.registerAgent(mockAgent);\n * ```\n */\nexport function createMockAgent(\n id: string,\n options: {\n response?: string;\n responseFunc?: (input: string) => string | Promise<string>;\n toolCalls?: Array<{\n id: string;\n name: string;\n args: Record<string, unknown>;\n }>;\n } = {},\n): Agent {\n return {\n id,\n config: {\n model: \"mock/test-model\",\n system: \"Mock agent for testing\",\n },\n async generate(input: { input: string | unknown[]; context?: Record<string, unknown> }) {\n const inputStr = typeof input.input === \"string\" ? input.input : JSON.stringify(input.input);\n\n let text: string;\n if (options.responseFunc) {\n text = await options.responseFunc(inputStr);\n } else {\n text = options.response ?? `Mock response for: ${inputStr.slice(0, 50)}...`;\n }\n\n return {\n text,\n messages: [\n { role: \"user\" as const, content: inputStr },\n { role: \"assistant\" as const, content: text },\n ],\n toolCalls: options.toolCalls?.map((tc) => ({\n ...tc,\n status: \"completed\" as const,\n })) ?? 
[],\n status: \"completed\" as const,\n usage: {\n promptTokens: 100,\n completionTokens: 50,\n totalTokens: 150,\n },\n };\n },\n stream() {\n throw new Error(\"Mock agent does not support streaming\");\n },\n respond() {\n throw new Error(\"Mock agent does not support HTTP responses\");\n },\n getMemory() {\n throw new Error(\"Mock agent does not have memory\");\n },\n getMemoryStats() {\n return Promise.resolve({\n totalMessages: 0,\n estimatedTokens: 0,\n type: \"mock\",\n });\n },\n async clearMemory() {\n // No-op\n },\n };\n}\n\n/**\n * Create a mock tool for testing\n *\n * @example\n * ```typescript\n * const mockTool = createMockTool('fetchData', {\n * result: { data: 'test' },\n * });\n *\n * registry.registerTool(mockTool);\n * ```\n */\nexport function createMockTool(\n id: string,\n options: {\n description?: string;\n result?: unknown;\n executeFunc?: (\n args: Record<string, unknown>,\n ) => unknown | Promise<unknown>;\n } = {},\n): Tool {\n // Import z dynamically to avoid bundling issues\n const mockSchema = { parse: (x: unknown) => x } as unknown as import(\"zod\").z.ZodSchema;\n\n return {\n id,\n description: options.description ?? `Mock tool: ${id}`,\n inputSchema: mockSchema,\n async execute(args: Record<string, unknown>) {\n if (options.executeFunc) {\n return await options.executeFunc(args);\n }\n return options.result ?? { success: true, tool: id, args };\n },\n };\n}\n", "/**\n * Workflow Client\n *\n * High-level API for interacting with workflows\n */\n\nimport type {\n PendingApproval,\n RunFilter,\n WorkflowDefinition,\n WorkflowRun,\n WorkflowStatus,\n} from \"../types.ts\";\nimport type { WorkflowBackend } from \"../backends/types.ts\";\nimport { MemoryBackend } from \"../backends/memory.ts\";\nimport {\n WorkflowExecutor,\n type WorkflowExecutorConfig,\n type WorkflowHandle,\n} from \"../executor/workflow-executor.ts\";\nimport { ApprovalManager, type ApprovalManagerConfig } from \"../runtime/approval-manager.ts\";\nimport type { Workflow } from \"../dsl/workflow.ts\";\n\n/**\n * Workflow client configuration\n */\nexport interface WorkflowClientConfig {\n /** Backend for persistence (default: MemoryBackend) */\n backend?: WorkflowBackend;\n /** Executor configuration */\n executor?: Partial<WorkflowExecutorConfig>;\n /** Approval manager configuration */\n approval?: Partial<ApprovalManagerConfig>;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Workflow Client class\n *\n * The main entry point for working with workflows.\n * Provides a simple API for:\n * - Registering workflow definitions\n * - Starting and managing workflow runs\n * - Handling approvals\n */\nexport class WorkflowClient {\n private backend: WorkflowBackend;\n private executor: WorkflowExecutor;\n private approvalManager: ApprovalManager;\n private debug: boolean;\n\n constructor(config: WorkflowClientConfig = {}) {\n this.debug = config.debug ?? false;\n this.backend = config.backend ?? 
new MemoryBackend({ debug: this.debug });\n\n // Initialize executor\n this.executor = new WorkflowExecutor({\n backend: this.backend,\n debug: this.debug,\n ...config.executor,\n });\n\n // Initialize approval manager\n this.approvalManager = new ApprovalManager({\n backend: this.backend,\n executor: this.executor,\n debug: this.debug,\n ...config.approval,\n });\n }\n\n // =========================================================================\n // Workflow Registration\n // =========================================================================\n\n /**\n * Register a workflow definition\n */\n register(\n workflow: Workflow | WorkflowDefinition,\n ): void {\n const definition = \"definition\" in workflow ? workflow.definition : workflow;\n\n this.executor.register(definition as WorkflowDefinition);\n\n if (this.debug) {\n console.log(`[WorkflowClient] Registered workflow: ${definition.id}`);\n }\n }\n\n /**\n * Register multiple workflows\n */\n registerAll(\n workflows: Array<Workflow | WorkflowDefinition>,\n ): void {\n for (const workflow of workflows) {\n this.register(workflow);\n }\n }\n\n // =========================================================================\n // Workflow Execution\n // =========================================================================\n\n /**\n * Start a new workflow run\n *\n * @example\n * ```typescript\n * const handle = await client.start('content-pipeline', {\n * topic: 'AI Safety',\n * requiresApproval: true,\n * });\n *\n * const result = await handle.result();\n * ```\n */\n start<TInput, TOutput = unknown>(\n workflowId: string,\n input: TInput,\n options?: { runId?: string },\n ): Promise<WorkflowHandle<TOutput>> {\n return this.executor.start<TInput, TOutput>(workflowId, input, options);\n }\n\n /**\n * Resume a paused/waiting workflow\n */\n resume(runId: string): Promise<void> {\n return this.executor.resume(runId);\n }\n\n /**\n * Cancel a workflow run\n */\n cancel(runId: string): Promise<void> {\n return this.executor.cancel(runId);\n }\n\n // =========================================================================\n // Run Management\n // =========================================================================\n\n /**\n * Get a workflow run by ID\n */\n getRun(runId: string): Promise<WorkflowRun | null> {\n return this.backend.getRun(runId);\n }\n\n /**\n * List workflow runs\n */\n listRuns(filter?: RunFilter): Promise<WorkflowRun[]> {\n return this.backend.listRuns(filter ?? 
{});\n }\n\n /**\n * Get runs by status\n */\n getRunsByStatus(\n status: WorkflowStatus | WorkflowStatus[],\n limit?: number,\n ): Promise<WorkflowRun[]> {\n return this.backend.listRuns({ status, limit });\n }\n\n /**\n * Get runs for a specific workflow\n */\n getRunsForWorkflow(\n workflowId: string,\n limit?: number,\n ): Promise<WorkflowRun[]> {\n return this.backend.listRuns({ workflowId, limit });\n }\n\n // =========================================================================\n // Approvals\n // =========================================================================\n\n /**\n * Get pending approvals for a run\n */\n getPendingApprovals(runId: string): Promise<PendingApproval[]> {\n return this.approvalManager.getPendingApprovals(runId);\n }\n\n /**\n * Approve an approval request\n *\n * @example\n * ```typescript\n * await client.approve(runId, approvalId, 'user@example.com', 'Looks good!');\n * ```\n */\n approve(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n return this.approvalManager.approve(runId, approvalId, approver, comment);\n }\n\n /**\n * Reject an approval request\n */\n reject(\n runId: string,\n approvalId: string,\n approver: string,\n comment?: string,\n ): Promise<void> {\n return this.approvalManager.reject(runId, approvalId, approver, comment);\n }\n\n /**\n * List all pending approvals across workflows\n */\n listAllPendingApprovals(filter?: {\n workflowId?: string;\n approver?: string;\n }): Promise<Array<{ runId: string; approval: PendingApproval }>> {\n return this.approvalManager.listAllPending(filter);\n }\n\n // =========================================================================\n // Lifecycle\n // =========================================================================\n\n /**\n * Get the underlying backend\n */\n getBackend(): WorkflowBackend {\n return this.backend;\n }\n\n /**\n * Get the underlying executor\n */\n getExecutor(): WorkflowExecutor {\n return this.executor;\n }\n\n /**\n * Get the underlying approval manager\n */\n getApprovalManager(): ApprovalManager {\n return this.approvalManager;\n }\n\n /**\n * Cleanup and shutdown\n */\n async destroy(): Promise<void> {\n this.approvalManager.stop();\n await this.backend.destroy();\n\n if (this.debug) {\n console.log(\"[WorkflowClient] Destroyed\");\n }\n }\n}\n\n/**\n * Create a workflow client with default configuration\n */\nexport function createWorkflowClient(\n config?: WorkflowClientConfig,\n): WorkflowClient {\n return new WorkflowClient(config);\n}\n", "/**\n * Temporal Adapter\n *\n * Adapter for using Temporal as the workflow execution backend.\n * Temporal is ideal for enterprise-grade, long-running workflows.\n *\n * @see https://docs.temporal.io/\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\n\n/**\n * Temporal adapter configuration\n */\nexport interface TemporalAdapterConfig extends BackendConfig {\n /** Temporal server address */\n address?: string;\n /** Temporal namespace */\n namespace?: string;\n /** Task queue name */\n taskQueue?: string;\n /** TLS configuration */\n tls?: {\n clientCertPath?: string;\n clientKeyPath?: string;\n serverRootCACertPath?: string;\n };\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Temporal Adapter\n *\n * Translates Veryfront workflow operations to Temporal workflows.\n *\n * @example\n * 
```typescript\n * import { TemporalAdapter } from 'veryfront/ai/workflow/backends/temporal';\n *\n * const backend = new TemporalAdapter({\n * address: 'localhost:7233',\n * namespace: 'default',\n * taskQueue: 'veryfront-workflows',\n * });\n * ```\n *\n * @note This is a stub implementation. Full implementation requires\n * the Temporal SDK and worker setup.\n */\nexport class TemporalAdapter implements WorkflowBackend {\n private config: TemporalAdapterConfig;\n\n constructor(config: TemporalAdapterConfig = {}) {\n this.config = {\n address: \"localhost:7233\",\n namespace: \"default\",\n taskQueue: \"veryfront-workflows\",\n debug: false,\n ...config,\n };\n\n console.warn(\n \"[TemporalAdapter] This is a stub implementation. \" +\n \"Full Temporal integration requires the Temporal SDK and worker setup. \" +\n \"See: https://docs.temporal.io/\",\n );\n }\n\n // Run Management\n createRun(_run: WorkflowRun): Promise<void> {\n // This would start a Temporal workflow execution\n throw new Error(\"TemporalAdapter.createRun not implemented\");\n }\n\n getRun(_runId: string): Promise<WorkflowRun | null> {\n // This would query the Temporal workflow state\n throw new Error(\"TemporalAdapter.getRun not implemented\");\n }\n\n updateRun(_runId: string, _patch: Partial<WorkflowRun>): Promise<void> {\n // This would signal the Temporal workflow\n throw new Error(\"TemporalAdapter.updateRun not implemented\");\n }\n\n listRuns(_filter: RunFilter): Promise<WorkflowRun[]> {\n // This would use Temporal's visibility API\n throw new Error(\"TemporalAdapter.listRuns not implemented\");\n }\n\n // Checkpointing (Temporal handles this internally via event sourcing)\n saveCheckpoint(_runId: string, _checkpoint: Checkpoint): Promise<void> {\n // Temporal provides automatic checkpointing via event sourcing\n // This is essentially a no-op\n return Promise.resolve();\n }\n\n getLatestCheckpoint(_runId: string): Promise<Checkpoint | null> {\n // Temporal maintains full workflow history\n throw new Error(\"TemporalAdapter.getLatestCheckpoint not implemented\");\n }\n\n // Approvals\n savePendingApproval(_runId: string, _approval: PendingApproval): Promise<void> {\n // This would update a Temporal workflow signal handler\n throw new Error(\"TemporalAdapter.savePendingApproval not implemented\");\n }\n\n getPendingApprovals(_runId: string): Promise<PendingApproval[]> {\n throw new Error(\"TemporalAdapter.getPendingApprovals not implemented\");\n }\n\n updateApproval(\n _runId: string,\n _approvalId: string,\n _decision: ApprovalDecision,\n ): Promise<void> {\n // This would send a signal to the Temporal workflow\n throw new Error(\"TemporalAdapter.updateApproval not implemented\");\n }\n\n // Queue (Temporal handles this internally)\n enqueue(_job: WorkflowJob): Promise<void> {\n // Temporal uses workflow execution, not explicit queues\n throw new Error(\"TemporalAdapter.enqueue not implemented\");\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n // Temporal workers poll for tasks automatically\n throw new Error(\"TemporalAdapter.dequeue not implemented\");\n }\n\n acknowledge(_runId: string): Promise<void> {\n // Temporal handles acknowledgment automatically\n return Promise.resolve();\n }\n\n // Lifecycle\n destroy(): Promise<void> {\n // Cleanup Temporal client connection\n return Promise.resolve();\n }\n}\n", "/**\n * Inngest Adapter\n *\n * Adapter for using Inngest as the workflow execution backend.\n * Inngest is ideal for serverless deployments (Vercel, Cloudflare, etc.)\n *\n * @see 
https://www.inngest.com/docs\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\nimport { agentLogger as logger } from \"@veryfront/utils\";\n\n/**\n * Inngest adapter configuration\n */\nexport interface InngestAdapterConfig extends BackendConfig {\n /** Inngest event key */\n eventKey?: string;\n /** Inngest signing key (for production) */\n signingKey?: string;\n /** Inngest API base URL (for self-hosted) */\n baseUrl?: string;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Inngest Adapter\n *\n * Translates Veryfront workflow operations to Inngest functions.\n *\n * @example\n * ```typescript\n * import { InngestAdapter } from 'veryfront/ai/workflow/backends/inngest';\n *\n * const backend = new InngestAdapter({\n * eventKey: process.env.INNGEST_EVENT_KEY,\n * signingKey: process.env.INNGEST_SIGNING_KEY,\n * });\n * ```\n *\n * @note This is a stub implementation. Full implementation requires\n * the Inngest SDK and server-side setup.\n */\nexport class InngestAdapter implements WorkflowBackend {\n private config: InngestAdapterConfig;\n\n constructor(config: InngestAdapterConfig = {}) {\n this.config = {\n debug: false,\n ...config,\n };\n\n logger.warn(\n \"[InngestAdapter] This is a stub implementation. \" +\n \"Full Inngest integration requires additional setup. \" +\n \"See: https://www.inngest.com/docs\",\n );\n }\n\n // Run Management\n createRun(_run: WorkflowRun): Promise<void> {\n throw new Error(\"InngestAdapter.createRun not implemented\");\n }\n\n getRun(_runId: string): Promise<WorkflowRun | null> {\n throw new Error(\"InngestAdapter.getRun not implemented\");\n }\n\n updateRun(_runId: string, _patch: Partial<WorkflowRun>): Promise<void> {\n throw new Error(\"InngestAdapter.updateRun not implemented\");\n }\n\n listRuns(_filter: RunFilter): Promise<WorkflowRun[]> {\n throw new Error(\"InngestAdapter.listRuns not implemented\");\n }\n\n // Checkpointing\n saveCheckpoint(_runId: string, _checkpoint: Checkpoint): Promise<void> {\n throw new Error(\"InngestAdapter.saveCheckpoint not implemented\");\n }\n\n getLatestCheckpoint(_runId: string): Promise<Checkpoint | null> {\n throw new Error(\"InngestAdapter.getLatestCheckpoint not implemented\");\n }\n\n // Approvals\n savePendingApproval(_runId: string, _approval: PendingApproval): Promise<void> {\n throw new Error(\"InngestAdapter.savePendingApproval not implemented\");\n }\n\n getPendingApprovals(_runId: string): Promise<PendingApproval[]> {\n throw new Error(\"InngestAdapter.getPendingApprovals not implemented\");\n }\n\n updateApproval(\n _runId: string,\n _approvalId: string,\n _decision: ApprovalDecision,\n ): Promise<void> {\n throw new Error(\"InngestAdapter.updateApproval not implemented\");\n }\n\n // Queue (Inngest handles this internally)\n enqueue(_job: WorkflowJob): Promise<void> {\n // Inngest uses events instead of queues\n // This would send an Inngest event\n throw new Error(\"InngestAdapter.enqueue not implemented\");\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n // Inngest handles job scheduling internally\n throw new Error(\"InngestAdapter.dequeue not implemented\");\n }\n\n acknowledge(_runId: string): Promise<void> {\n // Inngest handles acknowledgment internally\n return Promise.resolve();\n }\n\n // Lifecycle\n destroy(): Promise<void> {\n // No cleanup needed for Inngest\n return Promise.resolve();\n }\n}\n", "/**\n * 
Cloudflare Adapter\n *\n * Adapter for using Cloudflare Durable Objects as the workflow backend.\n * Ideal for edge deployments on Cloudflare Workers.\n *\n * @see https://developers.cloudflare.com/durable-objects/\n */\n\nimport type {\n ApprovalDecision,\n Checkpoint,\n PendingApproval,\n RunFilter,\n WorkflowJob,\n WorkflowRun,\n} from \"../types.ts\";\nimport type { BackendConfig, WorkflowBackend } from \"./types.ts\";\n\n/**\n * Cloudflare adapter configuration\n */\nexport interface CloudflareAdapterConfig extends BackendConfig {\n /** Durable Object namespace binding name */\n durableObjectBinding?: string;\n /** KV namespace binding name (for auxiliary storage) */\n kvBinding?: string;\n /** Queue binding name (for job queue) */\n queueBinding?: string;\n /** Enable debug logging */\n debug?: boolean;\n}\n\n/**\n * Cloudflare Adapter\n *\n * Uses Cloudflare Durable Objects for workflow state and\n * Cloudflare Queues for job distribution.\n *\n * @example\n * ```typescript\n * // In your Cloudflare Worker\n * import { CloudflareAdapter } from 'veryfront/ai/workflow/backends/cloudflare';\n *\n * export default {\n * async fetch(request, env) {\n * const backend = new CloudflareAdapter({\n * durableObjectBinding: 'WORKFLOW_DO',\n * kvBinding: 'WORKFLOW_KV',\n * queueBinding: 'WORKFLOW_QUEUE',\n * });\n *\n * // Use backend...\n * }\n * }\n * ```\n *\n * @note This is a stub implementation. Full implementation requires\n * Cloudflare Workers environment bindings.\n */\nexport class CloudflareAdapter implements WorkflowBackend {\n private config: CloudflareAdapterConfig;\n\n constructor(config: CloudflareAdapterConfig = {}) {\n this.config = {\n durableObjectBinding: \"WORKFLOW_DO\",\n kvBinding: \"WORKFLOW_KV\",\n queueBinding: \"WORKFLOW_QUEUE\",\n debug: false,\n ...config,\n };\n\n console.warn(\n \"[CloudflareAdapter] This is a stub implementation. \" +\n \"Full Cloudflare integration requires Workers environment bindings. 
\" +\n \"See: https://developers.cloudflare.com/durable-objects/\",\n );\n }\n\n // Run Management\n createRun(_run: WorkflowRun): Promise<void> {\n // This would create/get a Durable Object instance for the run\n throw new Error(\"CloudflareAdapter.createRun not implemented\");\n }\n\n getRun(_runId: string): Promise<WorkflowRun | null> {\n // This would fetch state from the Durable Object\n throw new Error(\"CloudflareAdapter.getRun not implemented\");\n }\n\n updateRun(_runId: string, _patch: Partial<WorkflowRun>): Promise<void> {\n // This would update state in the Durable Object\n throw new Error(\"CloudflareAdapter.updateRun not implemented\");\n }\n\n listRuns(_filter: RunFilter): Promise<WorkflowRun[]> {\n // This would query KV for run indexes\n throw new Error(\"CloudflareAdapter.listRuns not implemented\");\n }\n\n // Checkpointing\n saveCheckpoint(_runId: string, _checkpoint: Checkpoint): Promise<void> {\n // This would persist checkpoint to the Durable Object\n throw new Error(\"CloudflareAdapter.saveCheckpoint not implemented\");\n }\n\n getLatestCheckpoint(_runId: string): Promise<Checkpoint | null> {\n throw new Error(\"CloudflareAdapter.getLatestCheckpoint not implemented\");\n }\n\n // Approvals\n savePendingApproval(_runId: string, _approval: PendingApproval): Promise<void> {\n throw new Error(\"CloudflareAdapter.savePendingApproval not implemented\");\n }\n\n getPendingApprovals(_runId: string): Promise<PendingApproval[]> {\n throw new Error(\"CloudflareAdapter.getPendingApprovals not implemented\");\n }\n\n updateApproval(\n _runId: string,\n _approvalId: string,\n _decision: ApprovalDecision,\n ): Promise<void> {\n throw new Error(\"CloudflareAdapter.updateApproval not implemented\");\n }\n\n // Queue (using Cloudflare Queues)\n enqueue(_job: WorkflowJob): Promise<void> {\n // This would send a message to Cloudflare Queue\n throw new Error(\"CloudflareAdapter.enqueue not implemented\");\n }\n\n dequeue(): Promise<WorkflowJob | null> {\n // Cloudflare Queues use push model, not pull\n throw new Error(\"CloudflareAdapter.dequeue not implemented\");\n }\n\n acknowledge(_runId: string): Promise<void> {\n // Cloudflare Queues handle acknowledgment differently\n return Promise.resolve();\n }\n\n // Lifecycle\n destroy(): Promise<void> {\n // No cleanup needed - Cloudflare manages lifecycle\n return Promise.resolve();\n }\n}\n", "/**\n * useWorkflow Hook\n *\n * React hook for tracking and interacting with workflow runs.\n *\n * @example\n * ```tsx\n * import { useWorkflow } from 'veryfront/ai/workflow/react';\n *\n * function WorkflowDashboard({ runId }: { runId: string }) {\n * const {\n * run,\n * status,\n * progress,\n * currentNodes,\n * pendingApprovals,\n * cancel,\n * retry,\n * isLoading,\n * error,\n * } = useWorkflow({ runId });\n *\n * return (\n * <div>\n * <h2>Status: {status}</h2>\n * <p>Progress: {progress}%</p>\n * {pendingApprovals.length > 0 && (\n * <p>{pendingApprovals.length} approvals pending</p>\n * )}\n * </div>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useEffect, useRef, useState } from \"react\";\nimport type { NodeState, PendingApproval, WorkflowRun, WorkflowStatus } from \"../types.ts\";\n\n/**\n * Options for useWorkflow hook\n */\nexport interface UseWorkflowOptions {\n /** Run ID to track */\n runId: string;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Polling interval in ms (defaults to 2000) */\n pollInterval?: number;\n\n /** Enable automatic polling */\n autoRefresh?: boolean;\n\n /** 
Callback when status changes */\n onStatusChange?: (status: WorkflowStatus, previousStatus: WorkflowStatus) => void;\n\n /** Callback when workflow completes */\n onComplete?: (run: WorkflowRun) => void;\n\n /** Callback when workflow fails */\n onError?: (error: Error, run?: WorkflowRun) => void;\n\n /** Callback when approval is required */\n onApprovalRequired?: (approval: PendingApproval) => void;\n}\n\n/**\n * Result from useWorkflow hook\n */\nexport interface UseWorkflowResult {\n /** The workflow run data */\n run: WorkflowRun | null;\n\n /** Current workflow status */\n status: WorkflowStatus;\n\n /** Progress percentage (0-100) */\n progress: number;\n\n /** Currently executing node IDs */\n currentNodes: string[];\n\n /** Node states by node ID */\n nodeStates: Record<string, NodeState>;\n\n /** Pending approvals */\n pendingApprovals: PendingApproval[];\n\n /** Refresh the workflow data */\n refresh: () => Promise<void>;\n\n /** Cancel the workflow */\n cancel: () => Promise<void>;\n\n /** Retry a failed workflow */\n retry: () => Promise<void>;\n\n /** Loading state */\n isLoading: boolean;\n\n /** Error state */\n error: Error | null;\n}\n\n/**\n * useWorkflow - Track and interact with a workflow run\n */\nexport function useWorkflow(options: UseWorkflowOptions): UseWorkflowResult {\n const {\n runId,\n apiBase = \"/api/workflows\",\n pollInterval = 2000,\n autoRefresh = true,\n onStatusChange,\n onComplete,\n onError,\n onApprovalRequired,\n } = options;\n\n const [run, setRun] = useState<WorkflowRun | null>(null);\n const [isLoading, setIsLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n\n const previousStatusRef = useRef<WorkflowStatus | null>(null);\n const previousApprovalsRef = useRef<Set<string>>(new Set());\n const abortControllerRef = useRef<AbortController | null>(null);\n\n /**\n * Fetch workflow data\n */\n const fetchRun = useCallback(async () => {\n if (!runId) return;\n\n try {\n const response = await fetch(`${apiBase}/runs/${runId}`, {\n signal: abortControllerRef.current?.signal,\n });\n\n if (!response.ok) {\n throw new Error(`Failed to fetch workflow: ${response.status}`);\n }\n\n const data = await response.json();\n const workflowRun = data as WorkflowRun;\n\n // Check for status changes\n if (previousStatusRef.current && previousStatusRef.current !== workflowRun.status) {\n onStatusChange?.(workflowRun.status, previousStatusRef.current);\n }\n previousStatusRef.current = workflowRun.status;\n\n // Check for completion\n if (workflowRun.status === \"completed\") {\n onComplete?.(workflowRun);\n }\n\n // Check for failures\n if (workflowRun.status === \"failed\") {\n const failedError = new Error(\"Workflow failed\");\n onError?.(failedError, workflowRun);\n }\n\n // Check for new approvals\n if (workflowRun.pendingApprovals) {\n for (const approval of workflowRun.pendingApprovals) {\n if (approval.status === \"pending\" && !previousApprovalsRef.current.has(approval.id)) {\n previousApprovalsRef.current.add(approval.id);\n onApprovalRequired?.(approval);\n }\n }\n }\n\n setRun(workflowRun);\n setError(null);\n } catch (err) {\n if (err instanceof Error && err.name === \"AbortError\") {\n return;\n }\n const fetchError = err instanceof Error ? 
err : new Error(String(err));\n setError(fetchError);\n onError?.(fetchError);\n }\n }, [runId, apiBase, onStatusChange, onComplete, onError, onApprovalRequired]);\n\n /**\n * Initial fetch and polling setup\n */\n useEffect(() => {\n abortControllerRef.current = new AbortController();\n\n const doFetch = async () => {\n setIsLoading(true);\n await fetchRun();\n setIsLoading(false);\n };\n\n doFetch();\n\n // Set up polling for active workflows\n let intervalId: ReturnType<typeof setInterval> | undefined;\n if (autoRefresh) {\n intervalId = setInterval(() => {\n // Only poll if workflow is still active\n const currentStatus = previousStatusRef.current;\n if (currentStatus && ![\"completed\", \"failed\", \"cancelled\"].includes(currentStatus)) {\n fetchRun();\n }\n }, pollInterval);\n }\n\n return () => {\n abortControllerRef.current?.abort();\n if (intervalId) {\n clearInterval(intervalId);\n }\n };\n }, [runId, autoRefresh, pollInterval, fetchRun]);\n\n /**\n * Refresh workflow data\n */\n const refresh = useCallback(async () => {\n setIsLoading(true);\n await fetchRun();\n setIsLoading(false);\n }, [fetchRun]);\n\n /**\n * Cancel the workflow\n */\n const cancel = useCallback(async () => {\n if (!runId) return;\n\n try {\n const response = await fetch(`${apiBase}/runs/${runId}/cancel`, {\n method: \"POST\",\n });\n\n if (!response.ok) {\n throw new Error(`Failed to cancel workflow: ${response.status}`);\n }\n\n await refresh();\n } catch (err) {\n const cancelError = err instanceof Error ? err : new Error(String(err));\n setError(cancelError);\n throw cancelError;\n }\n }, [runId, apiBase, refresh]);\n\n /**\n * Retry a failed workflow\n */\n const retry = useCallback(async () => {\n if (!runId) return;\n\n try {\n const response = await fetch(`${apiBase}/runs/${runId}/retry`, {\n method: \"POST\",\n });\n\n if (!response.ok) {\n throw new Error(`Failed to retry workflow: ${response.status}`);\n }\n\n await refresh();\n } catch (err) {\n const retryError = err instanceof Error ? err : new Error(String(err));\n setError(retryError);\n throw retryError;\n }\n }, [runId, apiBase, refresh]);\n\n // Calculate progress\n const calculateProgress = (): number => {\n if (!run?.nodeStates) return 0;\n\n const states = Object.values(run.nodeStates);\n if (states.length === 0) return 0;\n\n const completed = states.filter(\n (s) => s.status === \"completed\" || s.status === \"skipped\",\n ).length;\n\n return Math.round((completed / states.length) * 100);\n };\n\n return {\n run,\n status: run?.status ?? \"pending\",\n progress: calculateProgress(),\n currentNodes: run?.currentNodes ?? [],\n nodeStates: run?.nodeStates ?? {},\n pendingApprovals: run?.pendingApprovals?.filter((a) => a.status === \"pending\") ?? 
[],\n refresh,\n cancel,\n retry,\n isLoading,\n error,\n };\n}\n", "/**\n * useApproval Hook\n *\n * React hook for handling workflow approval interactions.\n *\n * @example\n * ```tsx\n * import { useApproval } from 'veryfront/ai/workflow/react';\n *\n * function ApprovalUI({ runId, approvalId }: Props) {\n * const {\n * approval,\n * approve,\n * reject,\n * isSubmitting,\n * error,\n * } = useApproval({ runId, approvalId });\n *\n * if (!approval) return <p>Loading...</p>;\n *\n * return (\n * <div>\n * <h3>{approval.message}</h3>\n * <p>Requested by: {approval.stepId}</p>\n * <button onClick={() => approve('Looks good!')}>\n * Approve\n * </button>\n * <button onClick={() => reject('Needs changes')}>\n * Reject\n * </button>\n * </div>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useEffect, useState } from \"react\";\nimport type { ApprovalDecision, PendingApproval } from \"../types.ts\";\n\n/**\n * Options for useApproval hook\n */\nexport interface UseApprovalOptions {\n /** Workflow run ID */\n runId: string;\n\n /** Approval ID */\n approvalId: string;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Current user/approver name */\n approver?: string;\n\n /** Callback on successful approval/rejection */\n onDecision?: (decision: ApprovalDecision) => void;\n\n /** Callback on error */\n onError?: (error: Error) => void;\n}\n\n/**\n * Result from useApproval hook\n */\nexport interface UseApprovalResult {\n /** The approval data */\n approval: PendingApproval | null;\n\n /** Approve the request */\n approve: (comment?: string) => Promise<void>;\n\n /** Reject the request */\n reject: (comment?: string) => Promise<void>;\n\n /** Submit a custom decision */\n submitDecision: (decision: ApprovalDecision) => Promise<void>;\n\n /** Whether a submission is in progress */\n isSubmitting: boolean;\n\n /** Loading state for initial fetch */\n isLoading: boolean;\n\n /** Error state */\n error: Error | null;\n\n /** Whether the approval is still pending */\n isPending: boolean;\n\n /** Whether the approval has been resolved */\n isResolved: boolean;\n}\n\n/**\n * useApproval - Handle workflow approval interactions\n */\nexport function useApproval(options: UseApprovalOptions): UseApprovalResult {\n const {\n runId,\n approvalId,\n apiBase = \"/api/workflows\",\n approver = \"unknown\",\n onDecision,\n onError,\n } = options;\n\n const [approval, setApproval] = useState<PendingApproval | null>(null);\n const [isLoading, setIsLoading] = useState(true);\n const [isSubmitting, setIsSubmitting] = useState(false);\n const [error, setError] = useState<Error | null>(null);\n\n /**\n * Fetch approval data\n */\n useEffect(() => {\n const fetchApproval = async () => {\n try {\n const response = await fetch(\n `${apiBase}/runs/${runId}/approvals/${approvalId}`,\n );\n\n if (!response.ok) {\n throw new Error(`Failed to fetch approval: ${response.status}`);\n }\n\n const data = await response.json();\n setApproval(data as PendingApproval);\n setError(null);\n } catch (err) {\n const fetchError = err instanceof Error ? 
err : new Error(String(err));\n setError(fetchError);\n onError?.(fetchError);\n } finally {\n setIsLoading(false);\n }\n };\n\n if (runId && approvalId) {\n fetchApproval();\n }\n }, [runId, approvalId, apiBase, onError]);\n\n /**\n * Submit a decision\n */\n const submitDecision = useCallback(\n async (decision: ApprovalDecision) => {\n if (!runId || !approvalId) return;\n\n setIsSubmitting(true);\n setError(null);\n\n try {\n const response = await fetch(\n `${apiBase}/runs/${runId}/approvals/${approvalId}`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(decision),\n },\n );\n\n if (!response.ok) {\n throw new Error(`Failed to submit decision: ${response.status}`);\n }\n\n // Update local state\n setApproval((prev) =>\n prev\n ? {\n ...prev,\n status: decision.approved ? \"approved\" : \"rejected\",\n resolvedAt: new Date(),\n resolvedBy: decision.approver,\n comment: decision.comment,\n }\n : null\n );\n\n onDecision?.(decision);\n } catch (err) {\n const submitError = err instanceof Error ? err : new Error(String(err));\n setError(submitError);\n onError?.(submitError);\n throw submitError;\n } finally {\n setIsSubmitting(false);\n }\n },\n [runId, approvalId, apiBase, onDecision, onError],\n );\n\n /**\n * Approve the request\n */\n const approve = useCallback(\n async (comment?: string) => {\n await submitDecision({\n approved: true,\n approver,\n comment,\n });\n },\n [submitDecision, approver],\n );\n\n /**\n * Reject the request\n */\n const reject = useCallback(\n async (comment?: string) => {\n await submitDecision({\n approved: false,\n approver,\n comment,\n });\n },\n [submitDecision, approver],\n );\n\n return {\n approval,\n approve,\n reject,\n submitDecision,\n isSubmitting,\n isLoading,\n error,\n isPending: approval?.status === \"pending\",\n isResolved: approval?.status !== \"pending\",\n };\n}\n", "/**\n * useWorkflowList Hook\n *\n * React hook for listing and filtering workflow runs.\n *\n * @example\n * ```tsx\n * import { useWorkflowList } from 'veryfront/ai/workflow/react';\n *\n * function WorkflowList() {\n * const {\n * runs,\n * isLoading,\n * hasMore,\n * loadMore,\n * setFilter,\n * } = useWorkflowList({\n * workflowId: 'content-pipeline',\n * status: 'running',\n * });\n *\n * return (\n * <div>\n * {runs.map(run => (\n * <div key={run.id}>\n * {run.id} - {run.status}\n * </div>\n * ))}\n * {hasMore && (\n * <button onClick={loadMore}>Load More</button>\n * )}\n * </div>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useEffect, useState } from \"react\";\nimport type { RunFilter, WorkflowRun, WorkflowStatus } from \"../types.ts\";\n\n/**\n * Options for useWorkflowList hook\n */\nexport interface UseWorkflowListOptions {\n /** Filter by workflow ID */\n workflowId?: string;\n\n /** Filter by status */\n status?: WorkflowStatus | WorkflowStatus[];\n\n /** Filter runs created after this date */\n createdAfter?: Date;\n\n /** Filter runs created before this date */\n createdBefore?: Date;\n\n /** Page size (defaults to 20) */\n pageSize?: number;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Enable automatic refresh */\n autoRefresh?: boolean;\n\n /** Refresh interval in ms (defaults to 5000) */\n refreshInterval?: number;\n}\n\n/**\n * Result from useWorkflowList hook\n */\nexport interface UseWorkflowListResult {\n /** List of workflow runs */\n runs: WorkflowRun[];\n\n /** Total count (if available) */\n totalCount?: number;\n\n /** Loading state */\n 
isLoading: boolean;\n\n /** Error state */\n error: Error | null;\n\n /** Whether there are more results */\n hasMore: boolean;\n\n /** Load more results */\n loadMore: () => Promise<void>;\n\n /** Refresh the list */\n refresh: () => Promise<void>;\n\n /** Update the filter */\n setFilter: (filter: Partial<UseWorkflowListOptions>) => void;\n\n /** Current filter */\n filter: RunFilter;\n}\n\n/**\n * useWorkflowList - List and filter workflow runs\n */\nexport function useWorkflowList(\n options: UseWorkflowListOptions = {},\n): UseWorkflowListResult {\n const {\n workflowId,\n status,\n createdAfter,\n createdBefore,\n pageSize = 20,\n apiBase = \"/api/workflows\",\n autoRefresh = false,\n refreshInterval = 5000,\n } = options;\n\n const [runs, setRuns] = useState<WorkflowRun[]>([]);\n const [totalCount, setTotalCount] = useState<number | undefined>();\n const [isLoading, setIsLoading] = useState(true);\n const [error, setError] = useState<Error | null>(null);\n const [hasMore, setHasMore] = useState(false);\n const [cursor, setCursor] = useState<string | undefined>();\n\n const [filter, setFilterState] = useState<RunFilter>({\n workflowId,\n status,\n createdAfter,\n createdBefore,\n limit: pageSize,\n });\n\n /**\n * Build query string from filter\n */\n const buildQueryString = useCallback(\n (filterToUse: RunFilter, cursorToUse?: string): string => {\n const params = new URLSearchParams();\n\n if (filterToUse.workflowId) {\n params.set(\"workflowId\", filterToUse.workflowId);\n }\n\n if (filterToUse.status) {\n const statuses = Array.isArray(filterToUse.status)\n ? filterToUse.status\n : [filterToUse.status];\n statuses.forEach((s) => params.append(\"status\", s));\n }\n\n if (filterToUse.createdAfter) {\n params.set(\"createdAfter\", filterToUse.createdAfter.toISOString());\n }\n\n if (filterToUse.createdBefore) {\n params.set(\"createdBefore\", filterToUse.createdBefore.toISOString());\n }\n\n if (filterToUse.limit) {\n params.set(\"limit\", String(filterToUse.limit));\n }\n\n if (cursorToUse) {\n params.set(\"cursor\", cursorToUse);\n }\n\n return params.toString();\n },\n [],\n );\n\n /**\n * Fetch runs\n */\n const fetchRuns = useCallback(\n async (append: boolean = false) => {\n try {\n const queryString = buildQueryString(filter, append ? cursor : undefined);\n const response = await fetch(`${apiBase}/runs?${queryString}`);\n\n if (!response.ok) {\n throw new Error(`Failed to fetch runs: ${response.status}`);\n }\n\n const data = await response.json();\n const fetchedRuns = (data.runs || data) as WorkflowRun[];\n const nextCursor = data.cursor;\n const total = data.totalCount;\n\n if (append) {\n setRuns((prev) => [...prev, ...fetchedRuns]);\n } else {\n setRuns(fetchedRuns);\n }\n\n setCursor(nextCursor);\n setHasMore(!!nextCursor || fetchedRuns.length === filter.limit);\n setTotalCount(total);\n setError(null);\n } catch (err) {\n const fetchError = err instanceof Error ? 
err : new Error(String(err));\n setError(fetchError);\n }\n },\n [apiBase, filter, cursor, buildQueryString],\n );\n\n /**\n * Initial fetch\n */\n useEffect(() => {\n const doFetch = async () => {\n setIsLoading(true);\n await fetchRuns(false);\n setIsLoading(false);\n };\n\n doFetch();\n }, [filter]); // Re-fetch when filter changes\n\n /**\n * Auto-refresh setup\n */\n useEffect(() => {\n if (!autoRefresh) return;\n\n const intervalId = setInterval(() => {\n fetchRuns(false);\n }, refreshInterval);\n\n return () => clearInterval(intervalId);\n }, [autoRefresh, refreshInterval, fetchRuns]);\n\n /**\n * Load more results\n */\n const loadMore = useCallback(async () => {\n if (!hasMore || isLoading) return;\n setIsLoading(true);\n await fetchRuns(true);\n setIsLoading(false);\n }, [hasMore, isLoading, fetchRuns]);\n\n /**\n * Refresh the list\n */\n const refresh = useCallback(async () => {\n setCursor(undefined);\n setIsLoading(true);\n await fetchRuns(false);\n setIsLoading(false);\n }, [fetchRuns]);\n\n /**\n * Update filter\n */\n const setFilter = useCallback(\n (newFilter: Partial<UseWorkflowListOptions>) => {\n setCursor(undefined); // Reset pagination\n setFilterState((prev) => ({\n ...prev,\n workflowId: newFilter.workflowId ?? prev.workflowId,\n status: newFilter.status ?? prev.status,\n createdAfter: newFilter.createdAfter ?? prev.createdAfter,\n createdBefore: newFilter.createdBefore ?? prev.createdBefore,\n limit: newFilter.pageSize ?? prev.limit,\n }));\n },\n [],\n );\n\n return {\n runs,\n totalCount,\n isLoading,\n error,\n hasMore,\n loadMore,\n refresh,\n setFilter,\n filter,\n };\n}\n", "/**\n * useWorkflowStart Hook\n *\n * React hook for starting workflow runs.\n *\n * @example\n * ```tsx\n * import { useWorkflowStart } from 'veryfront/ai/workflow/react';\n *\n * function StartWorkflowButton() {\n * const { start, isStarting, error, lastRunId } = useWorkflowStart({\n * workflowId: 'content-pipeline',\n * onStart: (runId) => {\n * console.log('Started:', runId);\n * },\n * });\n *\n * return (\n * <button\n * onClick={() => start({ topic: 'AI Safety' })}\n * disabled={isStarting}\n * >\n * {isStarting ? 'Starting...' 
: 'Start Workflow'}\n * </button>\n * );\n * }\n * ```\n */\n\nimport { useCallback, useState } from \"react\";\n\n/**\n * Options for useWorkflowStart hook\n */\nexport interface UseWorkflowStartOptions {\n /** Workflow ID to start */\n workflowId: string;\n\n /** API endpoint base (defaults to /api/workflows) */\n apiBase?: string;\n\n /** Callback when workflow starts successfully */\n onStart?: (runId: string) => void;\n\n /** Callback on error */\n onError?: (error: Error) => void;\n}\n\n/**\n * Result from useWorkflowStart hook\n */\nexport interface UseWorkflowStartResult<TInput = unknown> {\n /** Start a new workflow run */\n start: (input: TInput) => Promise<string>;\n\n /** Whether a start is in progress */\n isStarting: boolean;\n\n /** Last started run ID */\n lastRunId: string | null;\n\n /** Error state */\n error: Error | null;\n\n /** Reset error state */\n resetError: () => void;\n}\n\n/**\n * useWorkflowStart - Start new workflow runs\n */\nexport function useWorkflowStart<TInput = unknown>(\n options: UseWorkflowStartOptions,\n): UseWorkflowStartResult<TInput> {\n const { workflowId, apiBase = \"/api/workflows\", onStart, onError } = options;\n\n const [isStarting, setIsStarting] = useState(false);\n const [lastRunId, setLastRunId] = useState<string | null>(null);\n const [error, setError] = useState<Error | null>(null);\n\n /**\n * Start a new workflow run\n */\n const start = useCallback(\n async (input: TInput): Promise<string> => {\n setIsStarting(true);\n setError(null);\n\n try {\n const response = await fetch(`${apiBase}/${workflowId}/start`, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({ input }),\n });\n\n if (!response.ok) {\n const errorData = await response.json().catch(() => ({}));\n throw new Error(\n errorData.message || `Failed to start workflow: ${response.status}`,\n );\n }\n\n const data = await response.json();\n const runId = data.runId || data.id;\n\n setLastRunId(runId);\n onStart?.(runId);\n\n return runId;\n } catch (err) {\n const startError = err instanceof Error ? err : new Error(String(err));\n setError(startError);\n onError?.(startError);\n throw startError;\n } finally {\n setIsStarting(false);\n }\n },\n [workflowId, apiBase, onStart, onError],\n );\n\n /**\n * Reset error state\n */\n const resetError = useCallback(() => {\n setError(null);\n }, []);\n\n return {\n start,\n isStarting,\n lastRunId,\n error,\n resetError,\n };\n}\n"],
5
5
  "mappings": ";AAqcO,SAAS,cAAc,UAAmC;AAC/D,MAAI,OAAO,aAAa,UAAU;AAChC,QAAI,WAAW,GAAG;AAChB,YAAM,IAAI,MAAM,gCAAgC,QAAQ,EAAE;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,SAAS,MAAM,kCAAkC;AAC/D,MAAI,CAAC,OAAO;AACV,UAAM,IAAI,MAAM,4BAA4B,QAAQ,EAAE;AAAA,EACxD;AAEA,QAAM,QAAQ,MAAM,CAAC;AACrB,QAAM,OAAO,MAAM,CAAC;AAEpB,MAAI,CAAC,SAAS,CAAC,MAAM;AACnB,UAAM,IAAI,MAAM,4BAA4B,QAAQ,EAAE;AAAA,EACxD;AAEA,QAAM,MAAM,WAAW,KAAK;AAG5B,MAAI,OAAO,GAAG;AACZ,UAAM,IAAI,MAAM,8BAA8B,QAAQ,EAAE;AAAA,EAC1D;AAEA,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO,MAAM;AAAA,IACf,KAAK;AACH,aAAO,MAAM,KAAK;AAAA,IACpB,KAAK;AACH,aAAO,MAAM,KAAK,KAAK;AAAA,IACzB,KAAK;AACH,aAAO,MAAM,KAAK,KAAK,KAAK;AAAA,IAC9B;AACE,YAAM,IAAI,MAAM,0BAA0B,IAAI,EAAE;AAAA,EACpD;AACF;AA+CO,SAAS,WAAW,SAAiB,MAAc;AACxD,QAAM,aAAa,OAAO,WAAW,EAAE,MAAM,GAAG,EAAE;AAClD,SAAO,GAAG,MAAM,IAAI,UAAU;AAChC;;;ACzaO,SAAS,SACd,SAC2B;AAE3B,MAAI,CAAC,QAAQ,IAAI;AACf,UAAM,IAAI,MAAM,4BAA4B;AAAA,EAC9C;AAEA,MAAI,CAAC,QAAQ,OAAO;AAClB,UAAM,IAAI,MAAM,aAAa,QAAQ,EAAE,qBAAqB;AAAA,EAC9D;AAGA,QAAM,aAAkD;AAAA,IACtD,IAAI,QAAQ;AAAA,IACZ,aAAa,QAAQ;AAAA,IACrB,SAAS,QAAQ;AAAA,IACjB,aAAa,QAAQ;AAAA,IACrB,cAAc,QAAQ;AAAA,IACtB,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,YAAY,QAAQ;AAAA,EACtB;AAEA,SAAO;AAAA,IACL;AAAA,IACA,IAAI,QAAQ;AAAA,IACZ,SAAS,QAAQ;AAAA,EACnB;AACF;AAQO,SAAS,YAAY,OAAuC;AACjE,SAAO,MAAM,IAAI,CAAC,MAAM,UAAU;AAChC,QAAI,UAAU,GAAG;AACf,aAAO;AAAA,IACT;AACA,UAAM,WAAW,MAAM,QAAQ,CAAC;AAChC,WAAO;AAAA,MACL,GAAG;AAAA,MACH,WAAW,WAAW,CAAC,SAAS,EAAE,IAAI;AAAA,IACxC;AAAA,EACF,CAAC;AACH;AAqBO,SAAS,IACd,OACgB;AAChB,QAAM,SAAyB,CAAC;AAChC,QAAM,UAAU,oBAAI,IAAY;AAEhC,aAAW,CAAC,IAAI,KAAK,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC/C,QAAI;AACJ,QAAI;AAEJ,QAAI,UAAU,SAAS,eAAe,OAAO;AAE3C,eAAS,MAAM,KAAK,MAAM;AAC1B,aAAO;AAAA,QACL,GAAG,MAAM;AAAA,QACT,IAAI;AAAA,QACJ,WAAW,MAAM;AAAA,MACnB;AAAA,IACF,OAAO;AAEL,YAAM,eAAe;AACrB,eAAS,aAAa,MAAM;AAC5B,aAAO;AAAA,QACL,GAAG;AAAA,QACH,IAAI;AAAA,MACN;AAAA,IACF;AAGA,QAAI,QAAQ,IAAI,MAAM,GAAG;AACvB,YAAM,IAAI,MAAM,uCAAuC,MAAM,GAAG;AAAA,IAClE;AACA,YAAQ,IAAI,MAAM;AAElB,WAAO,KAAK,IAAI;AAAA,EAClB;AAEA,SAAO;AACT;AAKO,SAAS,UACd,SACG,cACW;AACd,SAAO;AAAA,IACL,GAAG;AAAA,IACH,WAAW,CAAC,GAAI,KAAK,aAAa,CAAC,GAAI,GAAG,YAAY;AAAA,EACxD;AACF;;;ACjLO,SAAS,KAAK,IAAY,SAAoC;AAEnE,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAGA,MAAI,CAAC,QAAQ,SAAS,CAAC,QAAQ,MAAM;AACnC,UAAM,IAAI,MAAM,SAAS,EAAE,yCAAyC;AAAA,EACtE;AAEA,MAAI,QAAQ,SAAS,QAAQ,MAAM;AACjC,UAAM,IAAI,MAAM,SAAS,EAAE,0CAA0C;AAAA,EACvE;AAGA,QAAM,mBAAmB,QAAQ,cAAc,CAAC,CAAC,QAAQ;AAEzD,QAAM,SAAyB;AAAA,IAC7B,MAAM;AAAA,IACN,OAAO,QAAQ;AAAA,IACf,MAAM,QAAQ;AAAA,IACd,OAAO,QAAQ;AAAA,IACf,YAAY;AAAA,IACZ,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAMO,SAAS,UACd,IACA,OACA,SACc;AACd,SAAO,KAAK,IAAI,EAAE,GAAG,SAAS,MAAM,CAAC;AACvC;AAMO,SAAS,SACd,IACA,MACA,SACc;AACd,SAAO,KAAK,IAAI,EAAE,GAAG,SAAS,KAAK,CAAC;AACtC;;;AClEO,SAAS,SACd,IACA,OACA,UAA2B,CAAC,GACd;AAEd,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,MAAI,CAAC,SAAS,MAAM,WAAW,GAAG;AAChC,UAAM,IAAI,MAAM,kBAAkB,EAAE,qCAAqC;AAAA,EAC3E;AAIA,QAAM,gBAAgB,MAAM,IAAI,CAAC,MAAM,UAAU;AAC/C,QAAI,CAAC,KAAK,MAAM,OAAO,KAAK,OAAO,UAAU;AAC3C,YAAM,IAAI,MAAM,uBAAuB,KAAK,iBAAiB,EAAE,kBAAkB;AAAA,IACnF;AACA,WAAO;AAAA,MACL,GAAG;AAAA,MACH,IAAI,KAAK,GAAG,WAAW,GAAG,EAAE,GAAG,IAAI,KAAK,KAAK,GAAG,EAAE,IAAI,KAAK,EAAE;AAAA,IAC/D;AAAA,EACF,CAAC;AAED,QAAM,SAA6B;AAAA,IACjC,MAAM;AAAA,IACN,OAAO;AAAA,IACP,UAAU,QAAQ,YAAY;AAAA,IAC9B,YAAY,QAAQ,cAAc;AAAA,IAClC,OAAO,QAAQ;A
AAA,IACf,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;AC/CO,SAAS,IACd,IACA,SACc;AAEd,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,MAAI,CAAC,QAAQ,OAAO;AAClB,UAAM,IAAI,MAAM,aAAa,EAAE,gCAAgC;AAAA,EACjE;AAEA,MAAI,CAAC,QAAQ,WAAW;AACtB,UAAM,IAAI,MAAM,aAAa,EAAE,sCAAsC;AAAA,EACvE;AAEA,QAAM,SAAwB;AAAA,IAC5B,MAAM;AAAA,IACN,OAAO,QAAQ;AAAA,IACf,WAAW,QAAQ;AAAA,IACnB,aAAa,QAAQ;AAAA,IACrB,YAAY,QAAQ,cAAc;AAAA,IAClC,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;ACxCO,SAAS,YACd,IACA,SACc;AAEd,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,MAAI,CAAC,QAAQ,UAAU;AACrB,UAAM,IAAI,MAAM,qBAAqB,EAAE,qCAAqC;AAAA,EAC9E;AAEA,QAAM,SAAgC;AAAA,IACpC,MAAM;AAAA,IACN,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,QAAQ,QAAQ;AAAA,IAChB,YAAY,QAAQ;AAAA,IACpB,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;ACLO,SAAS,OAAO,IAAY,SAAsC;AAEvE,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,MAAI,CAAC,QAAQ,WAAW;AACtB,UAAM,IAAI,MAAM,WAAW,EAAE,4BAA4B;AAAA,EAC3D;AAEA,MAAI,CAAC,QAAQ,QAAQ,QAAQ,KAAK,WAAW,GAAG;AAC9C,UAAM,IAAI,MAAM,WAAW,EAAE,sCAAsC;AAAA,EACrE;AAGA,QAAM,kBAAkB,QAAQ,KAAK,IAAI,CAAC,UAAU;AAAA,IAClD,GAAG;AAAA,IACH,IAAI,KAAK,GAAG,WAAW,GAAG,EAAE,QAAQ,IAAI,KAAK,KAAK,GAAG,EAAE,SAAS,KAAK,EAAE;AAAA,EACzE,EAAE;AAEF,QAAM,kBAAkB,QAAQ,MAAM,IAAI,CAAC,UAAU;AAAA,IACnD,GAAG;AAAA,IACH,IAAI,KAAK,GAAG,WAAW,GAAG,EAAE,QAAQ,IAAI,KAAK,KAAK,GAAG,EAAE,SAAS,KAAK,EAAE;AAAA,EACzE,EAAE;AAEF,QAAM,SAA2B;AAAA,IAC/B,MAAM;AAAA,IACN,WAAW,QAAQ;AAAA,IACnB,MAAM;AAAA,IACN,MAAM;AAAA,IACN,YAAY,QAAQ,cAAc;AAAA,IAClC,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAMO,SAAS,KACd,IACA,WACA,OACc;AACd,SAAO,OAAO,IAAI,EAAE,WAAW,MAAM,MAAM,CAAC;AAC9C;AAMO,SAAS,OACd,IACA,WACA,OACc;AACd,SAAO,OAAO,IAAI;AAAA,IAChB,WAAW,OAAO,QAAQ,CAAE,MAAM,UAAU,GAAG;AAAA,IAC/C,MAAM;AAAA,EACR,CAAC;AACH;;;ACxEO,SAAS,gBACd,IACA,UAAkC,CAAC,GACrB;AAEd,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,QAAM,SAAyB;AAAA,IAC7B,MAAM;AAAA,IACN,UAAU;AAAA,IACV,SAAS,QAAQ,WAAW;AAAA,IAC5B,SAAS,QAAQ;AAAA,IACjB,WAAW,QAAQ;AAAA,IACnB,SAAS,QAAQ;AAAA;AAAA,IAEjB,YAAY;AAAA,IACZ,OAAO,QAAQ;AAAA,IACf,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAoCO,SAAS,aACd,IACA,SACc;AAEd,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,MAAI,CAAC,QAAQ,WAAW;AACtB,UAAM,IAAI,MAAM,iBAAiB,EAAE,6BAA6B;AAAA,EAClE;AAEA,QAAM,SAAyB;AAAA,IAC7B,MAAM;AAAA,IACN,UAAU;AAAA,IACV,WAAW,QAAQ;AAAA,IACnB,SAAS,QAAQ;AAAA;AAAA,IAEjB,YAAY;AAAA,IACZ,OAAO,QAAQ;AAAA,IACf,MAAM,QAAQ;AAAA,EAChB;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAWO,SAAS,MAAM,IAAY,UAAyC;AAEzE,MAAI,CAAC,MAAM,OAAO,OAAO,YAAY,GAAG,KAAK,MAAM,IAAI;AACrD,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACtD;AAEA,QAAM,SAAyB;AAAA,IAC7B,MAAM;AAAA,IACN,UAAU;AAAA,IACV,WAAW;AAAA,IACX,SAAS;AAAA,IACT,YAAY;AAAA;AAAA,EACd;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;;;AChLA,OAAO,cAAc;AAIrB,IAAI,UAA+B;AAInC,IAAI,OAAO,SAAS,aAAa;AAC/B,YAAU;AACZ,OAAO;AAGL,SAAO,WAAiB,EAAE,KAAK,CAAC,QAAQ;AACtC,cAAU;AAAA,EACZ,CAAC;AACH;AAGA,SAAS,aAA2B;AAClC,MAAI;AAAS,WAAO;AAGpB,SAAO;AACT;AAKO,IAAM,UAAU,CAAC,SAAyB,WAAW,EAAE,QAAQ,IAAI;AAanE,IAAM,OAAO,IAAI,UAA4B,WAAW,EAAE,KAAK,GAAG,KAAK;AAMvE,IAAM,MAAc,SAAS;;;ACS7B,SAAS,YAAY,OAAuC;AACjE,SAAO;AACT;AA4CO,SAAS,QAAQ,gBAAuC;AAC7D,QAAM,QAAQ,IAAI,MAAM,eAA
e,OAAO;AAC9C,QAAM,OAAO,kBAAkB,eAAe,IAAI;AAClD,SAAO,eAAe,OAAO,WAAW;AAAA,IACtC,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,cAAc;AAAA,EAChB,CAAC;AACD,SAAO;AACT;;;ACjHO,IAAM,SAAS,OAAO,SAAS;AAC/B,IAAM,SACX,OAAQ,WAA8D,YAAY,eACjF,WAA8D,SAAS,UAAU,SAChF;AACG,IAAM,QAAQ,OAAQ,WAAiC,QAAQ;AAC/D,IAAM,eAAe,OAAO,eAAe,eAAe,YAAY,cAC3E,mBAAmB;;;ACuErB,IAAM,iBAAN,MAA2C;AAAA,EAA3C;AACE,SAAQ,KAA4B;AACpC,SAAQ,KAAsC;AAC9C,SAAQ,OAA0C;AAClD,SAAQ,cAAc;AAAA;AAAA,EAEtB,MAAc,oBAAmC;AAC/C,QAAI,KAAK;AAAa;AAEtB,QAAI,CAAC,QAAQ;AACX,YAAM,QAAQ,YAAY;AAAA,QACxB,MAAM;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,MACX,CAAC,CAAC;AAAA,IACJ;AAGA,UAAM,CAAC,UAAU,UAAU,UAAU,IAAI,MAAM,QAAQ,IAAI;AAAA,MACzD,OAAO,kBAAkB;AAAA,MACzB,OAAO,SAAS;AAAA,MAChB,OAAO,WAAW;AAAA,IACpB,CAAC;AAED,SAAK,KAAK;AACV,SAAK,KAAK;AACV,SAAK,OAAO;AACZ,SAAK,cAAc;AAAA,EACrB;AAAA,EAEA,MAAM,aAAa,MAA+B;AAChD,UAAM,KAAK,kBAAkB;AAC7B,WAAO,MAAO,KAAK,GAAI,SAAS,MAAM,EAAE,UAAU,OAAO,CAAC;AAAA,EAC5D;AAAA,EAEA,MAAM,SAAS,MAAmC;AAChD,UAAM,KAAK,kBAAkB;AAC7B,WAAO,MAAO,KAAK,GAAI,SAAS,IAAI;AAAA,EACtC;AAAA,EAEA,MAAM,cAAc,MAAc,MAA6B;AAC7D,UAAM,KAAK,kBAAkB;AAC7B,UAAM,KAAK,GAAI,UAAU,MAAM,MAAM,EAAE,UAAU,OAAO,CAAC;AAAA,EAC3D;AAAA,EAEA,MAAM,UAAU,MAAc,MAAiC;AAC7D,UAAM,KAAK,kBAAkB;AAC7B,UAAM,KAAK,GAAI,UAAU,MAAM,IAAI;AAAA,EACrC;AAAA,EAEA,MAAM,OAAO,MAAgC;AAC3C,UAAM,KAAK,kBAAkB;AAC7B,QAAI;AACF,YAAM,KAAK,GAAI,OAAO,IAAI;AAC1B,aAAO;AAAA,IACT,SAAS,OAAY;AACnB,UAAI,MAAM,SAAS,UAAU;AAC3B,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,KAAK,MAAiC;AAC1C,UAAM,KAAK,kBAAkB;AAC7B,UAAM,OAAO,MAAM,KAAK,GAAI,KAAK,IAAI;AACrC,WAAO;AAAA,MACL,QAAQ,KAAK,OAAO;AAAA,MACpB,aAAa,KAAK,YAAY;AAAA,MAC9B,WAAW,KAAK,eAAe;AAAA,MAC/B,MAAM,KAAK;AAAA,MACX,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,MAAc,SAAkD;AAC1E,UAAM,KAAK,kBAAkB;AAC7B,UAAM,KAAK,GAAI,MAAM,MAAM,EAAE,WAAW,SAAS,aAAa,MAAM,CAAC;AAAA,EACvE;AAAA,EAEA,OAAO,QACL,MACwE;AACxE,UAAM,KAAK,kBAAkB;AAC7B,UAAM,UAAU,MAAM,KAAK,GAAI,QAAQ,MAAM,EAAE,eAAe,KAAK,CAAC;AACpE,eAAW,SAAS,SAAS;AAC3B,YAAM;AAAA,QACJ,MAAM,MAAM;AAAA,QACZ,QAAQ,MAAM,OAAO;AAAA,QACrB,aAAa,MAAM,YAAY;AAAA,MACjC;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,MAAc,SAAkD;AAC3E,UAAM,KAAK,kBAAkB;AAE7B,UAAM,KAAK,GAAI,GAAG,MAAM;AAAA,MACtB,WAAW,SAAS,aAAa;AAAA,MACjC,OAAO,SAAS,aAAa;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,YAAY,SAAgD;AAChE,UAAM,KAAK,kBAAkB;AAC7B,UAAM,UAAU,KAAK,KAAM;AAAA,MACzB,KAAK,GAAI,OAAO;AAAA,MAChB,GAAG,SAAS,UAAU,MAAM,GAAG,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,UAAU,GAAG,CAAC,CAAC;AAAA,IAC3E;AACA,UAAM,KAAK,GAAI,MAAM,SAAS,EAAE,WAAW,KAAK,CAAC;AACjD,WAAO;AAAA,EACT;AACF;AAMA,IAAM,iBAAN,MAA2C;AAAA,EACzC,MAAM,aAAa,MAA+B;AAEhD,WAAO,MAAM,KAAK,aAAa,IAAI;AAAA,EACrC;AAAA,EAEA,MAAM,SAAS,MAAmC;AAEhD,WAAO,MAAM,KAAK,SAAS,IAAI;AAAA,EACjC;AAAA,EAEA,MAAM,cAAc,MAAc,MAA6B;AAE7D,UAAM,KAAK,cAAc,MAAM,IAAI;AAAA,EACrC;AAAA,EAEA,MAAM,UAAU,MAAc,MAAiC;AAE7D,UAAM,KAAK,UAAU,MAAM,IAAI;AAAA,EACjC;AAAA,EAEA,MAAM,OAAO,MAAgC;AAC3C,QAAI;AAEF,YAAM,KAAK,KAAK,IAAI;AACpB,aAAO;AAAA,IACT,SAAS,OAAY;AAEnB,UAAI,iBAAiB,KAAK,OAAO,UAAU;AACzC,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,KAAK,MAAiC;AAE1C,UAAM,OAAO,MAAM,KAAK,KAAK,IAAI;AACjC,WAAO;AAAA,MACL,QAAQ,KAAK;AAAA,MACb,aAAa,KAAK;AAAA,MAClB,WAAW,KAAK;AAAA,MAChB,MAAM,KAAK;AAAA,MACX,OAAO,KAAK;AAAA,IACd;AAAA,EACF;AAAA,EAEA,MAAM,MAAM,MAAc,SAAkD;AAE1E,UAAM,KAAK,MAAM,MAAM,EAAE,WAAW,SAAS,aAAa,MAAM,CAAC;AAAA,EACnE;AAAA,EAEA,OAAO,QACL,MACwE;AAExE,qBAAiB,SAAS,KAAK,QAAQ,IAAI,GAAG;AAC5C,YAAM;AAAA,QACJ,MAAM,MAAM;AAAA,QACZ,QAAQ,MAAM;AAAA,QACd,aAAa,MAAM;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,MAAc,SAAkD;AAE3E,UAAM,KAAK,OAAO,MAAM,EAAE,WAAW,SAAS,aAAa,MAAM,CAAC;AAAA,EACpE;AAAA,EAEA,MAAM,YAAY,SAAgD;AAEhE,WAAO,MAAM,KAAK,YAAY,EAAE
,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC3D;AACF;AAgBO,SAAS,mBAA+B;AAC7C,MAAI,QAAQ;AACV,WAAO,IAAI,eAAe;AAAA,EAC5B,OAAO;AAEL,WAAO,IAAI,eAAe;AAAA,EAC5B;AACF;;;AC/QO,SAAS,eAAe,QAA2C;AACxE,SACE,OAAO,WAAW,YAClB,WAAW,QACX,UAAU,UACV,OAAQ,OAA0B,MAAM,KAAK,QAAQ;AAEzD;AAEO,SAAS,eAAe,QAA8C;AAC3E,SACE,OAAO,WAAW,YAClB,WAAW,QACX,aAAa,UACb,OAAQ,OAA6B,SAAS,QAAQ;AAE1D;;;ACnCO,SAAS,uBAAuB,MAAkC;AACvE,MAAI;AACF,QAAI,OAAO,SAAS,eAAe,eAAe,UAAU,GAAG;AAC7D,YAAM,QAAS,WAA8B,MAAM,IAAI,IAAI,IAAI;AAC/D,aAAO,UAAU,KAAK,SAAY;AAAA,IACpC;AACA,QAAI,eAAe,UAAU,GAAG;AAC9B,YAAM,QAAS,WAAiC,SAAS,IAAI,IAAI;AACjE,aAAO,UAAU,KAAK,SAAY;AAAA,IACpC;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,EACT;AACA,SAAO;AACT;;;ACOA,IAAI;AAEJ,SAAS,gBAAgB,QAAQ,OAAiB;AAChD,MAAI,SAAS,mBAAmB,QAAW;AACzC,qBAAiB,gBAAgB;AAAA,EACnC;AACA,SAAO;AACT;AAEA,IAAM,gBAAN,MAAsC;AAAA,EACpC,YACU,QACA,QAAkB,gBAAgB,GAC1C;AAFQ;AACA;AAAA,EACP;AAAA,EAEH,SAAS,OAAuB;AAC9B,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,WAAqB;AACnB,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,YAAoB,MAAuB;AAC/C,QAAI,KAAK,SAAS,eAAgB;AAChC,cAAQ,MAAM,IAAI,KAAK,MAAM,YAAY,OAAO,IAAI,GAAG,IAAI;AAAA,IAC7D;AAAA,EACF;AAAA,EAEA,KAAK,YAAoB,MAAuB;AAC9C,QAAI,KAAK,SAAS,cAAe;AAC/B,cAAQ,IAAI,IAAI,KAAK,MAAM,KAAK,OAAO,IAAI,GAAG,IAAI;AAAA,IACpD;AAAA,EACF;AAAA,EAEA,KAAK,YAAoB,MAAuB;AAC9C,QAAI,KAAK,SAAS,cAAe;AAC/B,cAAQ,KAAK,IAAI,KAAK,MAAM,WAAW,OAAO,IAAI,GAAG,IAAI;AAAA,IAC3D;AAAA,EACF;AAAA,EAEA,MAAM,YAAoB,MAAuB;AAC/C,QAAI,KAAK,SAAS,eAAgB;AAChC,cAAQ,MAAM,IAAI,KAAK,MAAM,YAAY,OAAO,IAAI,GAAG,IAAI;AAAA,IAC7D;AAAA,EACF;AAAA,EAEA,MAAM,KAAQ,OAAe,IAAkC;AAC7D,UAAM,QAAQ,YAAY,IAAI;AAC9B,QAAI;AACF,YAAM,SAAS,MAAM,GAAG;AACxB,YAAM,MAAM,YAAY,IAAI;AAC5B,WAAK,MAAM,GAAG,KAAK,kBAAkB,MAAM,OAAO,QAAQ,CAAC,CAAC,IAAI;AAChE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,YAAM,MAAM,YAAY,IAAI;AAC5B,WAAK,MAAM,GAAG,KAAK,kBAAkB,MAAM,OAAO,QAAQ,CAAC,CAAC,MAAM,KAAK;AACvE,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEA,SAAS,cAAc,aAAuD;AAC5E,MAAI,CAAC;AAAa,WAAO;AACzB,QAAM,QAAQ,YAAY,YAAY;AACtC,UAAQ,OAAO;AAAA,IACb,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT;AACE,aAAO;AAAA,EACX;AACF;AAEA,IAAM,kBAAkB,MAAgB;AACtC,QAAM,WAAW,uBAAuB,WAAW;AACnD,QAAM,cAAc,cAAc,QAAQ;AAC1C,MAAI,gBAAgB;AAAW,WAAO;AAEtC,QAAM,YAAY,uBAAuB,iBAAiB;AAC1D,MAAI,cAAc,OAAO,cAAc;AAAQ,WAAO;AAEtD,SAAO;AACT;AAEA,IAAM,iBAAiB,oBAAI,IAAmB;AAE9C,SAAS,aAAa,QAA+B;AACnD,QAAMA,UAAS,IAAI,cAAc,MAAM;AACvC,iBAAe,IAAIA,OAAM;AACzB,SAAOA;AACT;AAEO,IAAM,YAAY,aAAa,KAAK;AACpC,IAAM,eAAe,aAAa,QAAQ;AAC1C,IAAM,iBAAiB,aAAa,UAAU;AAC9C,IAAM,gBAAgB,aAAa,SAAS;AAC5C,IAAM,cAAc,aAAa,OAAO;AAExC,IAAM,SAAS,aAAa,WAAW;;;AChIvC,IAAM,qBAAqB;AAE3B,IAAM,mBAAmB;AAEzB,IAAM,gBAAgB;AAEtB,IAAM,gBAAgB;AAKtB,IAAM,0BAA0B,KAAK,qBAAqB;AAG1D,IAAM,sBAAsB,KAAK,qBAAqB;AAGtD,IAAM,uBAAuB,IAAI,qBAAqB;AAGtD,IAAM,oBAAoB,KAAK,qBAAqB;AAGpD,IAAM,uBAAuB,KAAK,qBAAqB;AAEvD,IAAM,8BAA8B,gBAAgB,mBAAmB,qBAC5E;AACK,IAAM,+BAA+B,IAAI,qBAAqB;AAE9D,IAAM,iCAAiC,gBAAgB,mBAC5D,qBAAqB;AAChB,IAAM,kCAAkC,IAAI,qBAAqB;AAEjE,IAAM,8BAA8B,IAAI,gBAAgB,mBAC7D,qBAAqB;AAChB,IAAM,6BAA6B,mBAAmB,qBAAqB;AAI3E,IAAM,gCAAgC,mBAAmB;AAQzD,IAAM,aAAa,gBAAgB,mBAAmB,qBAAqB;AAM3E,IAAM,6BAA6B,KAAK,OAAO;;;ACrDtD;AAAA,EACE,MAAQ;AAAA,EACR,SAAW;AAAA,EACX,SAAW;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAAA,EACA,SAAW;AAAA,IACT,KAAK;AAAA,IACL,SAAS;AAAA,IACT,YAAY;AAAA,IACZ,gBAAgB;AAAA,IAChB,gBAAgB;AAAA,IAChB,UAAU;AAAA,IACV,YAAY;AAAA,IACZ,cAAc;AAAA,IACd,QAAQ;AAAA,IACR,eAAe;AAAA,IACf,cAAc;AAAA,IACd,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,IACnB,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,uBAAuB;AAAA,IACvB,WAAW;AAAA,IACX,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,IACpB,uBAAuB;AAAA,EACzB;AAAA,EACA,SAAW;AAAA,IACT,
cAAc;AAAA,IACd,eAAe;AAAA,IACf,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB,uBAAuB;AAAA,IACvB,wBAAwB;AAAA,IACxB,oBAAoB;AAAA,IACpB,qBAAqB;AAAA,IACrB,oBAAoB;AAAA,IACpB,qBAAqB;AAAA,IACrB,yBAAyB;AAAA,IACzB,0BAA0B;AAAA,IAC1B,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,IACtB,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,IACtB,4BAA4B;AAAA,IAC5B,6BAA6B;AAAA,IAC7B,sBAAsB;AAAA,IACtB,uBAAuB;AAAA,IACvB,yBAAyB;AAAA,IACzB,0BAA0B;AAAA,IAC1B,mBAAmB;AAAA,IACnB,oBAAoB;AAAA,IACpB,uBAAuB;AAAA,IACvB,wBAAwB;AAAA,IACxB,yBAAyB;AAAA,IACzB,oBAAoB;AAAA,IACpB,qBAAqB;AAAA,IACrB,mBAAmB;AAAA,IACnB,oBAAoB;AAAA,IACpB,wBAAwB;AAAA,IACxB,yBAAyB;AAAA,IACzB,oBAAoB;AAAA,IACpB,qBAAqB;AAAA,IACrB,qBAAqB;AAAA,IACrB,sBAAsB;AAAA,IACtB,sBAAsB;AAAA,IACtB,uBAAuB;AAAA,IACvB,6BAA6B;AAAA,IAC7B,sBAAsB;AAAA,IACtB,oBAAoB;AAAA,IACpB,qBAAqB;AAAA,IACrB,QAAQ;AAAA,IACR,aAAa;AAAA,IACb,uBAAuB;AAAA,IACvB,eAAe;AAAA,IACf,SAAW;AAAA,IACX,gBAAgB;AAAA,IAChB,oBAAoB;AAAA,IACpB,OAAS;AAAA,IACT,aAAa;AAAA,IACb,oBAAoB;AAAA,IACpB,oBAAoB;AAAA,IACpB,qBAAqB;AAAA,IACrB,yBAAyB;AAAA,IACzB,eAAe;AAAA,IACf,iBAAiB;AAAA,IACjB,oBAAoB;AAAA,IACpB,wBAAwB;AAAA,IACxB,kBAAkB;AAAA,IAClB,cAAc;AAAA,IACd,sBAAsB;AAAA,IACtB,oBAAoB;AAAA,IACpB,eAAe;AAAA,IACf,SAAW;AAAA,IACX,kBAAkB;AAAA,IAClB,mBAAmB;AAAA,IACnB,KAAO;AAAA,IACP,cAAc;AAAA,IACd,OAAS;AAAA,IACT,MAAQ;AAAA,IACR,OAAS;AAAA,IACT,SAAW;AAAA,IACX,IAAM;AAAA,IACN,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,IAClB,qBAAqB;AAAA,IACrB,QAAU;AAAA,IACV,gBAAgB;AAAA,IAChB,uBAAuB;AAAA,IACvB,OAAS;AAAA,IACT,IAAM;AAAA,EACR;AAAA,EACA,iBAAmB;AAAA,IACjB,KAAO;AAAA,IACP,iBAAmB;AAAA,IACnB,QAAU;AAAA,IACV,eAAiB;AAAA,IACjB,0BAA4B;AAAA,IAC5B,OAAS,CAAC;AAAA,IACV,KAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA,EACA,OAAS;AAAA,IACP,OAAS;AAAA,IACT,KAAO;AAAA,IACP,OAAS;AAAA,IACT,aAAa;AAAA,IACb,SAAW;AAAA,IACX,MAAQ;AAAA,IACR,aAAa;AAAA,IACb,oBAAoB;AAAA,IACpB,iBAAiB;AAAA,IACjB,sBAAsB;AAAA,IACtB,6BAA6B;AAAA,IAC7B,mBAAmB;AAAA,IACnB,iBAAiB;AAAA,IACjB,MAAQ;AAAA,IACR,KAAO;AAAA,IACP,WAAa;AAAA,IACb,oBAAoB;AAAA,IACpB,oBAAoB;AAAA,IACpB,yBAAyB;AAAA,IACzB,kCAAkC;AAAA,IAClC,qBAAqB;AAAA,IACrB,iBAAiB;AAAA,IACjB,kBAAkB;AAAA,EACpB;AAAA,EACA,MAAQ;AAAA,IACN,SAAW;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAW;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,IACA,OAAS;AAAA,MACP,MAAQ;AAAA,QACN;AAAA,MACF;AAAA,MACA,SAAW;AAAA,QACT;AAAA,MACF;AAAA,MACA,SAAW;AAAA,QACT;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EACA,KAAO;AAAA,IACL,SAAW;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAW;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,IACA,SAAW;AAAA,MACT,SAAW;AAAA,MACX,WAAa;AAAA,MACb,aAAe;AAAA,MACf,YAAc;AAAA,MACd,aAAe;AAAA,MACf,WAAa;AAAA,IACf;AAAA,EACF;AACF;;;ACzMA,IAAM,cAAe,WAA2D;AAChF,IAAMC,kBAAiB,CAAC,CAAC,aAAa,UAAU;AAsDzC,SAAS,OAAO,KAAiC;AACtD,MAAI,QAAS;AACX,WAAO,YAAa,GAAG;AAAA,EACzB;AACA,MAAIC,iBAAgB;AAClB,WAAO,YAAa,IAAI,GAAG;AAAA,EAC7B;AACA,SAAO;AACT;;;AC9DO,IAAM,UAAkB,OAAO,mBAAmB,MACtD,OAAO,aAAW,YAAY,WAAW,aAAW,UAAU;;;ACJ1D,IAAM,cAAc;AAgBpB,IAAM,0BAA0B,MAAM;;;ACdtC,IAAM,6BAA6B,OAAO;;;ACK1C,IAAM,eAAe,OAAO;;;ACO5B,IAAM,kBAAkB;AAKxB,IAAM,yBAAyB;AAAA;AAAA,EAEpC,KAAK,GAAG,eAAe;AAAA;AAAA,EAEvB,IAAI,GAAG,eAAe;AAAA;AAAA,EAEtB,SAAS,GAAG,eAAe;AAAA;AAAA,EAE3B,OAAO,GAAG,eAAe;AAAA;AAAA,EAEzB,MAAM,GAAG,eAAe;AAAA;AAAA,EAExB,KAAK,GAAG,eAAe;AAAA;AAAA,EAEvB,QAAQ,GAAG,eAAe;AAAA;AAAA,EAE1B,QAAQ,GAAG,eAAe;AAC5B;AAKO,IAAM,qBAAqB;AAAA;AAAA,EAEhC,aAAa,GAAG,eAAe;AAAA,EAC/B,KAAK,GAAG,eAAe;AAAA,EACvB,SAAS,GAAG,eAAe;AAAA,EAC3B,eAAe,GAAG,eAAe;AAAA,EACjC,YAAY,GAAG,eAAe;AAAA,EAC9B,YAAY,GAAG,eAAe;AAAA;AAAA,EAG9B,WAAW,GAAG,eAAe;AAAA,EAC7B,WAAW,GAAG,eAAe;AAAA,EAC7B,aAAa,GAAG,eAAe;AAAA,EAC/B,eAAe,GAAG,eAAe;AAAA,EACjC,QAAQ,GAAG,eAAe;AAAA;AAAA,EAG1B,YAAY,GAAG,eAAe;A
AAA,EAC9B,cAAc,GAAG,eAAe;AAAA,EAChC,YAAY,GAAG,eAAe;AAAA,EAC9B,aAAa,GAAG,eAAe;AAAA,EAC/B,YAAY,GAAG,eAAe;AAAA,EAC9B,UAAU,GAAG,eAAe;AAAA,EAC5B,YAAY,GAAG,eAAe;AAAA,EAC9B,SAAS,GAAG,eAAe;AAAA,EAC3B,cAAc,GAAG,eAAe;AAAA,EAChC,oBAAoB,GAAG,eAAe;AAAA;AAAA,EAGtC,cAAc,GAAG,eAAe;AAAA,EAChC,mBAAmB,GAAG,eAAe;AAAA,EACrC,mBAAmB,GAAG,eAAe;AACvC;AAKO,IAAM,aAAa;AAAA;AAAA,EAExB,MAAM;AAAA;AAAA,EAEN,QAAQ;AAAA;AAAA,EAER,MAAM;AAAA;AAAA,EAEN,QAAQ;AACV;AAMO,IAAM,eAAe;AAAA;AAAA,EAE1B,MAAM;AAAA;AAAA,EAEN,OAAO;AAAA;AAAA,EAEP,IAAI;AAAA;AAAA,EAEJ,MAAM;AAAA;AAAA,EAEN,KAAK;AACP;AAGO,IAAM,oBAAoB,aAAa;AA8BvC,IAAM,uBAAuB;AAAA,EAClC,aAAa,mBAAmB;AAAA,EAChC,eAAe,mBAAmB;AACpC;;;ACnHO,IAAM,kBAAkB;AAAA,EAC7B;AAAA,EACA,WAAW,WAAW;AAAA,EACtB,YAAY,WAAW;AAAA,EACvB,UAAU,WAAW;AAAA,EACrB,YAAY,WAAW;AAAA,EACvB,aAAa,mBAAmB;AAAA,EAChC,WAAW,mBAAmB;AAAA,EAC9B,WAAW,mBAAmB;AAAA,EAC9B,eAAe,mBAAmB;AACpC;;;ACYO,IAAM,8BAAN,MAAiE;AAAA,EAAjE;AACL,SAAQ,WAAW,oBAAI,IAAwD;AAC/E,SAAQ,OAAO,oBAAI,IAAoD;AACvE,SAAQ,cAAc,oBAAI,IAAyB;AAAA;AAAA,EAEnD,kBAAkB,KAAkD;AAClE,UAAM,QAAQ,KAAK,SAAS,IAAI,GAAG;AACnC,QAAI,CAAC;AAAO,aAAO,QAAQ,QAAQ,MAAS;AAC5C,QAAI,MAAM,UAAU,KAAK,IAAI,IAAI,MAAM,QAAQ;AAC7C,WAAK,SAAS,OAAO,GAAG;AACxB,aAAO,QAAQ,QAAQ,MAAS;AAAA,IAClC;AACA,WAAO,QAAQ,QAAQ,MAAM,KAAK;AAAA,EACpC;AAAA,EAEA,kBAAkB,KAAa,UAA0B,OAA+B;AACtF,UAAM,SAAS,QAAQ,KAAK,IAAI,IAAI,QAAQ;AAC5C,SAAK,SAAS,IAAI,KAAK,EAAE,OAAO,UAAU,OAAO,CAAC;AAElD,QAAI,CAAC,KAAK,YAAY,IAAI,SAAS,MAAM,GAAG;AAC1C,WAAK,YAAY,IAAI,SAAS,QAAQ,oBAAI,IAAI,CAAC;AAAA,IACjD;AACA,SAAK,YAAY,IAAI,SAAS,MAAM,EAAG,IAAI,GAAG;AAC9C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,cAAc,MAA+C;AAC3D,UAAM,QAAQ,KAAK,KAAK,IAAI,IAAI;AAChC,QAAI,CAAC;AAAO,aAAO,QAAQ,QAAQ,MAAS;AAC5C,QAAI,MAAM,UAAU,KAAK,IAAI,IAAI,MAAM,QAAQ;AAC7C,WAAK,KAAK,OAAO,IAAI;AACrB,aAAO,QAAQ,QAAQ,MAAS;AAAA,IAClC;AACA,WAAO,QAAQ,QAAQ,MAAM,KAAK;AAAA,EACpC;AAAA,EAEA,cAAc,MAAc,MAAkB,OAA+B;AAC3E,UAAM,SAAS,QAAQ,KAAK,IAAI,IAAI,QAAQ;AAC5C,SAAK,KAAK,IAAI,MAAM,EAAE,OAAO,MAAM,OAAO,CAAC;AAC3C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,MAAM,aAAa,KAA4B;AAC7C,UAAM,WAAW,MAAM,KAAK,kBAAkB,GAAG;AACjD,SAAK,SAAS,OAAO,GAAG;AACxB,QAAI,UAAU;AACZ,WAAK,KAAK,OAAO,SAAS,QAAQ;AAClC,YAAM,aAAa,KAAK,YAAY,IAAI,SAAS,MAAM;AACvD,UAAI,YAAY;AACd,mBAAW,OAAO,GAAG;AACrB,YAAI,WAAW,SAAS,GAAG;AACzB,eAAK,YAAY,OAAO,SAAS,MAAM;AAAA,QACzC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,iBAAiB,QAAiC;AACtD,UAAM,OAAO,KAAK,YAAY,IAAI,MAAM;AACxC,QAAI,CAAC;AAAM,aAAO;AAElB,QAAI,QAAQ;AACZ,eAAW,OAAO,MAAM,KAAK,IAAI,GAAG;AAClC,YAAM,KAAK,aAAa,GAAG;AAC3B;AAAA,IACF;AACA,SAAK,YAAY,OAAO,MAAM;AAC9B,WAAO;AAAA,EACT;AAAA,EAEA,QAAuB;AACrB,SAAK,SAAS,MAAM;AACpB,SAAK,KAAK,MAAM;AAChB,SAAK,YAAY,MAAM;AACvB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,cAAgC;AAC9B,WAAO,QAAQ,QAAQ,IAAI;AAAA,EAC7B;AAAA,EAEA,WAKG;AACD,QAAI,YAAY;AAChB,QAAI;AACJ,QAAI;AAEJ,eAAW,EAAE,MAAM,KAAK,KAAK,SAAS,OAAO,GAAG;AAC9C,mBAAa,MAAM;AACnB,UAAI,CAAC,UAAU,MAAM,aAAa;AAAQ,iBAAS,MAAM;AACzD,UAAI,CAAC,UAAU,MAAM,aAAa;AAAQ,iBAAS,MAAM;AAAA,IAC3D;AAEA,WAAO,QAAQ,QAAQ;AAAA,MACrB,cAAc,KAAK,SAAS;AAAA,MAC5B;AAAA,MACA,cAAc;AAAA,MACd,cAAc;AAAA,IAChB,CAAC;AAAA,EACH;AACF;AAEA,IAAI,gBAAqC,IAAI,4BAA4B;;;AC9IlE,IAAM,mBAAN,MAA8C;AAAA,EAKnD,YAAY,SAAiB,SAAkB;AAC7C,SAAK,UAAU;AACf,SAAK,UAAU;AACf,SAAK,KAAK,iBAAiB;AAAA,EAC7B;AAAA,EAEQ,QAAQ,IAAoB;AAElC,UAAM,SAAS,GAAG,MAAM,GAAG,CAAC;AAC5B,WAAO,KAAK,KAAK,SAAS,QAAQ,EAAE;AAAA,EACtC;AAAA,EAEQ,gBAAgB,IAAoB;AAC1C,WAAO,KAAK,QAAQ,EAAE,IAAI;AAAA,EAC5B;AAAA,EAEA,MAAM,IACJ,MACA,UAA4B,CAAC,GACX;AAClB,UAAM,KAAK,QAAQ,MAAM,OAAO,WAAW;AAC3C,UAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,UAAM,WAAW,KAAK,gBAAgB,EAAE;AAExC,UAAM,KAAK,GAAG,MAAM,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAE1D,QAAI,OAAO;AAEX,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,KAAK
,GAAG,cAAc,UAAU,IAAI;AAC1C,aAAO,IAAI,YAAY,EAAE,OAAO,IAAI,EAAE;AAAA,IACxC,WAAW,gBAAgB,YAAY;AACrC,YAAM,KAAK,GAAG,UAAU,UAAU,IAAI;AACtC,aAAO,KAAK;AAAA,IACd,WAAW,gBAAgB,MAAM;AAC/B,YAAM,MAAM,IAAI,WAAW,MAAM,KAAK,YAAY,CAAC;AACnD,YAAM,KAAK,GAAG,UAAU,UAAU,GAAG;AACrC,aAAO,KAAK;AAAA,IACd,WAAW,gBAAgB,gBAAgB;AAEzC,YAAM,SAAS,IAAI,WAAW,MAAM,IAAI,SAAS,IAAI,EAAE,YAAY,CAAC;AACpE,YAAM,KAAK,GAAG,UAAU,UAAU,MAAM;AACxC,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AAEA,UAAM,MAAe;AAAA,MACnB,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,UAAU,QAAQ,YAAY;AAAA,MAC9B,WAAW,oBAAI,KAAK;AAAA,MACpB,WAAW,QAAQ,MAAM,IAAI,KAAK,KAAK,IAAI,IAAI,QAAQ,MAAM,GAAI,IAAI;AAAA,MACrE,UAAU,QAAQ;AAAA,MAClB,KAAK,KAAK,UAAU,GAAG,KAAK,OAAO,IAAI,EAAE,KAAK;AAAA,IAChD;AAEA,UAAM,KAAK,GAAG,cAAc,UAAU,KAAK,UAAU,GAAG,CAAC;AAEzD,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,IAA4C;AAC1D,QAAI;AACF,YAAM,QAAQ,MAAM,KAAK,SAAS,EAAE;AACpC,UAAI,CAAC;AAAO,eAAO;AAEnB,aAAO,IAAI,eAAe;AAAA,QACxB,MAAM,YAAY;AAChB,qBAAW,QAAQ,KAAK;AACxB,qBAAW,MAAM;AAAA,QACnB;AAAA,MACF,CAAC;AAAA,IACH,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,IAAoC;AAChD,UAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,QAAI;AACF,aAAO,MAAM,KAAK,GAAG,aAAa,QAAQ;AAAA,IAC5C,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,SAAS,IAAwC;AACrD,UAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,QAAI;AACF,aAAO,MAAM,KAAK,GAAG,SAAS,QAAQ;AAAA,IACxC,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,IAA2B;AACtC,UAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,UAAM,WAAW,KAAK,gBAAgB,EAAE;AACxC,QAAI;AACF,YAAM,KAAK,GAAG,OAAO,QAAQ;AAC7B,YAAM,KAAK,GAAG,OAAO,QAAQ;AAAA,IAC/B,QAAQ;AAAA,IAER;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,IAA8B;AACzC,UAAM,WAAW,KAAK,QAAQ,EAAE;AAChC,WAAO,MAAM,KAAK,GAAG,OAAO,QAAQ;AAAA,EACtC;AAAA,EAEA,MAAM,KAAK,IAAqC;AAC9C,UAAM,WAAW,KAAK,gBAAgB,EAAE;AACxC,QAAI;AACF,YAAM,OAAO,MAAM,KAAK,GAAG,aAAa,QAAQ;AAChD,YAAM,OAAO,KAAK,MAAM,IAAI;AAC5B,aAAO;AAAA,QACL,GAAG;AAAA,QACH,WAAW,IAAI,KAAK,KAAK,SAAS;AAAA,QAClC,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI;AAAA,MACzD;AAAA,IACF,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,sBAAqC;AAEzC,aAAS,IAAI,GAAG,IAAI,KAAK,KAAK;AAC5B,YAAM,SAAS,EAAE,SAAS,EAAE,EAAE,SAAS,GAAG,GAAG;AAC7C,YAAM,YAAY,KAAK,KAAK,SAAS,MAAM;AAC3C,UAAI;AACF,yBAAiB,SAAS,KAAK,GAAG,QAAQ,SAAS,GAAG;AACpD,cAAI,MAAM,UAAU,MAAM,KAAK,SAAS,YAAY,GAAG;AACrD,kBAAM,KAAK,MAAM,KAAK,QAAQ,cAAc,EAAE;AAC9C,kBAAM,UAAU,MAAM,KAAK,KAAK,EAAE;AAClC,gBAAI,WAAW,QAAQ,aAAa,QAAQ,YAAY,oBAAI,KAAK,GAAG;AAClE,0BAAO,MAAM,6CAA6C,EAAE,EAAE;AAC9D,oBAAM,KAAK,OAAO,EAAE;AAAA,YACtB;AAAA,UACF;AAAA,QACF;AAAA,MACF,SAAS,IAAI;AAEX;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;;;ACnJA,IAAI,WAAuD;AAO3D,eAAe,cAA4D;AACzE,MAAI,UAAU;AACZ,WAAO;AAAA,EACT;AAEA,MAAI;AAEF,QAAI,QAAQ;AACV,iBAAW,MAAM,OAAO,oBAA2C;AAAA,IACrE,OAAO;AAEL,iBAAW,MAAM,OAAO,oBAAoB;AAAA,IAC9C;AACA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,IAAI;AAAA,MACR;AAAA,kBACqB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,IAC7E;AAAA,EACF;AACF;AAyBO,IAAM,gBAAN,MAA2C;AAAA,EAKhD,YAAY,QAA6B;AAJzC,SAAQ,SAA8B;AAEtC,SAAQ,cAAoC;AAG1C,SAAK,SAAS;AAEd,SAAK,cAAc,KAAK,WAAW;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aAA4B;AACxC,UAAM,EAAE,SAAS,IAAI,MAAM,YAAY;AACvC,SAAK,SAAS,IAAI,SAAS;AAAA,MACzB,QAAQ,KAAK,OAAO;AAAA,MACpB,aAAa;AAAA,QACX,aAAa,KAAK,OAAO;AAAA,QACzB,iBAAiB,KAAK,OAAO;AAAA,MAC/B;AAAA,MACA,UAAU,KAAK,OAAO;AAAA,MACtB,gBAAgB,KAAK,OAAO;AAAA,IAC9B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAA2C;AACvD,QAAI,KAAK,aAAa;AACpB,YAAM,KAAK;AACX,WAAK,cAAc;AAAA,IACrB;AACA,QAAI,CAAC,KAAK,QAAQ;AAChB,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AACA,WAAO,KAAK;AAAA,EACd;AAAA,EAEQ,OAAO,IAAoB;AACjC,WAAO,KAAK,OAAO,SAAS,GAAG,KAAK,OAAO,MAAM,GAAG,EAAE,KAAK;AAAA,EAC7D;AAAA,EAEA,MAAM,IACJ,MACA,UAA4B,CAAC,GACX;AAClB
,UAAM,SAAS,MAAM,KAAK,kBAAkB;AAC5C,UAAM,EAAE,kBAAkB,qBAAqB,kBAAkB,IAAI,MAAM,YAAY;AAEvF,UAAM,KAAK,QAAQ,MAAM,OAAO,WAAW;AAC3C,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,WAAW,QAAQ,YAAY;AACrC,UAAM,YAAY,oBAAI,KAAK;AAC3B,UAAM,MAAM,QAAQ,OAAO,KAAK,OAAO;AACvC,UAAM,YAAY,MAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,MAAM,GAAI,IAAI;AAErE,QAAI;AACJ,QAAI;AAEJ,QAAI,OAAO,SAAS,UAAU;AAC5B,aAAO,IAAI,YAAY,EAAE,OAAO,IAAI;AACpC,sBAAgB,KAAK;AAAA,IACvB,WAAW,gBAAgB,YAAY;AACrC,aAAO;AACP,sBAAgB,KAAK;AAAA,IACvB,WAAW,gBAAgB,MAAM;AAC/B,aAAO;AACP,sBAAgB,KAAK;AAAA,IACvB,WAAW,gBAAgB,gBAAgB;AAIzC,aAAO;AAAA,IAGT,OAAO;AACL,YAAM,IAAI,MAAM,yCAAyC;AAAA,IAC3D;AAEA,UAAM,aAAa,IAAI,iBAAiB;AAAA,MACtC,QAAQ,KAAK,OAAO;AAAA,MACpB,KAAK;AAAA,MACL,MAAM;AAAA,MACN,aAAa;AAAA,MACb,eAAe;AAAA;AAAA,MACf,SAAS;AAAA;AAAA,MACT,UAAU,QAAQ;AAAA;AAAA,IACpB,CAAC;AAED,QAAI;AACF,YAAM,OAAO,KAAK,UAAU;AAAA,IAC9B,SAAS,GAAQ;AACf,UAAI,EAAE,SAAS,kBAAkB,KAAK,OAAO,kBAAkB;AAE7D,YAAI;AACF,gBAAM,OAAO,KAAK,IAAI,oBAAoB,EAAE,QAAQ,KAAK,OAAO,OAAO,CAAC,CAAC;AAEzE,gBAAM,OAAO,KAAK,UAAU;AAAA,QAC9B,SAASC,cAAa;AAEpB,sBAAO,MAAM,iCAAiCA,YAAW;AACzD,gBAAM;AAAA,QACR;AAAA,MACF,OAAO;AACL,cAAM;AAAA,MACR;AAAA,IACF;AAIA,QAAI,OAAO,iBAAiB;AAC5B,QAAI,SAAS,GAAG;AACd,UAAI;AACF,cAAM,cAAc,IAAI,kBAAkB;AAAA,UACxC,QAAQ,KAAK,OAAO;AAAA,UACpB,KAAK;AAAA,QACP,CAAC;AACD,cAAM,aAAa,MAAM,OAAO,KAAK,WAAW;AAChD,eAAO,WAAW,iBAAiB;AAAA,MACrC,SAAS,GAAG;AACV,oBAAO,KAAK,kCAAkC,GAAG,eAAe,CAAC;AAAA,MACnE;AAAA,IACF;AAEA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,UAAU,QAAQ;AAAA,MAClB,KAAK,KAAK,OAAO,UAAU,GAAG,KAAK,OAAO,OAAO,IAAI,GAAG,KAAK;AAAA,IAC/D;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,IAA4C;AAC1D,UAAM,SAAS,MAAM,KAAK,kBAAkB;AAC5C,UAAM,EAAE,iBAAiB,IAAI,MAAM,YAAY;AAE/C,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,QAAI;AACF,YAAM,aAAa,IAAI,iBAAiB;AAAA,QACtC,QAAQ,KAAK,OAAO;AAAA,QACpB,KAAK;AAAA,MACP,CAAC;AACD,YAAM,WAAW,MAAM,OAAO,KAAK,UAAU;AAC7C,UAAI,SAAS,MAAM;AAEjB,eAAO,SAAS;AAAA,MAClB;AACA,aAAO;AAAA,IACT,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,SAAS,aAAa;AAChD,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,IAAoC;AAChD,UAAM,SAAS,MAAM,KAAK,UAAU,EAAE;AACtC,QAAI,CAAC;AAAQ,aAAO;AAEpB,UAAM,WAAW,IAAI,SAAS,MAAM;AACpC,WAAO,MAAM,SAAS,KAAK;AAAA,EAC7B;AAAA,EAEA,MAAM,SAAS,IAAwC;AACrD,UAAM,SAAS,MAAM,KAAK,UAAU,EAAE;AACtC,QAAI,CAAC;AAAQ,aAAO;AAEpB,UAAM,WAAW,IAAI,SAAS,MAAM;AACpC,UAAM,SAAS,MAAM,SAAS,YAAY;AAC1C,WAAO,IAAI,WAAW,MAAM;AAAA,EAC9B;AAAA,EAEA,MAAM,OAAO,IAA2B;AACtC,UAAM,SAAS,MAAM,KAAK,kBAAkB;AAC5C,UAAM,EAAE,oBAAoB,IAAI,MAAM,YAAY;AAElD,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,gBAAgB,IAAI,oBAAoB;AAAA,MAC5C,QAAQ,KAAK,OAAO;AAAA,MACpB,KAAK;AAAA,IACP,CAAC;AACD,QAAI;AACF,YAAM,OAAO,KAAK,aAAa;AAAA,IACjC,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,SAAS,aAAa;AAEhD;AAAA,MACF;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,IAA8B;AACzC,UAAM,SAAS,MAAM,KAAK,kBAAkB;AAC5C,UAAM,EAAE,kBAAkB,IAAI,MAAM,YAAY;AAEhD,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,QAAI;AACF,YAAM,OAAO;AAAA,QACX,IAAI,kBAAkB;AAAA,UACpB,QAAQ,KAAK,OAAO;AAAA,UACpB,KAAK;AAAA,QACP,CAAC;AAAA,MACH;AACA,aAAO;AAAA,IACT,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,SAAS,YAAY;AAC/C,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,KAAK,IAAqC;AAC9C,UAAM,SAAS,MAAM,KAAK,kBAAkB;AAC5C,UAAM,EAAE,kBAAkB,IAAI,MAAM,YAAY;AAEhD,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,QAAI;AACF,YAAM,aAAa,MAAM,OAAO;AAAA,QAC9B,IAAI,kBAAkB;AAAA,UACpB,QAAQ,KAAK,OAAO;AAAA,UACpB,KAAK;AAAA,QACP,CAAC;AAAA,MACH;AAEA,UAAI,CAAC,WAAW;AAAc,eAAO;AAGrC,YAAM,WAAmC,CAAC;AAC1C,YAAM,cAAc,WAAW;AAC/B,iBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,eAAe,CAAC,CAAC,GAAG;AACtD,YAAI,KAAK,MAAM;AACb,mBAAS,CAAC,IAAI;AAAA,QAChB;AAAA,MACF;AAEA,UAAI;AACJ,UAAI,WAAW,SAAS;AACtB,oBAAY,IAAI,KAAK,WAAW,OAAO;
AAAA,MACzC,WAAW,WAAW,YAAY,WAAW,SAAS,WAAW,GAAG;AAElE,oBAAY,IAAI,KAAK,WAAW,SAAS,WAAW,CAAE;AAAA,MACxD;AASA,aAAO;AAAA,QACL,QAAQ;AAAA,QACR;AAAA,QACA,MAAM,WAAW,iBAAiB;AAAA,QAClC,UAAU,WAAW,eAAe;AAAA,QACpC,WAAW,WAAW;AAAA,QACtB;AAAA,QACA;AAAA,QACA,KAAK,KAAK,OAAO,UAAU,GAAG,KAAK,OAAO,OAAO,IAAI,GAAG,KAAK;AAAA,MAC/D;AAAA,IACF,SAAS,GAAG;AACV,UAAI,aAAa,SAAS,EAAE,SAAS,YAAY;AAC/C,eAAO;AAAA,MACT;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACF;;;ACzUO,IAAM,iBAAN,MAA4C;AAAA,EAIjD,YAAY,QAA8B;AAF1C,SAAQ,aAA8D;AAGpE,SAAK,SAAS;AACd,QAAI;AACF,WAAK,MAAM,KAAK,OAAO,iBAAiB;AAAA,IAC1C,QAAQ;AACN,YAAM,IAAI,MAAM,gEAAgE;AAAA,IAClF;AAAA,EACF;AAAA,EAEQ,OAAO,IAAoB;AACjC,WAAO,KAAK,OAAO,SAAS,GAAG,KAAK,OAAO,MAAM,GAAG,EAAE,KAAK;AAAA,EAC7D;AAAA,EAEA,MAAc,iBAAkC;AAC9C,QAAI,KAAK,cAAc,KAAK,WAAW,YAAY,oBAAI,KAAK,GAAG;AAC7D,aAAO,KAAK,WAAW;AAAA,IACzB;AAEA,UAAM,KAAK,KAAK,MAAM,KAAK,OAAO,iBAAiB;AACnD,UAAM,gBAAgB;AACtB,UAAM,QAAQ;AAEd,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,YAAY,KAAK,KAAK,UAAU,EAAE,KAAK,SAAS,KAAK,MAAM,CAAC,CAAC;AACnE,UAAM,cAAc,KAAK,KAAK,UAAU;AAAA,MACtC,KAAK,GAAG;AAAA,MACR;AAAA,MACA,KAAK;AAAA,MACL,KAAK,KAAK,MAAM,MAAM,GAAI,IAAI;AAAA;AAAA,MAC9B,KAAK,KAAK,MAAM,MAAM,GAAI;AAAA,IAC5B,CAAC,CAAC;AAOF,YAAQ;AAAA,MACN;AAAA,IAEF;AAGA,UAAM,YAAY;AAClB,UAAM,MAAM,GAAG,SAAS,IAAI,WAAW,IAAI,SAAS;AAKpD,UAAM,WAAW,MAAM,MAAM,eAAe;AAAA,MAC1C,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,gBAAgB;AAAA,MAClB;AAAA,MACA,MAAM,IAAI,gBAAgB;AAAA,QACxB,YAAY;AAAA,QACZ,WAAW;AAAA,MACb,CAAC;AAAA,IACH,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,QAAQ,MAAM,SAAS,KAAK;AAClC,YAAM,IAAI,MAAM,mCAAmC,SAAS,MAAM,MAAM,KAAK,EAAE;AAAA,IACjF;AAEA,UAAM,OAAO,MAAM,SAAS,KAAK;AACjC,UAAM,cAAc,KAAK;AACzB,UAAM,YAAY,KAAK;AAEvB,SAAK,aAAa;AAAA,MAChB;AAAA,MACA,WAAW,IAAI,KAAK,KAAK,IAAI,KAAK,YAAY,MAAM,GAAI;AAAA;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,IACJ,MACA,UAA4B,CAAC,GACX;AAClB,UAAM,KAAK,QAAQ,MAAM,OAAO,WAAW;AAC3C,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,WAAW,QAAQ,YAAY;AACrC,UAAM,YAAY,oBAAI,KAAK;AAC3B,UAAM,MAAM,QAAQ,OAAO,KAAK,OAAO;AACvC,UAAM,YAAY,MAAM,IAAI,KAAK,UAAU,QAAQ,IAAI,MAAM,GAAI,IAAI;AAErE,QAAI;AACJ,QAAI;AAEJ,QAAI,OAAO,SAAS,UAAU;AAC5B,aAAO,IAAI,YAAY,EAAE,OAAO,IAAI;AACpC,sBAAgB,KAAK;AAAA,IACvB,WAAW,gBAAgB,YAAY;AACrC,aAAO;AACP,sBAAgB,KAAK;AAAA,IACvB,WAAW,gBAAgB,MAAM;AAC/B,aAAO;AACP,sBAAgB,KAAK;AAAA,IACvB,WAAW,gBAAgB,gBAAgB;AACzC,aAAO;AAAA,IAGT,OAAO;AACL,YAAM,IAAI,MAAM,0CAA0C;AAAA,IAC5D;AAEA,UAAM,QAAQ,MAAM,KAAK,eAAe;AACxC,UAAM,YACJ,sDAAsD,KAAK,OAAO,MAAM,4BAA4B,GAAG;AAEzG,UAAM,UAAkC;AAAA,MACtC,iBAAiB,UAAU,KAAK;AAAA,MAChC,gBAAgB;AAAA,IAClB;AACA,QAAI,kBAAkB,QAAW;AAC/B,cAAQ,gBAAgB,IAAI,OAAO,aAAa;AAAA,IAClD;AAGA,UAAM,cAAsC,CAAC;AAC7C,QAAI,QAAQ,UAAU;AACpB,iBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,QAAQ,QAAQ,GAAG;AACrD,oBAAY,eAAe,EAAE,YAAY,CAAC,EAAE,IAAI;AAAA,MAClD;AAAA,IACF;AACA,QAAI,WAAW;AAEb,kBAAY,uBAAuB,IAAI,UAAU,YAAY;AAAA,IAC/D;AACA,WAAO,OAAO,SAAS,WAAW;AAElC,UAAM,WAAW,MAAM,MAAM,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,IACF,CAAC;AAED,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK;AACtC,YAAM,IAAI;AAAA,QACR,4BAA4B,SAAS,MAAM,MAAM,SAAS,UAAU,WAAW,SAAS;AAAA,MAC1F;AAAA,IACF;AAEA,UAAM,YAAY,MAAM,SAAS,KAAK;AAEtC,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA,MAAM,OAAO,UAAU,IAAI;AAAA,MAC3B,UAAU,UAAU;AAAA,MACpB,WAAW,IAAI,KAAK,UAAU,WAAW;AAAA,MACzC;AAAA;AAAA,MACA,UAAU,QAAQ;AAAA,MAClB,KAAK,KAAK,OAAO,UAAU,GAAG,KAAK,OAAO,OAAO,IAAI,GAAG,KAAK,UAAU;AAAA;AAAA,IACzE;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,IAA4C;AAC1D,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,QAAQ,MAAM,KAAK,eAAe;AACxC,UAAM,cACJ,+CAA+C,KAAK,OAAO,MAAM,MAAM,GAAG;AAE5E,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,aAAa;AAAA,QACxC,SAAS;AAAA,UACP,iBAAiB,UAAU,KAAK;AAAA,QAClC;AAAA,MACF,CAAC;AAED,UAAI,SAAS,WAAW,KAAK;AAC3B,eAAO;AAAA
,MACT;AACA,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,YAAY,MAAM,SAAS,KAAK;AACtC,cAAM,IAAI;AAAA,UACR,gCAAgC,SAAS,MAAM,MAAM,SAAS,UAAU,WAAW,SAAS;AAAA,QAC9F;AAAA,MACF;AACA,aAAO,SAAS;AAAA,IAClB,SAAS,GAAG;AACV,cAAQ,MAAM,wBAAwB,CAAC;AACvC,YAAM;AAAA,IACR;AAAA,EACF;AAAA,EAEA,MAAM,QAAQ,IAAoC;AAChD,UAAM,SAAS,MAAM,KAAK,UAAU,EAAE;AACtC,QAAI,CAAC;AAAQ,aAAO;AACpB,UAAM,SAAS,OAAO,UAAU;AAChC,QAAI,OAAO;AACX,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI;AAAM;AACV,cAAQ,IAAI,YAAY,EAAE,OAAO,KAAK;AAAA,IACxC;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,IAAwC;AACrD,UAAM,SAAS,MAAM,KAAK,UAAU,EAAE;AACtC,QAAI,CAAC;AAAQ,aAAO;AACpB,UAAM,SAAuB,CAAC;AAC9B,UAAM,SAAS,OAAO,UAAU;AAChC,WAAO,MAAM;AACX,YAAM,EAAE,MAAM,MAAM,IAAI,MAAM,OAAO,KAAK;AAC1C,UAAI;AAAM;AACV,aAAO,KAAK,KAAK;AAAA,IACnB;AACA,UAAM,cAAc,OAAO,OAAO,CAAC,KAAK,UAAU,MAAM,MAAM,QAAQ,CAAC;AACvE,UAAM,SAAS,IAAI,WAAW,WAAW;AACzC,QAAI,SAAS;AACb,eAAW,SAAS,QAAQ;AAC1B,aAAO,IAAI,OAAO,MAAM;AACxB,gBAAU,MAAM;AAAA,IAClB;AACA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,OAAO,IAA2B;AACtC,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,QAAQ,MAAM,KAAK,eAAe;AACxC,UAAM,YAAY,+CAA+C,KAAK,OAAO,MAAM,MAAM,GAAG;AAE5F,UAAM,WAAW,MAAM,MAAM,WAAW;AAAA,MACtC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,SAAS,WAAW,KAAK;AAE3B;AAAA,IACF;AACA,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK;AACtC,YAAM,IAAI;AAAA,QACR,8BAA8B,SAAS,MAAM,MAAM,SAAS,UAAU,WAAW,SAAS;AAAA,MAC5F;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,IAA8B;AACzC,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,QAAQ,MAAM,KAAK,eAAe;AACxC,UAAM,SACJ,+CAA+C,KAAK,OAAO,MAAM,MAAM,GAAG;AAE5E,UAAM,WAAW,MAAM,MAAM,QAAQ;AAAA,MACnC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,SAAS,WAAW,KAAK;AAC3B,aAAO;AAAA,IACT;AACA,QAAI,SAAS,WAAW,KAAK;AAC3B,aAAO;AAAA,IACT;AACA,UAAM,YAAY,MAAM,SAAS,KAAK;AACtC,UAAM,IAAI;AAAA,MACR,qCAAqC,SAAS,MAAM,MAAM,SAAS,UAAU,WAAW,SAAS;AAAA,IACnG;AAAA,EACF;AAAA,EAEA,MAAM,KAAK,IAAqC;AAC9C,UAAM,MAAM,KAAK,OAAO,EAAE;AAC1B,UAAM,QAAQ,MAAM,KAAK,eAAe;AACxC,UAAM,SAAS,+CAA+C,KAAK,OAAO,MAAM,MAAM,GAAG;AAEzF,UAAM,WAAW,MAAM,MAAM,QAAQ;AAAA,MACnC,QAAQ;AAAA,MACR,SAAS;AAAA,QACP,iBAAiB,UAAU,KAAK;AAAA,MAClC;AAAA,IACF,CAAC;AAED,QAAI,SAAS,WAAW,KAAK;AAC3B,aAAO;AAAA,IACT;AACA,QAAI,CAAC,SAAS,IAAI;AAChB,YAAM,YAAY,MAAM,SAAS,KAAK;AACtC,YAAM,IAAI;AAAA,QACR,oCAAoC,SAAS,MAAM,MAAM,SAAS,UAAU,WAAW,SAAS;AAAA,MAClG;AAAA,IACF;AAEA,UAAM,YAAY,MAAM,SAAS,KAAK;AAGtC,UAAM,WAAmC,CAAC;AAC1C,QAAI,UAAU,UAAU;AACtB,iBAAW,CAAC,GAAG,CAAC,KAAK,OAAO,QAAQ,UAAU,QAAkC,GAAG;AACjF,YAAI,EAAE,WAAW,cAAc,GAAG;AAChC,mBAAS,EAAE,QAAQ,gBAAgB,EAAE,CAAC,IAAI;AAAA,QAC5C,OAAO;AACL,mBAAS,CAAC,IAAI;AAAA,QAChB;AAAA,MACF;AAAA,IACF;AAEA,QAAI;AACJ,QAAI,SAAS,WAAW,GAAG;AACzB,kBAAY,IAAI,KAAK,SAAS,WAAW,CAAE;AAAA,IAC7C;AAEA,WAAO;AAAA,MACL,QAAQ;AAAA,MACR;AAAA,MACA,MAAM,OAAO,UAAU,IAAI;AAAA,MAC3B,UAAU,UAAU;AAAA,MACpB,WAAW,IAAI,KAAK,UAAU,WAAW;AAAA,MACzC;AAAA;AAAA,MACA;AAAA,MACA,KAAK,UAAU;AAAA;AAAA,IACjB;AAAA,EACF;AACF;;;ACxFO,SAAS,gBACd,SAGyE;AACzE,SACE,OAAO,QAAQ,YAAY,cAC3B,OAAO,QAAQ,YAAY,cAC3B,OAAO,QAAQ,gBAAgB;AAEnC;AAMO,SAAS,eACd,SAC6F;AAC7F,SACE,OAAO,QAAQ,gBAAgB,cAC/B,OAAO,QAAQ,gBAAgB;AAEnC;AAMO,SAAS,gBACd,SAGsE;AACtE,SACE,OAAO,QAAQ,iBAAiB,cAChC,OAAO,QAAQ,oBAAoB;AAEvC;;;AClRA,IAAM,yBAAyB;AAYxB,IAAM,gBAAN,MAA+C;AAAA,EAQpD,YAAY,SAA8B,CAAC,GAAG;AAP9C,SAAQ,OAAO,oBAAI,IAAyB;AAC5C,SAAQ,cAAc,oBAAI,IAA0B;AACpD,SAAQ,YAAY,oBAAI,IAA+B;AACvD,SAAQ,QAAuB,CAAC;AAChC,SAAQ,QAAQ,oBAAI,IAAmD;AAIrE,SAAK,SAAS;AAAA,MACZ,QAAQ;AAAA,MACR,OAAO;AAAA,MACP,cAAc;AAAA,MACd,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,UAAU,KAAiC;AACzC,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,iCAAiC,IAAI,EAAE,EAAE;AAAA,IACvD;AACA,SAAK,KA
AK,IAAI,IAAI,IAAI,gBAAgB,GAAG,CAAC;AAC1C,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,OAAO,OAA4C;AACjD,UAAM,MAAM,KAAK,KAAK,IAAI,KAAK;AAC/B,WAAO,QAAQ,QAAQ,MAAM,gBAAgB,GAAG,IAAI,IAAI;AAAA,EAC1D;AAAA,EAEA,UAAU,OAAe,OAA4C;AACnE,UAAM,MAAM,KAAK,KAAK,IAAI,KAAK;AAC/B,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,IAC3C;AAEA,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,iCAAiC,KAAK,IAAI,KAAK;AAAA,IAC7D;AAGA,UAAM,UAAU;AAAA,MACd,GAAG;AAAA,MACH,GAAG;AAAA;AAAA,MAEH,YAAY,EAAE,GAAG,IAAI,YAAY,GAAG,MAAM,WAAW;AAAA,MACrD,SAAS,EAAE,GAAG,IAAI,SAAS,GAAG,MAAM,QAAQ;AAAA,IAC9C;AAEA,SAAK,KAAK,IAAI,OAAO,OAAO;AAC5B,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,UAAU,OAA8B;AACtC,SAAK,KAAK,OAAO,KAAK;AACtB,SAAK,YAAY,OAAO,KAAK;AAC7B,SAAK,UAAU,OAAO,KAAK;AAC3B,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,SAAS,QAA2C;AAClD,QAAI,OAAO,MAAM,KAAK,KAAK,KAAK,OAAO,CAAC;AAGxC,QAAI,OAAO,YAAY;AACrB,aAAO,KAAK,OAAO,CAAC,MAAM,EAAE,eAAe,OAAO,UAAU;AAAA,IAC9D;AAEA,QAAI,OAAO,QAAQ;AACjB,YAAM,WAAW,MAAM,QAAQ,OAAO,MAAM,IAAI,OAAO,SAAS,CAAC,OAAO,MAAM;AAC9E,aAAO,KAAK,OAAO,CAAC,MAAM,SAAS,SAAS,EAAE,MAAM,CAAC;AAAA,IACvD;AAEA,QAAI,OAAO,cAAc;AACvB,aAAO,KAAK,OAAO,CAAC,MAAM,EAAE,aAAa,OAAO,YAAa;AAAA,IAC/D;AAEA,QAAI,OAAO,eAAe;AACxB,aAAO,KAAK,OAAO,CAAC,MAAM,EAAE,aAAa,OAAO,aAAc;AAAA,IAChE;AAGA,SAAK,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,QAAQ,IAAI,EAAE,UAAU,QAAQ,CAAC;AAGjE,UAAM,QAAQ,OAAO,UAAU;AAC/B,UAAM,MAAM,OAAO,QAAQ,QAAQ,OAAO,QAAQ;AAClD,WAAO,KAAK,MAAM,OAAO,GAAG;AAE5B,WAAO,QAAQ,QAAQ,KAAK,IAAI,CAAC,MAAM,gBAAgB,CAAC,CAAC,CAAC;AAAA,EAC5D;AAAA,EAEA,MAAM,UAAU,QAAoC;AAClD,UAAM,OAAO,MAAM,KAAK,SAAS,EAAE,GAAG,QAAQ,OAAO,QAAW,QAAQ,OAAU,CAAC;AACnF,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAMA,eAAe,OAAe,YAAuC;AACnE,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,sCAAsC,WAAW,EAAE,YAAY,KAAK,EAAE;AAAA,IACpF;AAEA,UAAM,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,CAAC;AACjD,aAAS,KAAK,gBAAgB,UAAU,CAAC;AACzC,SAAK,YAAY,IAAI,OAAO,QAAQ;AACpC,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,oBAAoB,OAA2C;AAC7D,UAAM,cAAc,KAAK,YAAY,IAAI,KAAK;AAC9C,QAAI,CAAC,eAAe,YAAY,WAAW,GAAG;AAC5C,aAAO,QAAQ,QAAQ,IAAI;AAAA,IAC7B;AAGA,UAAM,SAAS,YAAY,YAAY,SAAS,CAAC;AACjD,WAAO,QAAQ,QAAQ,SAAS,gBAAgB,MAAM,IAAI,IAAI;AAAA,EAChE;AAAA,EAEA,eAAe,OAAsC;AACnD,UAAM,cAAc,KAAK,YAAY,IAAI,KAAK,KAAK,CAAC;AACpD,WAAO,QAAQ,QAAQ,YAAY,IAAI,CAAC,MAAM,gBAAgB,CAAC,CAAC,CAAC;AAAA,EACnE;AAAA,EAEA,iBAAiB,OAAe,cAAqC;AACnE,UAAM,cAAc,KAAK,YAAY,IAAI,KAAK;AAC9C,QAAI,CAAC,aAAa;AAChB,aAAO,QAAQ,QAAQ;AAAA,IACzB;AAEA,UAAM,QAAQ,YAAY,UAAU,CAAC,MAAM,EAAE,OAAO,YAAY;AAChE,QAAI,UAAU,IAAI;AAChB,kBAAY,OAAO,OAAO,CAAC;AAC3B,UAAI,KAAK,OAAO,OAAO;AACrB,gBAAQ,IAAI,uCAAuC,YAAY,EAAE;AAAA,MACnE;AAAA,IACF;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,kBAAkB,OAAe,eAAwC;AACvE,UAAM,cAAc,KAAK,YAAY,IAAI,KAAK;AAC9C,QAAI,CAAC,aAAa;AAChB,aAAO,QAAQ,QAAQ;AAAA,IACzB;AAEA,UAAM,cAAc,IAAI,IAAI,aAAa;AACzC,UAAM,WAAW,YAAY,OAAO,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,EAAE,CAAC;AACjE,SAAK,YAAY,IAAI,OAAO,QAAQ;AAEpC,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,2BAA2B,cAAc,MAAM,cAAc;AAAA,IAC3E;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA,EAMA,oBACE,OACA,UACe;AACf,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,oCAAoC,SAAS,EAAE,YAAY,KAAK,EAAE;AAAA,IAChF;AAEA,UAAM,WAAW,KAAK,UAAU,IAAI,KAAK,KAAK,CAAC;AAC/C,aAAS,KAAK,gBAAgB,QAAQ,CAAC;AACvC,SAAK,UAAU,IAAI,OAAO,QAAQ;AAClC,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,oBAAoB,OAA2C;AAC7D,UAAM,YAAY,KAAK,UAAU,IAAI,KAAK,KAAK,CAAC;AAChD,WAAO,QAAQ;AAAA,MACb,UACG,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS,EACpC,IAAI,CAAC,MAAM,gBAAgB,CAAC,CAAC;AAAA,IAClC;AAAA,EACF;AAAA,EAEA,mBACE,OACA,YACiC;AACjC,UAAM,YAAY,KAAK,UAAU,IAAI,KAAK,KAAK,CAAC;AAChD,UAAM,WAAW,UAAU,KAAK,CAAC,MAAM,EAAE,OAAO,UAAU;AAC1D,WAAO,QAAQ,QAAQ,WAAW,gBAAgB,QAAQ,IAAI,IAAI;AAAA,EACpE;AAAA,EAEA,eACE,OACA,YACA,UACe;A
ACf,UAAM,YAAY,KAAK,UAAU,IAAI,KAAK;AAC1C,QAAI,CAAC,WAAW;AACd,YAAM,IAAI,MAAM,+BAA+B,KAAK,EAAE;AAAA,IACxD;AAEA,UAAM,WAAW,UAAU,KAAK,CAAC,MAAM,EAAE,OAAO,UAAU;AAC1D,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,uBAAuB,UAAU,EAAE;AAAA,IACrD;AAEA,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,sCAAsC,UAAU,IAAI,QAAQ;AAAA,IAC1E;AAEA,aAAS,SAAS,SAAS,WAAW,aAAa;AACnD,aAAS,YAAY,SAAS;AAC9B,aAAS,YAAY,oBAAI,KAAK;AAC9B,aAAS,UAAU,SAAS;AAC5B,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,qBAAqB,QAI4C;AAC/D,UAAM,SAA8D,CAAC;AAErE,eAAW,CAAC,OAAO,SAAS,KAAK,KAAK,WAAW;AAC/C,YAAM,MAAM,KAAK,KAAK,IAAI,KAAK;AAC/B,UAAI,CAAC;AAAK;AAEV,UAAI,QAAQ,cAAc,IAAI,eAAe,OAAO,YAAY;AAC9D;AAAA,MACF;AAEA,iBAAW,YAAY,WAAW;AAEhC,YAAI,QAAQ,WAAW,aAAa,SAAS,WAAW,WAAW;AACjE;AAAA,QACF;AAEA,YAAI,QAAQ,WAAW,WAAW;AAChC,gBAAM,YAAY,SAAS,aAAa,oBAAI,KAAK,IAAI,SAAS;AAC9D,cAAI,CAAC;AAAW;AAAA,QAClB;AAGA,YACE,QAAQ,YACR,SAAS,aACT,CAAC,SAAS,UAAU,SAAS,OAAO,QAAQ,GAC5C;AACA;AAAA,QACF;AAEA,eAAO,KAAK,EAAE,OAAO,UAAU,gBAAgB,QAAQ,EAAE,CAAC;AAAA,MAC5D;AAAA,IACF;AAEA,WAAO,QAAQ,QAAQ,MAAM;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAMA,QAAQ,KAAiC;AAEvC,UAAM,UAAU,KAAK,OAAO,gBAAgB;AAC5C,QAAI,KAAK,MAAM,UAAU,SAAS;AAChC,aAAO,QAAQ;AAAA,QACb,IAAI,MAAM,oBAAoB,OAAO,0BAA0B,IAAI,KAAK,EAAE;AAAA,MAC5E;AAAA,IACF;AAEA,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,mCAAmC,IAAI,KAAK,EAAE;AAAA,IAC5D;AAGA,UAAM,WAAW,IAAI,YAAY;AACjC,UAAM,cAAc,KAAK,MAAM,UAAU,CAAC,OAAO,EAAE,YAAY,KAAK,QAAQ;AAE5E,QAAI,gBAAgB,IAAI;AACtB,WAAK,MAAM,KAAK,gBAAgB,GAAG,CAAC;AAAA,IACtC,OAAO;AACL,WAAK,MAAM,OAAO,aAAa,GAAG,gBAAgB,GAAG,CAAC;AAAA,IACxD;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,UAAuC;AACrC,UAAM,MAAM,KAAK,MAAM,MAAM;AAC7B,WAAO,QAAQ,QAAQ,MAAM,gBAAgB,GAAG,IAAI,IAAI;AAAA,EAC1D;AAAA,EAEA,YAAY,OAA8B;AACxC,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,sCAAsC,KAAK,EAAE;AAAA,IAC3D;AAGA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,MAAM,KAAK,OAA8B;AAEvC,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,QAAI,KAAK;AACP,YAAM,KAAK,QAAQ;AAAA,QACjB,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,OAAO,IAAI;AAAA,QACX,WAAW,oBAAI,KAAK;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,OAAe,UAAoC;AAC7D,UAAM,WAAW,KAAK,MAAM,IAAI,KAAK;AACrC,UAAM,MAAM,KAAK,IAAI;AAGrB,QAAI,YAAY,SAAS,YAAY,KAAK;AACxC,aAAO,QAAQ,QAAQ,KAAK;AAAA,IAC9B;AAEA,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,uCAAuC,KAAK,EAAE;AAAA,IAC5D;AAEA,SAAK,MAAM,IAAI,OAAO;AAAA,MACpB,QAAQ,OAAO,WAAW;AAAA,MAC1B,WAAW,MAAM;AAAA,IACnB,CAAC;AAED,WAAO,QAAQ,QAAQ,IAAI;AAAA,EAC7B;AAAA,EAEA,YAAY,OAA8B;AACxC,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,uCAAuC,KAAK,EAAE;AAAA,IAC5D;AACA,SAAK,MAAM,OAAO,KAAK;AACvB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,WAAW,OAAe,UAAoC;AAC5D,UAAM,WAAW,KAAK,MAAM,IAAI,KAAK;AACrC,UAAM,MAAM,KAAK,IAAI;AAErB,QAAI,CAAC,YAAY,SAAS,aAAa,KAAK;AAC1C,aAAO,QAAQ,QAAQ,KAAK;AAAA,IAC9B;AAEA,aAAS,YAAY,MAAM;AAC3B,WAAO,QAAQ,QAAQ,IAAI;AAAA,EAC7B;AAAA,EAEA,SAAS,OAAiC;AACxC,UAAM,WAAW,KAAK,MAAM,IAAI,KAAK;AACrC,WAAO,QAAQ,QAAQ,CAAC,CAAC,YAAY,SAAS,YAAY,KAAK,IAAI,CAAC;AAAA,EACtE;AAAA;AAAA;AAAA;AAAA,EAMA,aAA4B;AAC1B,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,6BAA6B;AAAA,IAC3C;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,cAAgC;AAC9B,WAAO,QAAQ,QAAQ,IAAI;AAAA,EAC7B;AAAA,EAEA,UAAyB;AACvB,SAAK,KAAK,MAAM;AAChB,SAAK,YAAY,MAAM;AACvB,SAAK,UAAU,MAAM;AACrB,SAAK,QAAQ,CAAC;AACd,SAAK,MAAM,MAAM;AAEjB,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,2BAA2B;AAAA,IACzC;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,WAME;AACA,QAAI,mBAAmB;AACvB,QAAI,iBAAiB;AAErB,eAAW,eAAe,KAAK,YAAY,OAAO,GAAG;AACnD,0BAAoB,YAAY;AAAA,IAClC;AAEA,eAAW,aAAa,KAAK,UAAU,OAAO,GAAG;AAC/C,wBAAkB,UAAU;AAAA,IAC9B;AAEA,WAAO;AAAA,MACL,MAAM,KAAK,KAAK;AAAA,MAChB,aAAa;AAAA,MACb,WAAW;AAAA,MACX,aAAa,KAAK,MAAM;AAAA,MACxB,OAAO,KAAK,MAAM;AAAA,IACpB;AAAA,EA
CF;AAAA;AAAA;AAAA;AAAA,EAKA,QAAuB;AACrB,SAAK,KAAK,MAAM;AAChB,SAAK,YAAY,MAAM;AACvB,SAAK,UAAU,MAAM;AACrB,SAAK,QAAQ,CAAC;AACd,SAAK,MAAM,MAAM;AACjB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACF;;;AC/cA,IAAI,YAAiB;AACrB,IAAI,YAAiB;AASrB,eAAe,iBAA8D;AAE3E,MAAI,aAAa,WAAW;AAC1B,WAAO,EAAE,WAAW,UAAU;AAAA,EAChC;AAEA,MAAI,QAAQ;AACV,QAAI;AAEF,YAAM,eAAe,CAAC,6BAA6B,iBAAiB,EAAE,KAAK,EAAE;AAE7E,kBAAY,MAAM,OAAO;AAAA,IAC3B,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR,4CACE,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CACvD;AAAA,MACF;AAAA,IACF;AAAA,EACF,OAAO;AACL,QAAI;AAGF,YAAM,kBAAkB,CAAC,MAAM,KAAK,EAAE,KAAK,EAAE;AAC7C,kBAAY,MAAM,OAAO;AAAA,IAC3B,SAAS,OAAO;AACd,YAAM,IAAI;AAAA,QACR;AAAA,SACY,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,WAAW,UAAU;AAChC;AAqDA,SAAS,cAAc,KAAuC;AAC5D,QAAM,MAA8B,CAAC;AACrC,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,KAAK,GAAG;AACtC,UAAM,MAAM,IAAI,CAAC;AACjB,UAAM,QAAQ,IAAI,IAAI,CAAC;AACvB,QAAI,OAAO,UAAU,QAAW;AAC9B,UAAI,GAAG,IAAI;AAAA,IACb;AAAA,EACF;AACA,SAAO;AACT;AAKA,IAAM,mBAAN,MAA+C;AAAA,EAC7C,YAAoB,QAAa;AAAb;AAAA,EAAc;AAAA,EAElC,MAAM,KAAK,KAAa,QAA0D;AAChF,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,MAAM;AAAA,EAC3C;AAAA,EAEA,MAAM,QAAQ,KAA8C;AAC1D,WAAO,MAAM,KAAK,OAAO,QAAQ,GAAG;AAAA,EACtC;AAAA,EAEA,MAAM,KAAK,QAAgB,QAAmC;AAC5D,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,MAAM;AAAA,EAC3C;AAAA,EAEA,MAAM,OAAO,MAAiC;AAC5C,WAAO,MAAM,KAAK,OAAO,IAAI,IAAI;AAAA,EACnC;AAAA,EAEA,MAAM,KAAK,QAAgB,SAAoC;AAC7D,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,OAAO;AAAA,EAC5C;AAAA,EAEA,MAAM,KAAK,QAAgB,SAAoC;AAC7D,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,OAAO;AAAA,EAC5C;AAAA,EAEA,MAAM,SAAS,KAAgC;AAC7C,WAAO,MAAM,KAAK,OAAO,SAAS,GAAG;AAAA,EACvC;AAAA,EAEA,MAAM,MAAM,QAAgB,QAAmC;AAC7D,WAAO,MAAM,KAAK,OAAO,MAAM,KAAK,MAAM;AAAA,EAC5C;AAAA,EAEA,MAAM,OAAO,KAAa,OAAe,MAAiC;AACxE,WAAO,MAAM,KAAK,OAAO,OAAO,KAAK,OAAO,IAAI;AAAA,EAClD;AAAA,EAEA,MAAM,OAAO,KAAa,OAAuC;AAC/D,WAAO,MAAM,KAAK,OAAO,OAAO,KAAK,KAAK;AAAA,EAC5C;AAAA,EAEA,MAAM,KAAK,KAAa,OAAe,OAAuC;AAC5E,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,OAAO,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,KAAK,KAA8B;AACvC,WAAO,MAAM,KAAK,OAAO,KAAK,GAAG;AAAA,EACnC;AAAA,EAEA,MAAM,KAAK,KAAa,IAAY,QAAiD;AACnF,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,IAAI,MAAM;AAAA,EAC/C;AAAA,EAEA,MAAM,aAAa,KAAa,OAAe,IAAY,UAAqC;AAC9F,WAAO,MAAM,KAAK,OAAO,aAAa,KAAK,OAAO,IAAI,EAAE,UAAU,SAAS,CAAC;AAAA,EAC9E;AAAA,EAEA,MAAM,WACJ,SACA,SAGA;AAKA,UAAM,SAAS,MAAM,KAAK,OAAO;AAAA,MAC/B,QAAQ;AAAA,MACR,QAAQ;AAAA,MACR,QAAQ,IAAI,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,IAAI,EAAE,IAAI,EAAE;AAAA,MAC9C;AAAA,QACE,OAAO,QAAQ;AAAA,QACf,OAAO,QAAQ;AAAA,MACjB;AAAA,IACF;AAEA,QAAI,CAAC;AAAQ,aAAO,CAAC;AAIrB,WAAQ,OAAiB,IAAI,CAAC,YAAiB;AAAA,MAC7C,KAAK,OAAO;AAAA,MACZ,UAAU,OAAO,SAAS,IAAI,CAAC,SAAc;AAAA,QAC3C,IAAI,IAAI;AAAA,QACR,MAAM,IAAI;AAAA,MACZ,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,KAAK,KAAa,UAAkB,KAAgC;AACxE,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,OAAO,GAAG;AAAA,EAC/C;AAAA,EAEA,MAAM,KAAK,SAAoC;AAC7C,WAAO,MAAM,KAAK,OAAO,KAAK,OAAO;AAAA,EACvC;AAAA,EAEA,MAAM,UAAU,MAAiC;AAC/C,WAAO,MAAM,KAAK,OAAO,OAAO,IAAI;AAAA,EACtC;AAAA,EAEA,MAAM,OAAO,KAAa,SAAkC;AAC1D,WAAO,MAAM,KAAK,OAAO,OAAO,KAAK,OAAO;AAAA,EAC9C;AAAA,EAEA,MAAM,IACJ,KACA,OACA,SACwB;AACxB,UAAM,OAAY,CAAC;AACnB,QAAI,SAAS;AAAI,WAAK,KAAK;AAC3B,QAAI,SAAS;AAAI,WAAK,KAAK,QAAQ;AACnC,QAAI,SAAS;AAAI,WAAK,KAAK,QAAQ;AACnC,WAAO,MAAM,KAAK,OAAO,IAAI,KAAK,OAAO,IAAI;AAAA,EAC/C;AAAA,EAEA,MAAM,IAAI,KAAqC;AAC7C,WAAO,MAAM,KAAK,OAAO,IAAI,GAAG;AAAA,EAClC;AAAA,EAEA,MAAM,OAAsB;AAC1B,UAAM,KAAK,OAAO,KAAK;AAAA,EACzB;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAM,KAAK,OAAO,WAAW;AAAA,EAC/B;AACF;AAKA,IAAM,mBAAN,MAA+C;AAAA,EAC7C,YAAoB,QAAa;AAAb;AAAA,EAAc;AAAA,EAElC,MAAM,KAAK,KAAa,QAA0D;AAChF,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,MAAM;AAAA,EAC3C;AAAA,EAEA,MAAM,QAA
Q,KAA8C;AAC1D,UAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,GAAG;AAEzC,WAAO,cAAc,GAAG;AAAA,EAC1B;AAAA,EAEA,MAAM,KAAK,QAAgB,QAAmC;AAC5D,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,GAAG,MAAM;AAAA,EAC9C;AAAA,EAEA,MAAM,OAAO,MAAiC;AAC5C,WAAO,MAAM,KAAK,OAAO,IAAI,GAAG,IAAI;AAAA,EACtC;AAAA,EAEA,MAAM,KAAK,QAAgB,SAAoC;AAC7D,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,GAAG,OAAO;AAAA,EAC/C;AAAA,EAEA,MAAM,KAAK,QAAgB,SAAoC;AAC7D,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,GAAG,OAAO;AAAA,EAC/C;AAAA,EAEA,MAAM,SAAS,KAAgC;AAC7C,WAAO,MAAM,KAAK,OAAO,SAAS,GAAG;AAAA,EACvC;AAAA,EAEA,MAAM,MAAM,QAAgB,QAAmC;AAC7D,WAAO,MAAM,KAAK,OAAO,MAAM,KAAK,GAAG,MAAM;AAAA,EAC/C;AAAA,EAEA,MAAM,OAAO,KAAa,OAAe,MAAiC;AACxE,WAAO,MAAM,KAAK,OAAO,OAAO,KAAK,OAAO,IAAI;AAAA,EAClD;AAAA,EAEA,MAAM,OAAO,KAAa,OAAuC;AAC/D,WAAO,MAAM,KAAK,OAAO,OAAO,KAAK,KAAK;AAAA,EAC5C;AAAA,EAEA,MAAM,KAAK,KAAa,OAAe,OAAuC;AAC5E,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,OAAO,KAAK;AAAA,EACjD;AAAA,EAEA,MAAM,KAAK,KAA8B;AACvC,WAAO,MAAM,KAAK,OAAO,KAAK,GAAG;AAAA,EACnC;AAAA,EAEA,MAAM,KAAK,KAAa,IAAY,QAAiD;AACnF,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,IAAI,MAAM;AAAA,EAC/C;AAAA,EAEA,MAAM,aAAa,KAAa,OAAe,IAAY,UAAqC;AAC9F,WAAO,MAAM,KAAK,OAAO,aAAa,KAAK,OAAO,IAAI,QAAQ;AAAA,EAChE;AAAA,EAEA,MAAM,WACJ,SACA,SAGA;AACA,QAAI,QAAQ,WAAW;AAAG,aAAO,CAAC;AAGlC,UAAM,MAAM,MAAM,KAAK,OAAO;AAAA,MAC5B,QAAQ,IAAI,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,KAAK,EAAE,IAAI,EAAE;AAAA,MAC/C;AAAA,IACF;AAEA,QAAI,CAAC;AAAK,aAAO,CAAC;AAElB,WAAQ,IAAc,IAAI,CAAC,YAAiB;AAAA,MAC1C,KAAK,OAAO;AAAA,MACZ,UAAU,OAAO,SAAS,IAAI,CAAC,SAAc;AAAA,QAC3C,IAAI,IAAI;AAAA,QACR,MAAM,cAAc,IAAI,WAAW;AAAA,MACrC,EAAE;AAAA,IACJ,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,KAAK,KAAa,UAAkB,KAAgC;AACxE,WAAO,MAAM,KAAK,OAAO,KAAK,KAAK,OAAO,GAAG,GAAG;AAAA,EAClD;AAAA,EAEA,MAAM,KAAK,SAAoC;AAC7C,WAAO,MAAM,KAAK,OAAO,KAAK,OAAO;AAAA,EACvC;AAAA,EAEA,MAAM,UAAU,MAAiC;AAC/C,WAAO,MAAM,KAAK,OAAO,OAAO,GAAG,IAAI;AAAA,EACzC;AAAA,EAEA,MAAM,OAAO,KAAa,SAAkC;AAC1D,WAAO,MAAM,KAAK,OAAO,OAAO,KAAK,OAAO;AAAA,EAC9C;AAAA,EAEA,MAAM,IACJ,KACA,OACA,SACwB;AACxB,WAAO,MAAM,KAAK,OAAO,IAAI,KAAK,OAAO,OAAO;AAAA,EAClD;AAAA,EAEA,MAAM,IAAI,KAAqC;AAC7C,WAAO,MAAM,KAAK,OAAO,IAAI,GAAG;AAAA,EAClC;AAAA,EAEA,MAAM,OAAsB;AAC1B,UAAM,KAAK,OAAO,MAAM;AAAA,EAC1B;AAAA,EAEA,MAAM,aAA4B;AAChC,UAAM,KAAK,OAAO,MAAM;AAAA,EAC1B;AACF;AA+BO,IAAM,eAAN,MAA8C;AAAA,EAUnD,YAAY,SAA6B,CAAC,GAAG;AAT7C,SAAQ,SAA8B;AACtC,SAAQ,oBAAkD;AAM1D,SAAQ,cAAc;AAGpB,SAAK,SAAS;AAAA,MACZ,QAAQ;AAAA,MACR,WAAW;AAAA,MACX,WAAW;AAAA,MACX,cAAc,UAAU,OAAO,WAAW,EAAE,MAAM,GAAG,CAAC,CAAC;AAAA,MACvD,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAGA,QAAI,OAAO,QAAQ;AACjB,WAAK,SAAS,OAAO;AAAA,IACvB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,OAAO,OAAuB;AACpC,WAAO,GAAG,KAAK,OAAO,MAAM,OAAO,KAAK;AAAA,EAC1C;AAAA,EAEQ,eAAe,OAAuB;AAC5C,WAAO,GAAG,KAAK,OAAO,MAAM,eAAe,KAAK;AAAA,EAClD;AAAA,EAEQ,aAAa,OAAuB;AAC1C,WAAO,GAAG,KAAK,OAAO,MAAM,aAAa,KAAK;AAAA,EAChD;AAAA,EAEQ,eAAe,QAAgC;AACrD,WAAO,GAAG,KAAK,OAAO,MAAM,gBAAgB,MAAM;AAAA,EACpD;AAAA,EAEQ,iBAAiB,YAA4B;AACnD,WAAO,GAAG,KAAK,OAAO,MAAM,kBAAkB,UAAU;AAAA,EAC1D;AAAA,EAEQ,QAAQ,OAAuB;AACrC,WAAO,GAAG,KAAK,OAAO,MAAM,QAAQ,KAAK;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAMQ,aAAa,KAA0C;AAC7D,WAAO;AAAA,MACL,IAAI,IAAI;AAAA,MACR,YAAY,IAAI;AAAA,MAChB,SAAS,IAAI,WAAW;AAAA,MACxB,QAAQ,IAAI;AAAA,MACZ,OAAO,KAAK,UAAU,IAAI,KAAK;AAAA,MAC/B,QAAQ,IAAI,WAAW,SAAY,KAAK,UAAU,IAAI,MAAM,IAAI;AAAA,MAChE,YAAY,KAAK,UAAU,IAAI,UAAU;AAAA,MACzC,cAAc,KAAK,UAAU,IAAI,YAAY;AAAA,MAC7C,SAAS,KAAK,UAAU,IAAI,OAAO;AAAA,MACnC,OAAO,IAAI,QAAQ,KAAK,UAAU,IAAI,KAAK,IAAI;AAAA,MAC/C,WAAW,IAAI,UAAU,YAAY;AAAA,MACrC,WAAW,IAAI,WAAW,YAAY,KAAK;AAAA,MAC3C,aAAa,IAAI,aAAa,YAAY,KAAK;AAAA,IACjD;AAAA,EACF;AAAA,EAEQ,eAAe,MAA2C;AAEhE,QAAI,CAAC,KAAK,IAAI;AACZ,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AACA,QAAI,CAAC,KAAK,YAAY;AACpB
,YAAM,IAAI,MAAM,sCAAsC,KAAK,EAAE,+BAA+B;AAAA,IAC9F;AAGA,UAAM,gBAAkC;AAAA,MACtC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,UAAM,SAAS,KAAK;AACpB,QAAI,KAAK,UAAU,CAAC,cAAc,SAAS,MAAM,GAAG;AAClD,YAAM,IAAI;AAAA,QACR,sCAAsC,KAAK,EAAE,sBAAsB,KAAK,MAAM,uBACxD,cAAc,KAAK,IAAI,CAAC;AAAA,MAChD;AAAA,IACF;AAGA,UAAM,gBAAgB,CAAI,OAAe,OAA2B,iBAAuB;AACzF,UAAI,CAAC;AAAO,eAAO;AACnB,UAAI;AACF,eAAO,KAAK,MAAM,KAAK;AAAA,MACzB,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR,sCAAsC,KAAK,EAAE,uBAAuB,KAAK,qBAC7D,aAAa,QAAQ,EAAE,UAAU,OAAO,CAAC,CAAC;AAAA,QACxD;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,IAAI,KAAK;AAAA,MACT,YAAY,KAAK;AAAA,MACjB,SAAS,KAAK,WAAW;AAAA,MACzB,QAAQ,UAAU;AAAA,MAClB,OAAO,cAAc,SAAS,KAAK,OAAO,MAAS;AAAA,MACnD,QAAQ,cAAc,UAAU,KAAK,QAAQ,MAAS;AAAA,MACtD,YAAY,cAAc,cAAc,KAAK,YAAY,CAAC,CAAC;AAAA,MAC3D,cAAc,cAAc,gBAAgB,KAAK,cAAc,CAAC,CAAC;AAAA,MACjE,SAAS,cAAc,WAAW,KAAK,SAAS,EAAE,OAAO,OAAU,CAAC;AAAA,MACpE,aAAa,CAAC;AAAA;AAAA,MACd,kBAAkB,CAAC;AAAA;AAAA,MACnB,OAAO,cAAc,SAAS,KAAK,OAAO,MAAS;AAAA,MACnD,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI,oBAAI,KAAK;AAAA,MAChE,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI;AAAA,MACvD,aAAa,KAAK,cAAc,IAAI,KAAK,KAAK,WAAW,IAAI;AAAA,IAC/D;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMQ,eAAsC;AAE5C,QAAI,KAAK,QAAQ;AACf,aAAO,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACpC;AAIA,QAAI,CAAC,KAAK,mBAAmB;AAC3B,WAAK,oBAAoB,KAAK,iBAAiB;AAAA,IACjD;AAEA,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAA0C;AAEtD,UAAM,EAAE,WAAW,WAAW,WAAW,UAAU,IAAI,MAAM,eAAe;AAE5E,QAAI,WAAW;AACb,YAAM,SAAS,UAAU,aAAa;AAAA,QACpC,KAAK,KAAK,OAAO;AAAA,QACjB,QAAQ;AAAA,UACN,MAAM,KAAK,OAAO;AAAA,UAClB,MAAM,KAAK,OAAO;AAAA,QACpB;AAAA,MACF,CAAC;AACD,YAAM,OAAO,QAAQ;AACrB,WAAK,SAAS,IAAI,iBAAiB,MAAM;AAAA,IAC3C,WAAW,WAAW;AACpB,YAAM,SAAS,MAAM,UAAU,QAAQ;AAAA,QACrC,UAAU,KAAK,OAAO;AAAA,QACtB,MAAM,KAAK,OAAO;AAAA,MACpB,CAAC;AACD,WAAK,SAAS,IAAI,iBAAiB,MAAM;AAAA,IAC3C,OAAO;AACL,YAAM,IAAI,MAAM,6CAA6C;AAAA,IAC/D;AAEA,UAAM,WAAW,KAAK,OAAO,YAAY;AACzC,UAAM,OAAO,KAAK,OAAO,QAAQ;AAEjC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,gCAAgC,QAAQ,IAAI,IAAI,EAAE;AAAA,IACjE;AAGA,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,aAA4B;AAChC,QAAI,KAAK;AAAa;AAEtB,UAAM,SAAS,MAAM,KAAK,aAAa;AAGvC,QAAI;AACF,YAAM,OAAO;AAAA,QACX,KAAK,OAAO;AAAA,QACZ,KAAK,OAAO;AAAA,QACZ;AAAA,QACA;AAAA,MACF;AACA,UAAI,KAAK,OAAO,OAAO;AACrB,oBAAO,MAAM,0CAA0C,KAAK,OAAO,SAAS,EAAE;AAAA,MAChF;AAAA,IACF,SAAS,GAAG;AACV,YAAM,MAAM,OAAO,aAAa,QAAQ,EAAE,UAAU,CAAC;AACrD,UAAI,CAAC,IAAI,SAAS,WAAW,GAAG;AAC9B,oBAAO,MAAM,iDAAiD,CAAC;AAAA,MACjE;AAAA,IACF;AAEA,SAAK,cAAc;AAAA,EACrB;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,UAAU,KAAiC;AAC/C,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,gCAAgC,IAAI,EAAE,EAAE;AAAA,IACvD;AAGA,UAAM,OAAO,KAAK,KAAK,OAAO,IAAI,EAAE,GAAG,KAAK,aAAa,GAAG,CAAC;AAG7D,UAAM,OAAO,KAAK,KAAK,eAAe,IAAI,MAAM,GAAG,IAAI,EAAE;AACzD,UAAM,OAAO,KAAK,KAAK,iBAAiB,IAAI,UAAU,GAAG,IAAI,EAAE;AAG/D,QAAI,KAAK,OAAO,QAAQ;AACtB,YAAM,OAAO,OAAO,KAAK,OAAO,IAAI,EAAE,GAAG,KAAK,OAAO,MAAM;AAAA,IAC7D;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,OAA4C;AACvD,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,OAAO,MAAM,OAAO,QAAQ,KAAK,OAAO,KAAK,CAAC;AAEpD,QAAI,CAAC,QAAQ,OAAO,KAAK,IAAI,EAAE,WAAW,GAAG;AAC3C,aAAO;AAAA,IACT;AAEA,UAAM,MAAM,KAAK,eAAe,IAAI;AAGpC,QAAI,mBAAmB,MAAM,KAAK,oBAAoB,KAAK;AAE3D,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,OAAe,OAA4C;AACzE,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,gCAAgC,KAAK,EAAE;AAAA,IACtD;AAGA,UAAM,aAAa,MAAM,KAAK,OAAO,KAAK;AAC1C,UAAM,YAAY,YAAY;AAG9B,UAAM,SAAiC,CAAC;AAExC,QAAI,MAAM,WAAW;AAAW,aAAO,SAAS,MAAM;AACtD,QAAI,MAAM,WAAW;AAAW,aAAO,SAAS,KAAK,UAAU,MAAM,MAAM;AAC3E,QAAI,MAAM,eAAe;AAAW,aAAO,aAAa,KAAK,UAAU,MAAM,UAAU;AACvF,QAAI,MAAM,iBAAiB;AAAW,
aAAO,eAAe,KAAK,UAAU,MAAM,YAAY;AAC7F,QAAI,MAAM,YAAY;AAAW,aAAO,UAAU,KAAK,UAAU,MAAM,OAAO;AAC9E,QAAI,MAAM,UAAU;AAAW,aAAO,QAAQ,KAAK,UAAU,MAAM,KAAK;AACxE,QAAI,MAAM,cAAc;AAAW,aAAO,YAAY,MAAM,UAAU,YAAY;AAClF,QAAI,MAAM,gBAAgB;AAAW,aAAO,cAAc,MAAM,YAAY,YAAY;AAExF,QAAI,OAAO,KAAK,MAAM,EAAE,SAAS,GAAG;AAClC,YAAM,OAAO,KAAK,KAAK,OAAO,KAAK,GAAG,MAAM;AAAA,IAC9C;AAGA,QAAI,MAAM,UAAU,aAAa,MAAM,WAAW,WAAW;AAC3D,YAAM,OAAO,KAAK,KAAK,eAAe,SAAS,GAAG,KAAK;AACvD,YAAM,OAAO,KAAK,KAAK,eAAe,MAAM,MAAM,GAAG,KAAK;AAAA,IAC5D;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,OAA8B;AAC5C,UAAM,SAAS,MAAM,KAAK,aAAa;AAGvC,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,QAAI,CAAC;AAAK;AAGV,UAAM,OAAO;AAAA,MACX,KAAK,OAAO,KAAK;AAAA,MACjB,KAAK,eAAe,KAAK;AAAA,MACzB,KAAK,aAAa,KAAK;AAAA,IACzB;AAGA,UAAM,OAAO,KAAK,KAAK,eAAe,IAAI,MAAM,GAAG,KAAK;AACxD,UAAM,OAAO,KAAK,KAAK,iBAAiB,IAAI,UAAU,GAAG,KAAK;AAAA,EAChE;AAAA,EAEA,MAAM,SAAS,QAA2C;AACxD,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,QAAI,SAAmB,CAAC;AAGxB,QAAI,OAAO,YAAY;AACrB,eAAS,MAAM,OAAO,SAAS,KAAK,iBAAiB,OAAO,UAAU,CAAC;AAAA,IACzE,WAAW,OAAO,QAAQ;AACxB,YAAM,WAAW,MAAM,QAAQ,OAAO,MAAM,IAAI,OAAO,SAAS,CAAC,OAAO,MAAM;AAC9E,iBAAW,UAAU,UAAU;AAC7B,cAAM,MAAM,MAAM,OAAO,SAAS,KAAK,eAAe,MAAM,CAAC;AAC7D,eAAO,KAAK,GAAG,GAAG;AAAA,MACpB;AAEA,eAAS,CAAC,GAAG,IAAI,IAAI,MAAM,CAAC;AAAA,IAC9B,OAAO;AAEL,YAAM,OAAO,MAAM,OAAO,KAAK,GAAG,KAAK,OAAO,MAAM,OAAO;AAC3D,eAAS,KAAK,IAAI,CAAC,MAAM,EAAE,QAAQ,GAAG,KAAK,OAAO,MAAM,QAAQ,EAAE,CAAC;AAAA,IACrE;AAGA,UAAM,OAAsB,CAAC;AAC7B,eAAW,SAAS,QAAQ;AAC1B,YAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,UAAI,CAAC;AAAK;AAGV,UAAI,OAAO,QAAQ;AACjB,cAAM,WAAW,MAAM,QAAQ,OAAO,MAAM,IAAI,OAAO,SAAS,CAAC,OAAO,MAAM;AAC9E,YAAI,CAAC,SAAS,SAAS,IAAI,MAAM;AAAG;AAAA,MACtC;AAEA,UAAI,OAAO,gBAAgB,IAAI,YAAY,OAAO;AAAc;AAChE,UAAI,OAAO,iBAAiB,IAAI,YAAY,OAAO;AAAe;AAElE,WAAK,KAAK,GAAG;AAAA,IACf;AAGA,SAAK,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,QAAQ,IAAI,EAAE,UAAU,QAAQ,CAAC;AAGjE,QAAI,SAAS;AACb,QAAI,OAAO,QAAQ;AACjB,eAAS,OAAO,MAAM,OAAO,MAAM;AAAA,IACrC;AACA,QAAI,OAAO,OAAO;AAChB,eAAS,OAAO,MAAM,GAAG,OAAO,KAAK;AAAA,IACvC;AAEA,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,UAAU,QAAoC;AAClD,UAAM,OAAO,MAAM,KAAK,SAAS,EAAE,GAAG,QAAQ,OAAO,QAAW,QAAQ,OAAU,CAAC;AACnF,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eAAe,OAAe,YAAuC;AACzE,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,qCAAqC,WAAW,EAAE,EAAE;AAAA,IACnE;AAEA,UAAM,aAAa,KAAK,UAAU;AAAA,MAChC,GAAG;AAAA,MACH,WAAW,WAAW,UAAU,YAAY;AAAA,IAC9C,CAAC;AAED,UAAM,OAAO,MAAM,KAAK,eAAe,KAAK,GAAG,UAAU;AAAA,EAC3D;AAAA,EAEA,MAAM,oBAAoB,OAA2C;AACnE,UAAM,SAAS,MAAM,KAAK,aAAa;AAGvC,UAAM,MAAM,MAAM,OAAO,OAAO,KAAK,eAAe,KAAK,GAAG,EAAE;AAC9D,QAAI,CAAC;AAAK,aAAO;AAEjB,UAAM,OAAO,KAAK,MAAM,GAAG;AAC3B,WAAO;AAAA,MACL,GAAG;AAAA,MACH,WAAW,IAAI,KAAK,KAAK,SAAS;AAAA,IACpC;AAAA,EACF;AAAA,EAEA,MAAM,eAAe,OAAsC;AACzD,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,UAAM,UAAU,MAAM,OAAO,OAAO,KAAK,eAAe,KAAK,GAAG,GAAG,EAAE;AAErE,WAAO,QAAQ,IAAI,CAAC,QAAQ;AAC1B,YAAM,OAAO,KAAK,MAAM,GAAG;AAC3B,aAAO;AAAA,QACL,GAAG;AAAA,QACH,WAAW,IAAI,KAAK,KAAK,SAAS;AAAA,MACpC;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,oBAAoB,OAAe,UAA0C;AACjF,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,mCAAmC,SAAS,EAAE,EAAE;AAAA,IAC/D;AAEA,UAAM,aAAa,KAAK,UAAU;AAAA,MAChC,GAAG;AAAA,MACH,aAAa,SAAS,YAAY,YAAY;AAAA,MAC9C,WAAW,SAAS,WAAW,YAAY;AAAA,MAC3C,WAAW,SAAS,WAAW,YAAY;AAAA,IAC7C,CAAC;AAED,UAAM,OAAO,MAAM,KAAK,aAAa,KAAK,GAAG,UAAU;AAAA,EACzD;AAAA,EAEA,MAAM,oBAAoB,OAA2C;AACnE,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,UAAM,UAAU,MAAM,OAAO,OAAO,KAAK,aAAa,KAAK,GAAG,GAAG,EAAE;AAEnE,WAAO,QACJ,IAAI,CAAC,QAAQ;AACZ,YAAM,OAAO,KAAK,MAAM,GAAG;AAC3B,aAAO;AAAA,QACL,GAAG;AAAA,QACH,aAAa,IAAI,KAAK,KAAK,WAAW;AAAA,QACtC,WAAW,KAAK,YAAY,IAA
I,KAAK,KAAK,SAAS,IAAI;AAAA,QACvD,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI;AAAA,MACzD;AAAA,IACF,CAAC,EACA,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS;AAAA,EACzC;AAAA,EAEA,MAAM,mBAAmB,OAAe,YAAqD;AAC3F,UAAM,YAAY,MAAM,KAAK,oBAAoB,KAAK;AACtD,WAAO,UAAU,KAAK,CAAC,MAAM,EAAE,OAAO,UAAU,KAAK;AAAA,EACvD;AAAA,EAEA,MAAM,eACJ,OACA,YACA,UACe;AACf,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,MAAM,KAAK,aAAa,KAAK;AAGnC,UAAM,UAAU,MAAM,OAAO,OAAO,KAAK,GAAG,EAAE;AAG9C,QAAI,cAAc;AAClB,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAMC,QAAO,KAAK,MAAM,QAAQ,CAAC,CAAE;AACnC,UAAIA,MAAK,OAAO,YAAY;AAC1B,sBAAc;AACd;AAAA,MACF;AAAA,IACF;AAEA,QAAI,gBAAgB,IAAI;AACtB,YAAM,IAAI,MAAM,uBAAuB,UAAU,EAAE;AAAA,IACrD;AAGA,UAAM,OAAO,KAAK,MAAM,QAAQ,WAAW,CAAE;AAC7C,SAAK,SAAS,SAAS,WAAW,aAAa;AAC/C,SAAK,YAAY,SAAS;AAC1B,SAAK,aAAY,oBAAI,KAAK,GAAE,YAAY;AACxC,SAAK,UAAU,SAAS;AAIxB,UAAM,OAAO,KAAK,KAAK,aAAa,KAAK,UAAU,IAAI,CAAC;AAAA,EAC1D;AAAA,EAEA,MAAM,qBAAqB,QAIsC;AAC/D,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,SAA8D,CAAC;AAGrE,UAAM,OAAO,MAAM,OAAO,KAAK,GAAG,KAAK,OAAO,MAAM,aAAa;AAEjE,eAAW,OAAO,MAAM;AACtB,YAAM,QAAQ,IAAI,QAAQ,GAAG,KAAK,OAAO,MAAM,cAAc,EAAE;AAG/D,UAAI,QAAQ,YAAY;AACtB,cAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,YAAI,CAAC,OAAO,IAAI,eAAe,OAAO;AAAY;AAAA,MACpD;AAEA,YAAM,UAAU,MAAM,OAAO,OAAO,KAAK,GAAG,EAAE;AAE9C,iBAAW,OAAO,SAAS;AACzB,cAAM,OAAO,KAAK,MAAM,GAAG;AAC3B,cAAM,WAA4B;AAAA,UAChC,GAAG;AAAA,UACH,aAAa,IAAI,KAAK,KAAK,WAAW;AAAA,UACtC,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI;AAAA,UACvD,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI;AAAA,QACzD;AAGA,YAAI,QAAQ,WAAW,aAAa,SAAS,WAAW;AAAW;AACnE,YAAI,QAAQ,WAAW,WAAW;AAChC,gBAAM,YAAY,SAAS,aAAa,oBAAI,KAAK,IAAI,SAAS;AAC9D,cAAI,CAAC;AAAW;AAAA,QAClB;AAGA,YACE,QAAQ,YAAY,SAAS,aAAa,CAAC,SAAS,UAAU,SAAS,OAAO,QAAQ,GACtF;AACA;AAAA,QACF;AAEA,eAAO,KAAK,EAAE,OAAO,SAAS,CAAC;AAAA,MACjC;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QAAQ,KAAiC;AAC7C,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,kCAAkC,IAAI,KAAK,EAAE;AAAA,IAC5D;AAEA,UAAM,OAAO,KAAK,KAAK,OAAO,WAAW,KAAK;AAAA,MAC5C,OAAO,IAAI;AAAA,MACX,YAAY,IAAI;AAAA,MAChB,OAAO,KAAK,UAAU,IAAI,KAAK;AAAA,MAC/B,UAAU,OAAO,IAAI,YAAY,CAAC;AAAA,MAClC,WAAW,IAAI,UAAU,YAAY;AAAA,IACvC,CAAC;AAAA,EACH;AAAA,EAEA,MAAM,UAAuC;AAC3C,UAAM,SAAS,MAAM,KAAK,aAAa;AAEvC,UAAM,UAAU,MAAM,OAAO;AAAA,MAC3B,CAAC,EAAE,KAAK,KAAK,OAAO,WAAW,KAAK,IAAI,CAAC;AAAA,MACzC;AAAA,QACE,OAAO,KAAK,OAAO;AAAA,QACnB,UAAU,KAAK,OAAO;AAAA,QACtB,OAAO;AAAA;AAAA,QACP,OAAO;AAAA,MACT;AAAA,IACF;AAEA,QAAI,CAAC,WAAW,QAAQ,WAAW,GAAG;AACpC,aAAO;AAAA,IACT;AAGA,UAAM,SAAS,QAAQ,CAAC;AACxB,QAAI,CAAC,UAAU,CAAC,OAAO,YAAY,OAAO,SAAS,WAAW,GAAG;AAC/D,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,OAAO,SAAS,CAAC;AACjC,QAAI,CAAC,SAAS;AACZ,aAAO;AAAA,IACT;AAEA,UAAM,OAAO,QAAQ;AAErB,WAAO;AAAA,MACL,OAAO,KAAK,SAAS;AAAA,MACrB,YAAY,KAAK,cAAc;AAAA,MAC/B,OAAO,KAAK,QAAQ,KAAK,MAAM,KAAK,KAAK,IAAI;AAAA,MAC7C,UAAU,KAAK,WAAW,SAAS,KAAK,QAAQ,IAAI;AAAA,MACpD,WAAW,KAAK,YAAY,IAAI,KAAK,KAAK,SAAS,IAAI,oBAAI,KAAK;AAAA,IAClE;AAAA,EACF;AAAA,EAEA,YAAY,OAA8B;AAGxC,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,gCAAgC,KAAK,EAAE;AAAA,IACtD;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,MAAM,KAAK,OAA8B;AAEvC,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,QAAI,KAAK;AACP,YAAM,KAAK,QAAQ;AAAA,QACjB,OAAO,IAAI;AAAA,QACX,YAAY,IAAI;AAAA,QAChB,OAAO,IAAI;AAAA,QACX,WAAW,oBAAI,KAAK;AAAA,MACtB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,YAAY,OAAe,UAAoC;AACnE,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,YAAY,OAAO,WAAW;AAEpC,UAAM,SAAS,MAAM,OAAO,IAAI,KAAK,QAAQ,KAAK,GAAG,WAAW;AAAA,MAC9D,IAAI;AAAA,MACJ,IAAI;AAAA,IACN,CAAC;AAED,WAAO,WAAW;AAAA,EACpB;AAAA,EAEA,MAAM,YAAY,OAA8B;AAC9C,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,OAAO,IAAI,KAAK,QAAQ,KAAK,CAAC;AAAA,EACt
C;AAAA,EAEA,MAAM,WAAW,OAAe,UAAoC;AAClE,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,SAAS,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,CAAC;AAEtD,QAAI,WAAW;AAAG,aAAO;AAEzB,UAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,GAAG,KAAK,KAAK,WAAW,GAAI,CAAC;AACnE,WAAO;AAAA,EACT;AAAA,EAEA,MAAM,SAAS,OAAiC;AAC9C,UAAM,SAAS,MAAM,KAAK,aAAa;AACvC,UAAM,SAAS,MAAM,OAAO,OAAO,KAAK,QAAQ,KAAK,CAAC;AACtD,WAAO,SAAS;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,cAAgC;AACpC,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,aAAa;AACvC,YAAM,OAAO,IAAI,oBAAoB,MAAM,EAAE,IAAI,EAAE,CAAC;AACpD,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,UAAyB;AACvB,QAAI,KAAK,QAAQ;AACf,UAAI,OAAO,KAAK,OAAO,SAAS,YAAY;AAC1C,aAAK,OAAO,KAAK;AAAA,MACnB,WAAW,OAAO,KAAK,OAAO,eAAe,YAAY;AACvD,aAAK,OAAO,WAAW;AAAA,MACzB;AACA,WAAK,SAAS;AAAA,IAChB;AACA,SAAK,oBAAoB;AACzB,SAAK,cAAc;AAEnB,QAAI,KAAK,OAAO,OAAO;AACrB,kBAAO,MAAM,0BAA0B;AAAA,IACzC;AACA,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACF;;;ACzhCO,IAAM,cAAN,MAAkB;AAAA,EAGvB,YAAY,QAA2B;AACrC,SAAK,SAAS;AAAA,MACZ,gBAAgB;AAAA,MAChB,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACJ,OACA,KACA,eAC6B;AAC7B,UAAM,UAAU,EAAE,GAAG,IAAI,QAAQ;AACjC,UAAM,aAAa,EAAE,GAAG,IAAI,WAAW;AAGvC,UAAM,EAAE,SAAS,UAAU,QAAQ,IAAI,KAAK,WAAW,KAAK;AAI5D,eAAW,CAAC,QAAQ,KAAK,KAAK,OAAO,QAAQ,UAAU,GAAG;AACxD,UAAI,MAAM,WAAW,eAAe,MAAM,WAAW,WAAW;AAE9D,mBAAW,aAAa,QAAQ,IAAI,MAAM,KAAK,CAAC,GAAG;AACjD,gBAAM,gBAAgB,SAAS,IAAI,SAAS,KAAK;AACjD,cAAI,gBAAgB,GAAG;AACrB,qBAAS,IAAI,WAAW,gBAAgB,CAAC;AAAA,UAC3C;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAGA,QAAI,KAAK,SAAS,OAAO,OAAO,GAAG;AACjC,aAAO;AAAA,QACL,WAAW;AAAA,QACX,SAAS;AAAA,QACT;AAAA,QACA;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF;AAGA,QAAI;AACJ,QAAI,eAAe;AAEjB,cAAQ,CAAC,aAAa;AAAA,IACxB,OAAO;AAEL,cAAQ,KAAK,cAAc,UAAU,UAAU;AAAA,IACjD;AAGA,WAAO,MAAM,SAAS,GAAG;AAEvB,YAAM,QAAQ,MAAM,MAAM,GAAG,KAAK,OAAO,cAAc;AACvD,cAAQ,MAAM,MAAM,KAAK,OAAO,cAAc;AAE9C,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,MAAM,IAAI,CAAC,WAAW,KAAK,YAAY,QAAQ,IAAI,MAAM,GAAI,SAAS,UAAU,CAAC;AAAA,MACnF;AAGA,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,cAAM,SAAS,MAAM,CAAC;AACtB,cAAM,SAAS,QAAQ,CAAC;AAExB,YAAI,OAAO,WAAW,aAAa;AACjC,gBAAM,aAAa,OAAO;AAG1B,qBAAW,MAAM,IAAI,WAAW;AAChC,iBAAO,OAAO,SAAS,WAAW,cAAc;AAGhD,cAAI,WAAW,SAAS;AACtB,mBAAO;AAAA,cACL,WAAW;AAAA,cACX,SAAS;AAAA,cACT,aAAa;AAAA,cACb;AAAA,cACA;AAAA,YACF;AAAA,UACF;AAGA,gBAAM,aAAa,QAAQ,IAAI,MAAM;AACrC,cACE,WAAW,MAAM,WAAW,eAC5B,cAAc,KAAK,iBAAiB,UAAU,GAC9C;AACA,kBAAM,KAAK,WAAW,IAAI,IAAI,QAAQ,SAAS,UAAU;AAAA,UAC3D;AAGA,cAAI,WAAW,MAAM,WAAW,UAAU;AACxC,mBAAO;AAAA,cACL,WAAW;AAAA,cACX,SAAS;AAAA,cACT;AAAA,cACA;AAAA,cACA,OAAO,SAAS,MAAM,aAAa,WAAW,MAAM,SAAS,eAAe;AAAA,YAC9E;AAAA,UACF;AAGA,cAAI,WAAW,MAAM,WAAW,eAAe,WAAW,MAAM,WAAW,WAAW;AACpF,uBAAW,aAAa,QAAQ,IAAI,MAAM,KAAK,CAAC,GAAG;AACjD,oBAAM,YAAY,SAAS,IAAI,SAAS,IAAK;AAC7C,uBAAS,IAAI,WAAW,SAAS;AAAA,YACnC;AAAA,UACF;AAAA,QACF,OAAO;AAEL,gBAAM,QAAQ,OAAO,kBAAkB,QACnC,OAAO,OAAO,UACd,OAAO,OAAO,MAAM;AAExB,qBAAW,MAAM,IAAI;AAAA,YACnB;AAAA,YACA,QAAQ;AAAA,YACR;AAAA,YACA,UAAU,WAAW,MAAM,GAAG,WAAW,KAAK;AAAA,YAC9C,aAAa,oBAAI,KAAK;AAAA,UACxB;AAGA,iBAAO;AAAA,YACL,WAAW;AAAA,YACX,SAAS;AAAA,YACT;AAAA,YACA;AAAA,YACA,OAAO,SAAS,MAAM,aAAa,KAAK;AAAA,UAC1C;AAAA,QACF;AAAA,MACF;AAGA,YAAM,WAAW,KAAK,cAAc,UAAU,UAAU;AACxD,cAAQ,CAAC,GAAG,OAAO,GAAG,QAAQ;AAAA,IAChC;AAEA,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,MACA,SACA,YAKC;AACD,UAAM,SAAS,KAAK;AAGpB,UAAM,gBAAgB,WAAW,MAAM;AACvC,QAAI,eAAe,WAAW,aAAa;AACzC,aAAO,EAAE,OAAO,eAAe,gBAAgB,CAAC,GAAG,SAAS,MAAM;AAAA,IACpE;AAEA,SAAK,OAAO,cAAc,MAAM;AAGhC,QAAI,KAAK,OAAO,QAAS,MAAM,KAAK,OAAO,KAAK,OAAO,GAAI;AACzD,YAAM,QAAQ,KAAK,OAAO,aAAa,mBAAmB,MAAM;AAChE,WAAK,OAAO,iBAAiB,
QAAQ,KAAK;AAC1C,aAAO,EAAE,OAAO,gBAAgB,CAAC,GAAG,SAAS,MAAM;AAAA,IACrD;AAGA,UAAM,SAAS,KAAK;AAEpB,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AACH,eAAO,MAAM,KAAK,gBAAgB,MAAM,OAAO;AAAA,MAEjD,KAAK;AACH,eAAO,MAAM,KAAK,oBAAoB,MAAM,QAAQ,SAAS,UAAU;AAAA,MAEzE,KAAK;AACH,eAAO,MAAM,KAAK,eAAe,MAAM,QAAyB,SAAS,UAAU;AAAA,MAErF,KAAK;AACH,eAAO,MAAM,KAAK,kBAAkB,MAAM,QAA4B,SAAS,UAAU;AAAA,MAE3F,KAAK;AACH,eAAO,MAAM,KAAK,gBAAgB,MAAM,QAA0B,OAAO;AAAA,MAE3E,KAAK;AACH,eAAO,MAAM,KAAK;AAAA,UAChB;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AAAA,MAEF;AACE,cAAM,IAAI;AAAA,UACR,sBAAuB,OAA8B,IAAI,eAAe,KAAK,EAAE;AAAA,QAEjF;AAAA,IACJ;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,eACZ,MACA,QACA,SACA,YAKC;AACD,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,QAAQ,OAAO,OAAO,UAAU,aAAa,MAAM,OAAO,MAAM,OAAO,IAAI,OAAO;AAExF,QAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,YAAM,IAAI,MAAM,aAAa,KAAK,EAAE,0BAA0B;AAAA,IAChE;AAEA,QAAI,MAAM,WAAW,GAAG;AAEtB,YAAM,QAAmB;AAAA,QACvB,QAAQ,KAAK;AAAA,QACb,QAAQ;AAAA,QACR,QAAQ,CAAC;AAAA,QACT,SAAS;AAAA,QACT,WAAW,IAAI,KAAK,SAAS;AAAA,QAC7B,aAAa,oBAAI,KAAK;AAAA,MACxB;AACA,aAAO,EAAE,OAAO,gBAAgB,EAAE,CAAC,KAAK,EAAE,GAAG,CAAC,EAAE,GAAG,SAAS,MAAM;AAAA,IACpE;AAGA,UAAM,aAA6B,CAAC;AAGpC,UAAM,gBAAgB,CAAC,MAAoC,CAAC,CAAC,EAAE;AAQ/D,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,OAAO,MAAM,CAAC;AACpB,YAAM,UAAU,GAAG,KAAK,EAAE,IAAI,CAAC;AAE/B,UAAI;AAEJ,UAAI,cAAc,OAAO,SAAS,GAAG;AAEnC,oBAAY;AAAA,UACV,IAAI;AAAA,UACJ,QAAQ;AAAA,YACN,MAAM;AAAA,YACN,UAAU,OAAO;AAAA,YACjB,OAAO;AAAA,YACP,OAAO,OAAO;AAAA,YACd,YAAY;AAAA;AAAA,UACd;AAAA,QACF;AAAA,MACF,OAAO;AAGL,cAAM,kBAAkB,EAAE,GAAG,OAAO,UAAU,OAAO;AAGrD,YAAI,gBAAgB,SAAS,QAAQ;AACnC,0BAAgB,QAAQ;AAAA,QAC1B;AAEA,oBAAY;AAAA,UACV,IAAI;AAAA,UACJ,QAAQ;AAAA,QACV;AAAA,MACF;AAEA,iBAAW,KAAK,SAAS;AAAA,IAC3B;AAKA,UAAM,sBAAsB,KAAK,OAAO;AACxC,QAAI,OAAO,aAAa;AACtB,WAAK,OAAO,iBAAiB,OAAO;AAAA,IACtC;AAEA,QAAI;AACF,YAAM,SAAS,MAAM,KAAK,QAAQ,YAAY;AAAA,QAC5C,IAAI,GAAG,KAAK,EAAE;AAAA,QACd,YAAY;AAAA,QACZ,QAAQ;AAAA,QACR,OAAO,QAAQ;AAAA,QACf,YAAY,CAAC;AAAA;AAAA,QACb,cAAc,CAAC;AAAA,QACf,SAAS,EAAE,GAAG,QAAQ;AAAA;AAAA,QACtB,aAAa,CAAC;AAAA,QACd,kBAAkB,CAAC;AAAA,QACnB,WAAW,oBAAI,KAAK;AAAA,MACtB,CAAC;AAGD,aAAO,OAAO,YAAY,OAAO,UAAU;AAG3C,YAAM,UAAU,WAAW,IAAI,CAAC,UAAU;AACxC,cAAM,aAAa,OAAO,WAAW,MAAM,EAAE;AAC7C,eAAO,YAAY;AAAA,MACrB,CAAC;AAED,YAAM,QAAmB;AAAA,QACvB,QAAQ,KAAK;AAAA,QACb,QAAQ,OAAO,YAAY,cAAe,OAAO,UAAU,YAAY;AAAA,QACvE,QAAQ;AAAA,QACR,OAAO,OAAO;AAAA,QACd,SAAS;AAAA,QACT,WAAW,IAAI,KAAK,SAAS;AAAA,QAC7B,aAAa,OAAO,YAAY,oBAAI,KAAK,IAAI;AAAA,MAC/C;AAEA,WAAK,OAAO,iBAAiB,KAAK,IAAI,KAAK;AAE3C,aAAO;AAAA,QACL;AAAA,QACA,gBAAgB,OAAO,YAAY,EAAE,CAAC,KAAK,EAAE,GAAG,QAAQ,IAAI,CAAC;AAAA,QAC7D,SAAS,OAAO;AAAA,MAClB;AAAA,IACF,UAAE;AAEA,WAAK,OAAO,iBAAiB;AAAA,IAC/B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,uBACZ,MACA,QACA,SACA,aAKC;AACD,UAAM,YAAY,KAAK,IAAI;AAG3B,QAAI;AACJ,QAAI,OAAO,OAAO,aAAa,UAAU;AACvC,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF,OAAO;AACL,oBAAc,OAAO;AAAA,IACvB;AAGA,UAAM,QAAQ,OAAO,OAAO,UAAU,aAClC,MAAM,OAAO,MAAM,OAAO,IACzB,OAAO,SAAS,QAAQ;AAG7B,QAAI;AACJ,QAAI,OAAO,YAAY,UAAU,YAAY;AAC3C,cAAQ,YAAY,MAAM;AAAA,QACxB;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH,OAAO;AACL,cAAQ,YAAY;AAAA,IACtB;AAIA,UAAM,WAAW,GAAG,KAAK,EAAE,QAAQ,WAAW,CAAC;AAG/C,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO;AAAA,MACvC,IAAI;AAAA,MACJ,YAAY,YAAY;AAAA,MACxB,QAAQ;AAAA,MACR;AAAA,MACA,YAAY,CAAC;AAAA,MACb,cAAc,CAAC;AAAA,MACf,SAAS;AAAA,QACP;AAAA;AAAA;AAAA;AAAA,MAGF;AAAA,MACA,aAAa,CAAC;AAAA,MACd,kBAAkB,CAAC;AAAA,MACnB,WAAW,oBAAI,KAAK;AAAA,IACtB,CAAC;AAGD,QAAI,cAAc,OAAO;AAGzB,QAAI,OAAO,aAAa,OAAO,QAAQ;AACrC,oBAAc,OAAO,OAAO,OAAO,OAAO;AAAA,IAC5C;AAEA,UAAM,QAAmB;AAAA,MACvB,QAAQ,KAAK;AAAA,MACb,QAAQ,OAAO,YAAY,cAAe,OAAO,UAAU,YAAY;AAAA,MACvE,QAAQ;AAAA,MACR
,OAAO,OAAO;AAAA,MACd,SAAS;AAAA,MACT,WAAW,IAAI,KAAK,SAAS;AAAA,MAC7B,aAAa,OAAO,YAAY,oBAAI,KAAK,IAAI;AAAA,IAC/C;AAEA,SAAK,OAAO,iBAAiB,KAAK,IAAI,KAAK;AAE3C,WAAO;AAAA,MACL;AAAA,MACA,gBAAgB,OAAO,YAAY,EAAE,CAAC,KAAK,EAAE,GAAG,YAAY,IAAI,CAAC;AAAA,MACjE,SAAS,OAAO;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,MACA,SAKC;AACD,UAAM,SAAS,MAAM,KAAK,OAAO,aAAa,QAAQ,MAAM,OAAO;AAEnE,UAAM,QAAmB;AAAA,MACvB,QAAQ,KAAK;AAAA,MACb,QAAQ,OAAO,UAAU,cAAc;AAAA,MACvC,OAAO,QAAQ;AAAA,MACf,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS;AAAA,MACT,WAAW,IAAI,KAAK,KAAK,IAAI,IAAI,OAAO,aAAa;AAAA,MACrD,aAAa,oBAAI,KAAK;AAAA,IACxB;AAEA,SAAK,OAAO,iBAAiB,KAAK,IAAI,KAAK;AAE3C,WAAO;AAAA,MACL;AAAA,MACA,gBAAgB,OAAO,UAAU,EAAE,CAAC,KAAK,EAAE,GAAG,OAAO,OAAO,IAAI,CAAC;AAAA,MACjE,SAAS;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBACZ,MACA,QACA,SACA,YAKC;AACD,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,SAAS,MAAM,KAAK,QAAQ,OAAO,OAAO;AAAA,MAC9C,IAAI,GAAG,KAAK,EAAE;AAAA,MACd,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,OAAO,QAAQ;AAAA,MACf,YAAY,CAAC;AAAA,MACb,cAAc,CAAC;AAAA,MACf;AAAA,MACA,aAAa,CAAC;AAAA,MACd,kBAAkB,CAAC;AAAA,MACnB,WAAW,oBAAI,KAAK;AAAA,IACtB,CAAC;AAGD,WAAO,OAAO,YAAY,OAAO,UAAU;AAE3C,UAAM,QAAmB;AAAA,MACvB,QAAQ,KAAK;AAAA,MACb,QAAQ,OAAO,YAAY,cAAe,OAAO,UAAU,YAAY;AAAA,MACvE,QAAQ,OAAO;AAAA,MACf,OAAO,OAAO;AAAA,MACd,SAAS;AAAA,MACT,WAAW,IAAI,KAAK,SAAS;AAAA,MAC7B,aAAa,OAAO,YAAY,oBAAI,KAAK,IAAI;AAAA,IAC/C;AAEA,SAAK,OAAO,iBAAiB,KAAK,IAAI,KAAK;AAE3C,WAAO;AAAA,MACL;AAAA,MACA,gBAAgB,OAAO;AAAA,MACvB,SAAS,OAAO;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBACZ,MACA,QACA,SACA,YAKC;AACD,UAAM,YAAY,KAAK,IAAI;AAG3B,UAAM,kBAAkB,MAAM,OAAO,UAAU,OAAO;AAGtD,UAAM,cAAc,kBAAkB,OAAO,OAAQ,OAAO,QAAQ,CAAC;AAErE,QAAI,YAAY,WAAW,GAAG;AAE5B,YAAMC,SAAmB;AAAA,QACvB,QAAQ,KAAK;AAAA,QACb,QAAQ;AAAA,QACR,QAAQ,EAAE,QAAQ,kBAAkB,SAAS,QAAQ,SAAS,KAAK;AAAA,QACnE,SAAS;AAAA,QACT,WAAW,IAAI,KAAK,SAAS;AAAA,QAC7B,aAAa,oBAAI,KAAK;AAAA,MACxB;AAEA,aAAO,EAAE,OAAAA,QAAO,gBAAgB,CAAC,GAAG,SAAS,MAAM;AAAA,IACrD;AAGA,UAAM,SAAS,MAAM,KAAK,QAAQ,aAAa;AAAA,MAC7C,IAAI,GAAG,KAAK,EAAE;AAAA,MACd,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,OAAO,QAAQ;AAAA,MACf,YAAY,CAAC;AAAA,MACb,cAAc,CAAC;AAAA,MACf;AAAA,MACA,aAAa,CAAC;AAAA,MACd,kBAAkB,CAAC;AAAA,MACnB,WAAW,oBAAI,KAAK;AAAA,IACtB,CAAC;AAGD,WAAO,OAAO,YAAY,OAAO,UAAU;AAE3C,UAAM,QAAmB;AAAA,MACvB,QAAQ,KAAK;AAAA,MACb,QAAQ,OAAO,YAAY,cAAe,OAAO,UAAU,YAAY;AAAA,MACvE,QAAQ;AAAA,QACN,QAAQ,kBAAkB,SAAS;AAAA,QACnC,QAAQ,OAAO;AAAA,MACjB;AAAA,MACA,OAAO,OAAO;AAAA,MACd,SAAS;AAAA,MACT,WAAW,IAAI,KAAK,SAAS;AAAA,MAC7B,aAAa,OAAO,YAAY,oBAAI,KAAK,IAAI;AAAA,IAC/C;AAEA,SAAK,OAAO,iBAAiB,KAAK,IAAI,KAAK;AAE3C,WAAO;AAAA,MACL;AAAA,MACA,gBAAgB,OAAO;AAAA,MACvB,SAAS,OAAO;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,gBACZ,MACA,QACA,SAKC;AAED,SAAK,OAAO,YAAY,KAAK,IAAI,MAAM;AAEvC,UAAM,QAAmB;AAAA,MACvB,QAAQ,KAAK;AAAA,MACb,QAAQ;AAAA,MACR,OAAO;AAAA,QACL,MAAM,OAAO;AAAA,QACb,SAAS,OAAO;AAAA,QAChB,SAAS,OAAO,OAAO,YAAY,aAC/B,MAAM,OAAO,QAAQ,OAAO,IAC5B,OAAO;AAAA,MACb;AAAA,MACA,SAAS;AAAA,MACT,WAAW,oBAAI,KAAK;AAAA,IACtB;AAGA,WAAO;AAAA,MACL;AAAA,MACA,gBAAgB,CAAC;AAAA,MACjB,SAAS;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,OAIjB;AACA,UAAM,UAAU,oBAAI,IAAsB;AAC1C,UAAM,WAAW,oBAAI,IAAoB;AACzC,UAAM,UAAU,oBAAI,IAA0B;AAG9C,eAAW,QAAQ,OAAO;AACxB,cAAQ,IAAI,KAAK,IAAI,CAAC,CAAC;AACvB,eAAS,IAAI,KAAK,IAAI,CAAC;AACvB,cAAQ,IAAI,KAAK,IAAI,IAAI;AAAA,IAC3B;AAGA,eAAW,QAAQ,OAAO;AACxB,iBAAW,OAAO,KAAK,aAAa,CAAC,GAAG;AACtC,YAAI,CAAC,QAAQ,IAAI,GAAG,GAAG;AACrB,gBAAM,IAAI;AAAA,YACR,SAAS,KAAK,EAAE,8BAA8B,GAAG;AAAA,UACnD;AAAA,QACF;AACA,gBAAQ,IAAI,GAAG,EAAG,KAAK,KAAK,EAAE;AAC9B,iBAAS,IAAI,KAAK,IAAI,SAAS,IAAI,KAAK,EAAE,IAAK,CAAC;AAAA,MAClD;AAAA,IACF;AAKA,QAAI
,aAA4B;AAChC,eAAW,QAAQ,OAAO;AAKxB,UAAI,KAAK,cAAc,UAAa,YAAY;AAC9C,cAAM,cAAc,KAAK,iBAAiB,OAAO,KAAK,EAAE;AACxD,cAAM,kBAAkB,SAAS,IAAI,KAAK,EAAE,KAAK;AAEjD,YAAI,CAAC,eAAe,oBAAoB,GAAG;AAGzC,kBAAQ,IAAI,UAAU,EAAG,KAAK,KAAK,EAAE;AACrC,mBAAS,IAAI,KAAK,IAAI,SAAS,IAAI,KAAK,EAAE,IAAK,CAAC;AAAA,QAClD;AAAA,MACF;AACA,mBAAa,KAAK;AAAA,IACpB;AAEA,WAAO,EAAE,SAAS,UAAU,QAAQ;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,OAAuB,QAAyB;AACvE,WAAO,MAAM,KAAK,CAAC,MAAM,EAAE,WAAW,SAAS,MAAM,CAAC;AAAA,EACxD;AAAA;AAAA;AAAA;AAAA,EAKQ,cACN,UACA,YACU;AACV,UAAM,QAAkB,CAAC;AAEzB,eAAW,CAAC,QAAQ,MAAM,KAAK,UAAU;AAIvC,YAAM,QAAQ,WAAW,MAAM;AAC/B,YAAM,UAAU,WAAW,MACxB,CAAC,SAAS,MAAM,WAAW;AAE9B,UAAI,SAAS;AACX,cAAM,KAAK,MAAM;AAAA,MACnB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,SACN,OACA,SACS;AACT,UAAM,UAAU,oBAAI,IAAY;AAChC,UAAM,iBAAiB,oBAAI,IAAY;AAEvC,UAAM,MAAM,CAAC,WAA4B;AACvC,cAAQ,IAAI,MAAM;AAClB,qBAAe,IAAI,MAAM;AAEzB,iBAAW,YAAY,QAAQ,IAAI,MAAM,KAAK,CAAC,GAAG;AAChD,YAAI,CAAC,QAAQ,IAAI,QAAQ,GAAG;AAC1B,cAAI,IAAI,QAAQ;AAAG,mBAAO;AAAA,QAC5B,WAAW,eAAe,IAAI,QAAQ,GAAG;AACvC,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,qBAAe,OAAO,MAAM;AAC5B,aAAO;AAAA,IACT;AAEA,eAAW,QAAQ,OAAO;AACxB,UAAI,CAAC,QAAQ,IAAI,KAAK,EAAE,GAAG;AACzB,YAAI,IAAI,KAAK,EAAE;AAAG,iBAAO;AAAA,MAC3B;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAAiB,MAA6B;AACpD,WAAO,KAAK,OAAO,cAAc;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,WACZ,OACA,QACA,SACA,YACe;AACf,QAAI,CAAC,KAAK,OAAO,mBAAmB;AAClC;AAAA,IACF;AAEA,UAAM,aAAyB;AAAA,MAC7B,IAAI,WAAW,IAAI;AAAA,MACnB;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,MACpB,SAAS,gBAAgB,OAAO;AAAA,MAChC,YAAY,gBAAgB,UAAU;AAAA,IACxC;AAEA,UAAM,KAAK,OAAO,kBAAkB,KAAK,OAAO,UAAU;AAAA,EAC5D;AACF;;;AC7zBO,IAAM,oBAAN,MAAwB;AAAA,EAG7B,YAAY,QAAiC;AAC3C,SAAK,SAAS;AAAA,MACZ,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,KAAK,OAAe,YAAuC;AAC/D,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,yCAAyC,WAAW,EAAE,YAAY,KAAK,EAAE;AAAA,IACvF;AAEA,UAAM,KAAK,OAAO,QAAQ,eAAe,OAAO,UAAU;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,OACA,QACA,SACA,YACqB;AACrB,UAAM,aAAyB;AAAA,MAC7B,IAAI,WAAW,IAAI;AAAA,MACnB;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,MACpB,SAAS,gBAAgB,OAAO;AAAA,MAChC,YAAY,gBAAgB,UAAU;AAAA,IACxC;AAEA,UAAM,KAAK,KAAK,OAAO,UAAU;AAEjC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAU,OAA2C;AACzD,WAAO,MAAM,KAAK,OAAO,QAAQ,oBAAoB,KAAK;AAAA,EAC5D;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,OAAsC;AACjD,QAAI,KAAK,OAAO,QAAQ,gBAAgB;AACtC,aAAO,MAAM,KAAK,OAAO,QAAQ,eAAe,KAAK;AAAA,IACvD;AAGA,UAAM,SAAS,MAAM,KAAK,UAAU,KAAK;AACzC,WAAO,SAAS,CAAC,MAAM,IAAI,CAAC;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cACJ,OACA,OACA,gBAC4B;AAC5B,QAAI;AAEJ,QAAI,gBAAgB;AAElB,YAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AACnC,mBAAa,IAAI,KAAK,CAAC,MAAM,EAAE,OAAO,cAAc,KAAK;AAAA,IAC3D,OAAO;AAEL,mBAAa,MAAM,KAAK,UAAU,KAAK;AAAA,IACzC;AAEA,QAAI,CAAC,YAAY;AACf,aAAO;AAAA,IACT;AAGA,UAAM,gBAAgB,KAAK,aAAa,OAAO,UAAU;AAEzD,QAAI,CAAC,eAAe;AAElB,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,SAAS,gBAAgB,WAAW,OAAO;AAAA,MAC3C,YAAY,gBAAgB,WAAW,UAAU;AAAA,IACnD;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aACN,OACA,YACe;AACf,UAAM,kBAAkB,WAAW;AACnC,UAAM,aAAa,WAAW;AAG9B,UAAM,YAAY,oBAAI,IAAoB;AAC1C,UAAM,QAAQ,CAAC,MAAM,UAAU,UAAU,IAAI,KAAK,IAAI,KAAK,CAAC;AAG5D,UAAM,kBAAkB,UAAU,IAAI,eAAe;AACrD,QAAI,oBAAoB,QAAW;AAEjC,YAAM,YAAY,MAAM,CAAC;AACzB,aAAO,WAAW,MAAM;AAAA,IAC1B;AAGA,aAAS,IAAI,kBAAkB,GAAG,IAAI,MAAM,QAAQ,KAAK;AACvD,YAAM,OAAO,MAAM,CAAC;AACpB,UAAI,CAAC;AAAM;AAEX,YAAM,QAAQ,WAAW,KAAK,EAAE;AAGhC,UAAI,CAAC,SAAS,MAAM,WAAW,WAAW;AACxC,eAAO,KAAK;AAAA,MACd;AAAA,IACF;AAGA,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,WAAW,SAAS,eAAe,GAAG;AAC7C,cAAM,QAAQ,WAAW,KAAK,EAAE;AAChC,YAAI,CAAC,SAAS,MAAM,WAAW,WAAW;
AACxC,iBAAO,KAAK;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAGA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,MAA6B;AAC5C,UAAM,SAAS,KAAK;AAGpB,QAAI,OAAO,eAAe,QAAW;AACnC,aAAO,OAAO;AAAA,IAChB;AAMA,YAAQ,OAAO,MAAM;AAAA,MACnB,KAAK;AAEH,eAAO,WAAW,UAAU,CAAC,CAAC,OAAO;AAAA,MAEvC,KAAK;AAEH,eAAO;AAAA,MAET,KAAK;AAEH,eAAO;AAAA,MAET,KAAK;AAEH,eAAO;AAAA,MAET,KAAK;AAEH,eAAO;AAAA,MAET;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QAAQ,OAAe,YAAoB,GAAkB;AACjE,UAAM,MAAM,MAAM,KAAK,OAAO,KAAK;AAEnC,QAAI,IAAI,UAAU,WAAW;AAC3B;AAAA,IACF;AAGA,QAAI,KAAK,CAAC,GAAG,MAAM,EAAE,UAAU,QAAQ,IAAI,EAAE,UAAU,QAAQ,CAAC;AAGhE,UAAM,WAAW,IAAI,MAAM,SAAS;AACpC,UAAM,cAAc,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE;AAE5C,QAAI,YAAY,WAAW,GAAG;AAC5B;AAAA,IACF;AAEA,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ;AAAA,QACN,mCAAmC,YAAY,MAAM,4BAA4B,KAAK;AAAA,MACxF;AAAA,IACF;AAGA,QAAI,KAAK,OAAO,QAAQ,mBAAmB;AACzC,YAAM,KAAK,OAAO,QAAQ,kBAAkB,OAAO,WAAW;AAAA,IAChE,WAAW,KAAK,OAAO,QAAQ,kBAAkB;AAC/C,iBAAW,MAAM,aAAa;AAC5B,cAAM,KAAK,OAAO,QAAQ,iBAAiB,OAAO,EAAE;AAAA,MACtD;AAAA,IACF;AAAA,EAEF;AACF;;;AClQA,IAAM,0BAA0B,IAAI,KAAK;AA4DlC,IAAM,eAAN,MAAmB;AAAA,EAGxB,YAAY,SAA6B,CAAC,GAAG;AAC3C,SAAK,SAAS;AAAA,MACZ,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACJ,MACA,SACqB;AACrB,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,SAAS,KAAK;AAEpB,QAAI,OAAO,SAAS,QAAQ;AAC1B,YAAM,IAAI;AAAA,QACR,yDAAyD,KAAK,EAAE,eAAe,OAAO,IAAI;AAAA,MAE5F;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,gBAAgB,MAAM,KAAK,aAAa,OAAO,OAAO,OAAO;AACnE,WAAK,OAAO,cAAc,KAAK,IAAI,aAAa;AAGhD,YAAM,UAAU,OAAO,UAAU,cAAc,OAAO,OAAO,IAAI,KAAK,OAAO;AAE7E,YAAM,SAAS,MAAM,KAAK;AAAA,QACxB,MAAM,KAAK,YAAY,QAAQ,eAAe,OAAO;AAAA,QACrD;AAAA,QACA,KAAK;AAAA,MACP;AAGA,WAAK,OAAO,iBAAiB,KAAK,IAAI,MAAM;AAE5C,aAAO;AAAA,QACL,SAAS;AAAA,QACT;AAAA,QACA,eAAe,KAAK,IAAI,IAAI;AAAA,MAC9B;AAAA,IACF,SAAS,OAAO;AACd,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAG1E,WAAK,OAAO,cAAc,KAAK,IAAI,KAAc;AAEjD,aAAO;AAAA,QACL,SAAS;AAAA,QACT,OAAO;AAAA,QACP,eAAe,KAAK,IAAI,IAAI;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OACA,SACkB;AAClB,QAAI,UAAU,QAAW;AAEvB,aAAO,QAAQ;AAAA,IACjB;AAEA,QAAI,OAAO,UAAU,YAAY;AAC/B,aAAO,MAAM,MAAM,OAAO;AAAA,IAC5B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,mBACZ,IACA,SACA,QACY;AACZ,QAAI;AAEJ,UAAM,iBAAiB,IAAI,QAAe,CAAC,GAAG,WAAW;AACvD,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,MAAM,SAAS,MAAM,qBAAqB,OAAO,IAAI,CAAC;AAAA,MACnE,GAAG,OAAO;AAAA,IACZ,CAAC;AAED,QAAI;AACF,aAAO,MAAM,QAAQ,KAAK,CAAC,GAAG,GAAG,cAAc,CAAC;AAAA,IAClD,UAAE;AACA,UAAI,cAAc,QAAW;AAC3B,qBAAa,SAAS;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,QACA,OACA,SACkB;AAClB,QAAI,OAAO,OAAO;AAChB,aAAO,MAAM,KAAK,aAAa,OAAO,OAAO,OAAO,OAAO;AAAA,IAC7D;AAEA,QAAI,OAAO,MAAM;AACf,aAAO,MAAM,KAAK,YAAY,OAAO,MAAM,KAAK;AAAA,IAClD;AAEA,UAAM,IAAI,MAAM,mDAAmD;AAAA,EACrE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,aACZ,OACA,OACA,SACkB;AAElB,UAAM,gBAAgB,OAAO,UAAU,WAAW,KAAK,SAAS,KAAK,IAAI;AAGzE,UAAM,aAAa,OAAO,UAAU,WAAW,QAAQ,KAAK,UAAU,KAAK;AAG3E,UAAM,WAA0B,MAAM,cAAc,SAAS;AAAA,MAC3D,OAAO;AAAA,MACP;AAAA,IACF,CAAC;AAGD,WAAO;AAAA,MACL,MAAM,SAAS;AAAA,MACf,WAAW,SAAS;AAAA,MACpB,QAAQ,SAAS;AAAA,MACjB,OAAO,SAAS;AAAA,IAClB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,MACA,OACkB;AAElB,UAAM,eAAe,OAAO,SAAS,WAAW,KAAK,QAAQ,IAAI,IAAI;AAGrE,UAAM,SAAS,MAAM,aAAa;AAAA,MAChC;AAAA,MACA;AAAA,QACE,SAAS;AAAA,QACT,aAAa,KAAK,OAAO;AAAA,MAC3B;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,SAAS,IAAmB;AAClC,QAAI,CAAC,KAAK,OAAO,eAAe;AAC9B,YAAM,IAAI;AAAA,QACR,wDAAwD,EAAE;AAAA,MAC5D;AAAA,IACF;AAEA,UAAM,QAAQ,KAAK,OAAO,cAAc,IAAI,EAAE;AAC9C,QAAI,CAAC,OAAO;AACV,YAAM,YAAY,KAAK,OAAO,cAAc,OAAO,KAAK,CAAC;AACzD,YAAM,aAAa,UAAU,SAAS,I
AClC,sBAAsB,UAAU,MAAM,GAAG,CAAC,EAAE,KAAK,IAAI,CAAC,GACtD,UAAU,SAAS,IAAI,QAAQ,EACjC,KACE;AACJ,YAAM,IAAI,MAAM,qBAAqB,EAAE,KAAK,UAAU,EAAE;AAAA,IAC1D;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,QAAQ,IAAkB;AAChC,QAAI,CAAC,KAAK,OAAO,cAAc;AAC7B,YAAM,IAAI;AAAA,QACR,sDAAsD,EAAE;AAAA,MAC1D;AAAA,IACF;AAEA,UAAM,OAAO,KAAK,OAAO,aAAa,IAAI,EAAE;AAC5C,QAAI,CAAC,MAAM;AACT,YAAM,YAAY,KAAK,OAAO,aAAa,OAAO,KAAK,CAAC;AACxD,YAAM,aAAa,UAAU,SAAS,IAClC,qBAAqB,UAAU,MAAM,GAAG,CAAC,EAAE,KAAK,IAAI,CAAC,GACrD,UAAU,SAAS,IAAI,QAAQ,EACjC,KACE;AACJ,YAAM,IAAI,MAAM,oBAAoB,EAAE,KAAK,UAAU,EAAE;AAAA,IACzD;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WACJ,MACA,SACkB;AAClB,UAAM,SAAS,KAAK;AAEpB,QAAI,CAAC,OAAO,MAAM;AAChB,aAAO;AAAA,IACT;AAEA,WAAO,MAAM,OAAO,KAAK,OAAO;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,QAA2B;AAC5C,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR,SAAS;AAAA,IACX;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,QAAgB,OAAgB,SAA4B;AAC7E,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,WAAW,oBAAI,KAAK;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,qBACE,QACA,eACW;AACX,QAAI,OAAO,SAAS;AAClB,aAAO;AAAA,QACL,GAAG;AAAA,QACH,QAAQ;AAAA,QACR,QAAQ,OAAO;AAAA,QACf,aAAa,oBAAI,KAAK;AAAA,MACxB;AAAA,IACF;AAEA,WAAO;AAAA,MACL,GAAG;AAAA,MACH,QAAQ;AAAA,MACR,OAAO,OAAO;AAAA,MACd,aAAa,oBAAI,KAAK;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,mBAAmB,QAA2B;AAC5C,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,aAAa,oBAAI,KAAK;AAAA,IACxB;AAAA,EACF;AACF;;;ACjTO,IAAM,mBAAN,MAAM,kBAAiB;AAAA,EAW5B,YAAY,QAAgC;AAN5C,SAAQ,YAAY,oBAAI,IAA0C;AAOhE,SAAK,SAAS;AAAA,MACZ,gBAAgB;AAAA,MAChB,OAAO;AAAA,MACP,cAAc,kBAAiB;AAAA,MAC/B,GAAG;AAAA,IACL;AAGA,SAAK,eAAe,IAAI,aAAa;AAAA,MACnC,GAAG,KAAK,OAAO;AAAA,MACf,aAAa,KAAK,OAAO;AAAA,IAC3B,CAAC;AAED,SAAK,oBAAoB,IAAI,kBAAkB;AAAA,MAC7C,SAAS,KAAK,OAAO;AAAA,MACrB,OAAO,KAAK,OAAO;AAAA,IACrB,CAAC;AAED,SAAK,cAAc,IAAI,YAAY;AAAA,MACjC,cAAc,KAAK;AAAA,MACnB,mBAAmB,KAAK;AAAA,MACxB,gBAAgB,KAAK,OAAO;AAAA,MAC5B,OAAO,KAAK,OAAO;AAAA;AAAA;AAAA,MAGnB,WAAW,MAAM;AAAA,MAAC;AAAA,IACpB,CAAC;AAED,QAAI,KAAK,OAAO,aAAa;AAC3B,YAAM,KAAK,KAAK,OAAO;AACvB,WAAK,eAAe;AAAA,QAClB,SAAS,CAAC,QAAQ,IAAI,WAAW,SAAS,GAAG,QAAQ,IAAI,EAAE,IAAI,QAAQ,QAAQ,IAAI;AAAA,QACnF,UAAU,CAAC,QAAQ,IAAI,WAAW,SAAS,GAAG,SAAS,IAAI,EAAE,IAAI,QAAQ,QAAQ,IAAI;AAAA,QACrF,WAAW,CAAC,QAAQ,IAAI,WAAW,SAAS,GAAG,UAAU,IAAI,EAAE,IAAI,QAAQ,QAAQ,IAAI;AAAA,QACvF,MAAM,CAAC,QAAQ,IAAI,WAAW,SAAS,GAAG,KAAK,IAAI,EAAE,IAAI,QAAQ,QAAQ,IAAI;AAAA,QAC7E,QAAQ,CAAC,QAAQ,IAAI,WAAW,SAAS,GAAG,OAAO,IAAI,EAAE,IAAI,QAAQ,QAAQ,MAAS;AAAA,MACxF;AAAA,IACF;AAAA,EACF;AAAA,EAzCA;AAAA;AAAA,SAAwB,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA,EA8ChD,SAA0BC,WAAqD;AAC7E,SAAK,UAAU,IAAIA,UAAS,IAAIA,SAAQ;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAsD;AAChE,WAAO,KAAK,UAAU,IAAI,EAAE;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MACJ,YACA,OACA,SACkC;AAClC,UAAMA,YAAW,KAAK,UAAU,IAAI,UAAU;AAC9C,QAAI,CAACA,WAAU;AACb,YAAM,IAAI,MAAM,uBAAuB,UAAU,EAAE;AAAA,IACrD;AAGA,QAAIA,UAAS,aAAa;AACxB,MAAAA,UAAS,YAAY,MAAM,KAAK;AAAA,IAClC;AAGA,UAAM,MAAoC;AAAA,MACxC,IAAI,SAAS,SAAS,WAAW,KAAK;AAAA,MACtC;AAAA,MACA,SAASA,UAAS;AAAA,MAClB,QAAQ;AAAA,MACR;AAAA,MACA,YAAY,CAAC;AAAA,MACb,cAAc,CAAC;AAAA,MACf,SAAS,EAAE,MAAM;AAAA,MACjB,aAAa,CAAC;AAAA,MACd,kBAAkB,CAAC;AAAA,MACnB,WAAW,oBAAI,KAAK;AAAA,IACtB;AAGA,UAAM,KAAK,OAAO,QAAQ,UAAU,GAAG;AAGvC,SAAK,aAAa,IAAI,EAAE,EAAE,MAAM,CAAC,UAAU;AACzC,cAAQ,MAAM,YAAY,IAAI,EAAE,YAAY,KAAK;AAAA,IACnD,CAAC;AAED,WAAO,KAAK,aAAsB,IAAI,EAAE;AAAA,EAC1C;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,OAAe,gBAAwC;AAClE,UAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAClD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,IAC3C;AAEA,QAAI,IAAI,WAAW,aAAa,IAAI,WAAW,WA
AW;AACxD,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,yBAAyB,IAAI,MAAM;AAAA,MAEzE;AAAA,IACF;AAGA,UAAMA,YAAW,KAAK,UAAU,IAAI,IAAI,UAAU;AAClD,QAAI,CAACA,WAAU;AACb,YAAM,IAAI,MAAM,uBAAuB,IAAI,UAAU,EAAE;AAAA,IACzD;AAGA,UAAM,QAAQ,KAAK,aAAaA,WAAU,IAAI,OAAO;AAGrD,UAAM,aAAa,MAAM,KAAK,kBAAkB;AAAA,MAC9C;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,QAAI,kBAAkB,CAAC,YAAY;AACjC,YAAM,IAAI;AAAA,QACR,eAAe,cAAc,wBAAwB,KAAK;AAAA,MAE5D;AAAA,IACF;AAEA,QAAI,YAAY;AAEd,YAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,QACzC,QAAQ;AAAA,QACR,SAAS,WAAW;AAAA,QACpB,YAAY,WAAW;AAAA,MACzB,CAAC;AAAA,IACH;AAGA,UAAM,KAAK,aAAa,OAAO,YAAY,aAAa;AAAA,EAC1D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,aAAa,OAAe,eAAuC;AACvE,UAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAClD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,IAC3C;AAGA,UAAMA,YAAW,KAAK,UAAU,IAAI,IAAI,UAAU;AAClD,QAAI,CAACA,WAAU;AACb,YAAM,IAAI,MAAM,uBAAuB,IAAI,UAAU,EAAE;AAAA,IACzD;AAGA,UAAM,aAAa,KAAK,OAAO,kBAAkB,SAC/C,eAAe,KAAK,OAAO,OAAO;AACpC,UAAM,eAAe,KAAK,OAAO;AAEjC,QAAI,YAAY;AACd,YAAM,WAAW,MAAM,KAAK,OAAO,QAAQ,YAAa,OAAO,YAAY;AAC3E,UAAI,CAAC,UAAU;AACb,cAAM,IAAI;AAAA,UACR,gCAAgC,KAAK;AAAA,QAEvC;AAAA,MACF;AAEA,UAAI,KAAK,OAAO,OAAO;AACrB,gBAAQ,IAAI,6CAA6C,KAAK,EAAE;AAAA,MAClE;AAAA,IACF;AAEA,QAAI;AAEF,YAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,QACzC,QAAQ;AAAA,QACR,WAAW,IAAI,aAAa,oBAAI,KAAK;AAAA,MACvC,CAAC;AAGD,YAAM,aAAa,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AACzD,WAAK,OAAO,UAAU,UAAW;AAGjC,YAAM,QAAQ,KAAK,aAAaA,WAAU,IAAI,OAAO;AAGrD,YAAM,SAAS,MAAM,KAAK;AAAA,QACxB,MAAM,KAAK,YAAY,QAAQ,OAAO,KAAoB,aAAa;AAAA,QACvEA,UAAS;AAAA,MACX;AAGA,UAAI,OAAO,WAAW;AAEpB,cAAM,WAAW,MAAM,KAAK;AAAA,UAC1B;AAAA,UACA,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAGA,YAAIA,UAAS,cAAc;AACzB,UAAAA,UAAS,aAAa,MAAM,SAAS,MAAM;AAAA,QAC7C;AAGA,cAAMA,UAAS,aAAa,SAAS,QAAQ,SAAS,OAAO;AAC7D,aAAK,OAAO,aAAa,QAAQ;AAAA,MACnC,WAAW,OAAO,SAAS;AAEzB,cAAM,KAAK;AAAA,UACT;AAAA,UACA,OAAO;AAAA,UACP,OAAO;AAAA,UACP,OAAO;AAAA,QACT;AAEA,cAAM,YAAY,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AACxD,aAAK,OAAO,YAAY,WAAY,OAAO,WAAY;AAAA,MACzD,OAAO;AAEL,cAAM,QAAQ,IAAI,MAAM,OAAO,SAAS,eAAe;AACvD,cAAM,KAAK,QAAQ,OAAO,OAAO,OAAO,SAAS,OAAO,UAAU;AAElE,cAAMA,UAAS,UAAU,OAAO,OAAO,OAAO;AAC9C,aAAK,OAAO,UAAU,KAAK,KAAK;AAAA,MAClC;AAAA,IACF,SAAS,OAAO;AAEd,YAAM,MAAM,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,YAAM,KAAK,QAAQ,OAAO,KAAK,IAAI,SAAS,IAAI,UAAU;AAE1D,YAAMA,UAAS,UAAU,KAAK,IAAI,OAAO;AACzC,WAAK,OAAO,UAAU,KAAK,GAAG;AAE9B,YAAM;AAAA,IACR,UAAE;AAEA,UAAI,YAAY;AACd,cAAM,KAAK,OAAO,QAAQ,YAAa,KAAK;AAE5C,YAAI,KAAK,OAAO,OAAO;AACrB,kBAAQ,IAAI,6CAA6C,KAAK,EAAE;AAAA,QAClE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,aACNA,WACA,SACgB;AAChB,QAAI;AAEJ,QAAI,MAAM,QAAQA,UAAS,KAAK,GAAG;AACjC,cAAQA,UAAS;AAAA,IACnB,OAAO;AAEL,UAAI,CAAC,KAAK,OAAO,aAAa;AAAA,MAG9B;AAEA,YAAM,iBAAqC;AAAA,QACzC,OAAO,QAAQ;AAAA,QACf;AAAA,QACA,aAAa,KAAK,OAAO;AAAA,QACzB,MAAM,KAAK;AAAA,MACb;AACA,cAAQA,UAAS,MAAM,cAAc;AAAA,IACvC;AAGA,SAAK,cAAc,OAAOA,UAAS,EAAE;AAErC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,OAAuB,YAA0B;AACrE,QAAI,CAAC,MAAM,QAAQ,KAAK,GAAG;AACzB,YAAM,IAAI,MAAM,aAAa,UAAU,kCAAkC;AAAA,IAC3E;AAEA,QAAI,MAAM,WAAW,GAAG;AACtB,YAAM,IAAI,MAAM,aAAa,UAAU,+BAA+B;AAAA,IACxE;AAEA,UAAM,UAAU,oBAAI,IAAY;AAEhC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,OAAO,MAAM,CAAC;AAEpB,UAAI,CAAC,MAAM;AACT,cAAM,IAAI,MAAM,aAAa,UAAU,iCAAiC,CAAC,EAAE;AAAA,MAC7E;AAEA,UAAI,CAAC,KAAK,MAAM,OAAO,KAAK,OAAO,UAAU;AAC3C,cAAM,IAAI,MAAM,aAAa,UAAU,mBAAmB,CAAC,iBAAiB;AAAA,MAC9E;AAEA,UAAI,QAAQ,IAAI,KAAK,EAAE,GAAG;AACxB,cAAM,IAAI,MAAM,aAAa,UAAU,6BAA6B,KAAK,EAAE,GAAG;AAAA,MAChF;AACA,cAAQ,IAAI,KAAK,EAAE;AAEnB,UAAI,CAAC,KAAK,UAAU,OAAO,KAAK,WAAW,UAAU;AACnD,cAAM,IAAI,MAAM,aAAa,UAAU,WAAW,KAAK,EAAE,sBAAsB;AAAA,
MACjF;AAEA,UAAI,CAAC,KAAK,OAAO,MAAM;AACrB,cAAM,IAAI,MAAM,aAAa,UAAU,WAAW,KAAK,EAAE,uBAAuB;AAAA,MAClF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,mBACZ,IACA,SACY;AACZ,QAAI,CAAC,SAAS;AACZ,aAAO,GAAG;AAAA,IACZ;AAEA,UAAM,YAAY,cAAc,OAAO;AACvC,QAAI;AAEJ,UAAM,iBAAiB,IAAI,QAAe,CAAC,GAAG,WAAW;AACvD,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,MAAM,4BAA4B,SAAS,IAAI,CAAC;AAAA,MAC7D,GAAG,SAAS;AAAA,IACd,CAAC;AAED,QAAI;AACF,aAAO,MAAM,QAAQ,KAAK,CAAC,GAAG,GAAG,cAAc,CAAC;AAAA,IAClD,UAAE;AACA,UAAI,cAAc,QAAW;AAC3B,qBAAa,SAAS;AAAA,MACxB;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,YACZ,OACA,SACA,YACsB;AAEtB,UAAM,SAAS,KAAK,gBAAgB,OAAO;AAE3C,UAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,MACzC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA;AAAA,MACA,aAAa,oBAAI,KAAK;AAAA,IACxB,CAAC;AAED,WAAQ,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,QACZ,OACA,OACA,SACA,YACe;AACf,UAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,MACzC,QAAQ;AAAA,MACR;AAAA,MACA;AAAA,MACA,OAAO;AAAA,QACL,SAAS,MAAM;AAAA,QACf,OAAO,MAAM;AAAA,MACf;AAAA,MACA,aAAa,oBAAI,KAAK;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,SACZ,OACA,aACA,SACA,YACe;AACf,UAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,MACzC,QAAQ;AAAA,MACR,cAAc,CAAC,WAAW;AAAA,MAC1B;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKQ,gBAAgB,SAAmC;AAEzD,UAAM,EAAE,OAAO,QAAQ,GAAG,KAAK,IAAI;AACnC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,aAAsB,OAAwC;AACpE,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAAA,MAC9C,QAAQ,MAAM,KAAK,cAAuB,KAAK;AAAA,MAC/C,QAAQ,MAAM,KAAK,OAAO,KAAK;AAAA,IACjC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cACZ,OACA,eAAuB,KACL;AAClB,WAAO,MAAM;AACX,YAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAClD,UAAI,CAAC,KAAK;AACR,cAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,MAC3C;AAEA,UAAI,IAAI,WAAW,aAAa;AAC9B,eAAO,IAAI;AAAA,MACb;AAEA,UAAI,IAAI,WAAW,UAAU;AAC3B,cAAM,IAAI,MAAM,IAAI,OAAO,WAAW,iBAAiB;AAAA,MACzD;AAEA,UAAI,IAAI,WAAW,aAAa;AAC9B,cAAM,IAAI,MAAM,wBAAwB;AAAA,MAC1C;AAGA,YAAM,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,YAAY,CAAC;AAAA,IAClE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAO,OAA8B;AACzC,UAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAClD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,IAC3C;AAEA,QAAI,IAAI,WAAW,eAAe,IAAI,WAAW,UAAU;AACzD,YAAM,IAAI;AAAA,QACR,+BAA+B,KAAK,sBAAsB,IAAI,MAAM;AAAA,MAEtE;AAAA,IACF;AAEA,UAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,MACzC,QAAQ;AAAA,MACR,aAAa,oBAAI,KAAK;AAAA,IACxB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,UAAU,OAA4C;AACpD,WAAO,KAAK,OAAO,QAAQ,OAAO,KAAK;AAAA,EACzC;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,SAIkB;AACzB,WAAO,KAAK,OAAO,QAAQ,SAAS;AAAA,MAClC,YAAY,SAAS;AAAA,MACrB,QAAQ,SAAS;AAAA,MACjB,OAAO,SAAS;AAAA,IAClB,CAAC;AAAA,EACH;AACF;;;AC9hBO,IAAM,kBAAN,MAAsB;AAAA,EAK3B,YAAY,QAA+B;AAF3C,SAAQ,YAAY;AAGlB,SAAK,SAAS;AAAA,MACZ,yBAAyB;AAAA;AAAA,MACzB,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAGA,QAAI,KAAK,OAAO,2BAA2B,KAAK,OAAO,0BAA0B,GAAG;AAClF,WAAK,uBAAuB;AAAA,IAC9B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eACJ,KACA,QACA,YACA,SAC0B;AAE1B,UAAM,UAAU,OAAO,WAAW,YAAY,aAC1C,MAAM,WAAW,QAAQ,OAAO,IAChC,WAAW;AAGf,UAAM,YAAY,WAAW,UACzB,IAAI,KAAK,KAAK,IAAI,IAAI,cAAc,WAAW,OAAO,CAAC,IACvD;AAEJ,UAAM,WAA4B;AAAA,MAChC,IAAI,WAAW,KAAK;AAAA,MACpB;AAAA,MACA,SAAS,WAAW,WAAW;AAAA,MAC/B;AAAA,MACA,WAAW,WAAW;AAAA,MACtB,aAAa,oBAAI,KAAK;AAAA,MACtB;AAAA,MACA,QAAQ;AAAA,IACV;AAEA,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ,IAAI,uCAAuC,SAAS,EAAE,YAAY,IAAI,EAAE,EAAE;AAAA,IACpF;AAGA,UAAM,KAAK,OAAO,QAAQ,oBAAoB,IAAI,IAAI,QAAQ;AAG9D,QAAI,KAAK,OAAO,UAAU;AACxB,UAAI;AACF,cAAM,KAAK,OAAO,SAAS,UAAU,GAAG;AAAA,MAC1C,SAAS,OAAO;AACd,gBAAQ,MAAM,iDAAiD,KAAK;AAAA,MACtE;AAAA,IACF;AAEA,WAAO;AAAA,MACL,YAAY,SAAS;AAAA,MACrB,OAAO,IAAI;AAAA,MACX;AAAA,MACA,SAAS,SAAS;AA
AA,MAClB,SAAS,SAAS;AAAA,MAClB,WAAW,SAAS;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YACJ,OACA,YACiC;AACjC,QAAI,KAAK,OAAO,QAAQ,oBAAoB;AAC1C,aAAO,KAAK,OAAO,QAAQ,mBAAmB,OAAO,UAAU;AAAA,IACjE;AAGA,UAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,oBAAoB,KAAK;AAC/D,WAAO,IAAI,KAAK,CAAC,MAAM,EAAE,OAAO,UAAU,KAAK;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAoB,OAA2C;AAC7D,WAAO,KAAK,OAAO,QAAQ,oBAAoB,KAAK;AAAA,EACtD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBACJ,OACA,YACA,UACe;AACf,QAAI,KAAK,OAAO,OAAO;AACrB,cAAQ;AAAA,QACN,6CAA6C,UAAU,KACrD,SAAS,WAAW,aAAa,UACnC;AAAA,MACF;AAAA,IACF;AAGA,UAAM,WAAW,MAAM,KAAK,YAAY,OAAO,UAAU;AACzD,QAAI,CAAC,UAAU;AACb,YAAM,IAAI,MAAM,uBAAuB,UAAU,EAAE;AAAA,IACrD;AAGA,QAAI,SAAS,WAAW,WAAW;AACjC,YAAM,IAAI,MAAM,+BAA+B,SAAS,MAAM,EAAE;AAAA,IAClE;AAGA,QAAI,SAAS,aAAa,oBAAI,KAAK,IAAI,SAAS,WAAW;AACzD,YAAM,IAAI,MAAM,sBAAsB;AAAA,IACxC;AAGA,QACE,SAAS,aACT,SAAS,UAAU,SAAS,KAC5B,CAAC,SAAS,UAAU,SAAS,SAAS,QAAQ,GAC9C;AACA,YAAM,IAAI,MAAM,wCAAwC;AAAA,IAC1D;AAGA,UAAM,KAAK,OAAO,QAAQ,eAAe,OAAO,YAAY,QAAQ;AAGpE,UAAM,MAAM,MAAM,KAAK,OAAO,QAAQ,OAAO,KAAK;AAClD,QAAI,CAAC,KAAK;AACR,YAAM,IAAI,MAAM,kBAAkB,KAAK,EAAE;AAAA,IAC3C;AAGA,UAAM,iBAAiB;AAAA,MACrB,GAAG,IAAI;AAAA,MACP,CAAC,SAAS,MAAM,GAAG;AAAA,QACjB,UAAU,SAAS;AAAA,QACnB,UAAU,SAAS;AAAA,QACnB,SAAS,SAAS;AAAA,QAClB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC;AAAA,IACF;AAGA,UAAM,oBAAoB;AAAA,MACxB,GAAG,IAAI;AAAA,MACP,CAAC,SAAS,MAAM,GAAG;AAAA,QACjB,QAAQ,SAAS;AAAA,QACjB,QAAQ;AAAA,QACR,QAAQ;AAAA,UACN,UAAU,SAAS;AAAA,UACnB,UAAU,SAAS;AAAA,UACnB,SAAS,SAAS;AAAA,QACpB;AAAA,QACA,SAAS;AAAA,QACT,aAAa,oBAAI,KAAK;AAAA,MACxB;AAAA,IACF;AAEA,UAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,MACzC,SAAS;AAAA,MACT,YAAY;AAAA,IACd,CAAC;AAGD,QAAI,SAAS,YAAY,KAAK,OAAO,UAAU;AAC7C,UAAI;AACF,cAAM,KAAK,OAAO,SAAS,OAAO,KAAK;AAAA,MACzC,SAAS,OAAO;AACd,gBAAQ,MAAM,gDAAgD,KAAK;AACnE,cAAM;AAAA,MACR;AAAA,IACF,WAAW,CAAC,SAAS,UAAU;AAE7B,YAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,QACzC,QAAQ;AAAA,QACR,OAAO;AAAA,UACL,SAAS,aAAa,UAAU,iBAC9B,SAAS,UAAU,KAAK,SAAS,OAAO,KAAK,EAC/C;AAAA,QACF;AAAA,QACA,aAAa,oBAAI,KAAK;AAAA,MACxB,CAAC;AAAA,IACH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,QACJ,OACA,YACA,UACA,SACe;AACf,UAAM,KAAK,gBAAgB,OAAO,YAAY;AAAA,MAC5C,UAAU;AAAA,MACV;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OACJ,OACA,YACA,UACA,SACe;AACf,UAAM,KAAK,gBAAgB,OAAO,YAAY;AAAA,MAC5C,UAAU;AAAA,MACV;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,QAGkD;AAC/D,QAAI,KAAK,OAAO,QAAQ,sBAAsB;AAC5C,aAAO,KAAK,OAAO,QAAQ,qBAAqB;AAAA,QAC9C,GAAG;AAAA,QACH,QAAQ;AAAA,MACV,CAAC;AAAA,IACH;AAGA,YAAQ;AAAA,MACN;AAAA,IACF;AACA,WAAO,QAAQ,QAAQ,CAAC,CAAC;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,wBAAuC;AAE3C,QAAI,KAAK,WAAW;AAClB;AAAA,IACF;AAEA,QAAI,CAAC,KAAK,OAAO,QAAQ,sBAAsB;AAC7C;AAAA,IACF;AAEA,UAAM,UAAU,MAAM,KAAK,OAAO,QAAQ,qBAAqB;AAAA,MAC7D,QAAQ;AAAA,IACV,CAAC;AAED,UAAM,MAAM,oBAAI,KAAK;AAErB,eAAW,EAAE,OAAO,SAAS,KAAK,SAAS;AACzC,UAAI,SAAS,aAAa,MAAM,SAAS,WAAW;AAClD,YAAI,KAAK,OAAO,OAAO;AACrB,kBAAQ,IAAI,uCAAuC,SAAS,EAAE,EAAE;AAAA,QAClE;AAGA,cAAM,KAAK,OAAO,QAAQ,eAAe,OAAO,SAAS,IAAI;AAAA,UAC3D,UAAU;AAAA,UACV,UAAU;AAAA,UACV,SAAS;AAAA,QACX,CAAC;AAGD,cAAM,KAAK,OAAO,QAAQ,UAAU,OAAO;AAAA,UACzC,QAAQ;AAAA,UACR,OAAO;AAAA,YACL,SAAS,aAAa,SAAS,EAAE;AAAA,UACnC;AAAA,UACA,aAAa,oBAAI,KAAK;AAAA,QACxB,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,yBAA+B;AACrC,SAAK,kBAAkB,YAAY,MAAM;AACvC,WAAK,sBAAsB,EAAE,MAAM,CAAC,UAAU;AAC5C,gBAAQ,MAAM,8CAA8C,KAAK;AAAA,MACnE,CAAC;AAAA,IACH,GAAG,KAAK,OAAO,uBAAuB;AAAA,EACxC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAa;AACX,SAAK,YAAY;AACjB,QAAI,KAAK,iBAAiB;AACxB,oBAAc,KAAK,eAAe;AAClC,WAAK,kBAAkB;AAAA,IACzB;AAAA,EACF;AACF;;;AClWO,IAAM,uBAAN,MAAoD;AAAA,EAApD;AACL,SAA
Q,SAAS,oBAAI,IAAmB;AAAA;AAAA;AAAA;AAAA;AAAA,EAKxC,cAAc,OAAoB;AAChC,SAAK,OAAO,IAAI,MAAM,IAAI,KAAK;AAAA,EACjC;AAAA;AAAA;AAAA;AAAA,EAKA,eAAe,QAAuB;AACpC,eAAW,SAAS,QAAQ;AAC1B,WAAK,cAAc,KAAK;AAAA,IAC1B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,IAA+B;AACjC,WAAO,KAAK,OAAO,IAAI,EAAE;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,IAAqB;AAC5B,WAAO,KAAK,OAAO,IAAI,EAAE;AAAA,EAC3B;AAAA;AAAA;AAAA;AAAA,EAKA,eAAyB;AACvB,WAAO,MAAM,KAAK,KAAK,OAAO,KAAK,CAAC;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKA,YAAY,IAAqB;AAC/B,WAAO,KAAK,OAAO,OAAO,EAAE;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,OAAO,MAAM;AAAA,EACpB;AACF;AAOO,IAAM,sBAAN,MAAkD;AAAA,EAAlD;AACL,SAAQ,QAAQ,oBAAI,IAAkB;AAAA;AAAA;AAAA;AAAA;AAAA,EAKtC,aAAa,MAAkB;AAC7B,SAAK,MAAM,IAAI,KAAK,IAAI,IAAI;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA,EAKA,cAAc,OAAqB;AACjC,eAAW,QAAQ,OAAO;AACxB,WAAK,aAAa,IAAI;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,MAAgC;AAClC,WAAO,KAAK,MAAM,IAAI,IAAI;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,MAAuB;AAC7B,WAAO,KAAK,MAAM,IAAI,IAAI;AAAA,EAC5B;AAAA;AAAA;AAAA;AAAA,EAKA,gBAA0B;AACxB,WAAO,MAAM,KAAK,KAAK,MAAM,KAAK,CAAC;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKA,WAAW,MAAuB;AAChC,WAAO,KAAK,MAAM,OAAO,IAAI;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,MAAM,MAAM;AAAA,EACnB;AACF;AAiBO,SAAS,gBACd,IACA,UAQI,CAAC,GACE;AACP,SAAO;AAAA,IACL;AAAA,IACA,QAAQ;AAAA,MACN,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,MAAM,SAAS,OAAyE;AACtF,YAAM,WAAW,OAAO,MAAM,UAAU,WAAW,MAAM,QAAQ,KAAK,UAAU,MAAM,KAAK;AAE3F,UAAI;AACJ,UAAI,QAAQ,cAAc;AACxB,eAAO,MAAM,QAAQ,aAAa,QAAQ;AAAA,MAC5C,OAAO;AACL,eAAO,QAAQ,YAAY,sBAAsB,SAAS,MAAM,GAAG,EAAE,CAAC;AAAA,MACxE;AAEA,aAAO;AAAA,QACL;AAAA,QACA,UAAU;AAAA,UACR,EAAE,MAAM,QAAiB,SAAS,SAAS;AAAA,UAC3C,EAAE,MAAM,aAAsB,SAAS,KAAK;AAAA,QAC9C;AAAA,QACA,WAAW,QAAQ,WAAW,IAAI,CAAC,QAAQ;AAAA,UACzC,GAAG;AAAA,UACH,QAAQ;AAAA,QACV,EAAE,KAAK,CAAC;AAAA,QACR,QAAQ;AAAA,QACR,OAAO;AAAA,UACL,cAAc;AAAA,UACd,kBAAkB;AAAA,UAClB,aAAa;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,IACA,SAAS;AACP,YAAM,IAAI,MAAM,uCAAuC;AAAA,IACzD;AAAA,IACA,UAAU;AACR,YAAM,IAAI,MAAM,4CAA4C;AAAA,IAC9D;AAAA,IACA,YAAY;AACV,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAAA,IACA,iBAAiB;AACf,aAAO,QAAQ,QAAQ;AAAA,QACrB,eAAe;AAAA,QACf,iBAAiB;AAAA,QACjB,MAAM;AAAA,MACR,CAAC;AAAA,IACH;AAAA,IACA,MAAM,cAAc;AAAA,IAEpB;AAAA,EACF;AACF;AAcO,SAAS,eACd,IACA,UAMI,CAAC,GACC;AAEN,QAAM,aAAa,EAAE,OAAO,CAAC,MAAe,EAAE;AAE9C,SAAO;AAAA,IACL;AAAA,IACA,aAAa,QAAQ,eAAe,cAAc,EAAE;AAAA,IACpD,aAAa;AAAA,IACb,MAAM,QAAQ,MAA+B;AAC3C,UAAI,QAAQ,aAAa;AACvB,eAAO,MAAM,QAAQ,YAAY,IAAI;AAAA,MACvC;AACA,aAAO,QAAQ,UAAU,EAAE,SAAS,MAAM,MAAM,IAAI,KAAK;AAAA,IAC3D;AAAA,EACF;AACF;;;AC7NO,IAAM,iBAAN,MAAqB;AAAA,EAM1B,YAAY,SAA+B,CAAC,GAAG;AAC7C,SAAK,QAAQ,OAAO,SAAS;AAC7B,SAAK,UAAU,OAAO,WAAW,IAAI,cAAc,EAAE,OAAO,KAAK,MAAM,CAAC;AAGxE,SAAK,WAAW,IAAI,iBAAiB;AAAA,MACnC,SAAS,KAAK;AAAA,MACd,OAAO,KAAK;AAAA,MACZ,GAAG,OAAO;AAAA,IACZ,CAAC;AAGD,SAAK,kBAAkB,IAAI,gBAAgB;AAAA,MACzC,SAAS,KAAK;AAAA,MACd,UAAU,KAAK;AAAA,MACf,OAAO,KAAK;AAAA,MACZ,GAAG,OAAO;AAAA,IACZ,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,SACEC,WACM;AACN,UAAM,aAAa,gBAAgBA,YAAWA,UAAS,aAAaA;AAEpE,SAAK,SAAS,SAAS,UAAgC;AAEvD,QAAI,KAAK,OAAO;AACd,cAAQ,IAAI,yCAAyC,WAAW,EAAE,EAAE;AAAA,IACtE;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,YACE,WACM;AACN,eAAWA,aAAY,WAAW;AAChC,WAAK,SAASA,SAAQ;AAAA,IACxB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAmBA,MACE,YACA,OACA,SACkC;AAClC,WAAO,KAAK,SAAS,MAAuB,YAAY,OAAO,OAAO;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAA8B;AACnC,WAAO,KAAK,SAAS,OAAO,KAAK;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAA8B;AACnC,WAAO,KAAK,SAAS,OAAO,KAAK;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA
SA,OAAO,OAA4C;AACjD,WAAO,KAAK,QAAQ,OAAO,KAAK;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,SAAS,QAA4C;AACnD,WAAO,KAAK,QAAQ,SAAS,UAAU,CAAC,CAAC;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA,EAKA,gBACE,QACA,OACwB;AACxB,WAAO,KAAK,QAAQ,SAAS,EAAE,QAAQ,MAAM,CAAC;AAAA,EAChD;AAAA;AAAA;AAAA;AAAA,EAKA,mBACE,YACA,OACwB;AACxB,WAAO,KAAK,QAAQ,SAAS,EAAE,YAAY,MAAM,CAAC;AAAA,EACpD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,oBAAoB,OAA2C;AAC7D,WAAO,KAAK,gBAAgB,oBAAoB,KAAK;AAAA,EACvD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,QACE,OACA,YACA,UACA,SACe;AACf,WAAO,KAAK,gBAAgB,QAAQ,OAAO,YAAY,UAAU,OAAO;AAAA,EAC1E;AAAA;AAAA;AAAA;AAAA,EAKA,OACE,OACA,YACA,UACA,SACe;AACf,WAAO,KAAK,gBAAgB,OAAO,OAAO,YAAY,UAAU,OAAO;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA,EAKA,wBAAwB,QAGyC;AAC/D,WAAO,KAAK,gBAAgB,eAAe,MAAM;AAAA,EACnD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,aAA8B;AAC5B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,cAAgC;AAC9B,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,qBAAsC;AACpC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UAAyB;AAC7B,SAAK,gBAAgB,KAAK;AAC1B,UAAM,KAAK,QAAQ,QAAQ;AAE3B,QAAI,KAAK,OAAO;AACd,cAAQ,IAAI,4BAA4B;AAAA,IAC1C;AAAA,EACF;AACF;AAKO,SAAS,qBACd,QACgB;AAChB,SAAO,IAAI,eAAe,MAAM;AAClC;;;ACxNO,IAAM,kBAAN,MAAiD;AAAA,EAGtD,YAAY,SAAgC,CAAC,GAAG;AAC9C,SAAK,SAAS;AAAA,MACZ,SAAS;AAAA,MACT,WAAW;AAAA,MACX,WAAW;AAAA,MACX,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAEA,YAAQ;AAAA,MACN;AAAA,IAGF;AAAA,EACF;AAAA;AAAA,EAGA,UAAU,MAAkC;AAE1C,UAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AAAA,EAEA,OAAO,QAA6C;AAElD,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAAA,EAEA,UAAU,QAAgB,QAA6C;AAErE,UAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AAAA,EAEA,SAAS,SAA4C;AAEnD,UAAM,IAAI,MAAM,0CAA0C;AAAA,EAC5D;AAAA;AAAA,EAGA,eAAe,QAAgB,aAAwC;AAGrE,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA,EAEA,oBAAoB,QAA4C;AAE9D,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACvE;AAAA;AAAA,EAGA,oBAAoB,QAAgB,WAA2C;AAE7E,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACvE;AAAA,EAEA,oBAAoB,QAA4C;AAC9D,UAAM,IAAI,MAAM,qDAAqD;AAAA,EACvE;AAAA,EAEA,eACE,QACA,aACA,WACe;AAEf,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAAA;AAAA,EAGA,QAAQ,MAAkC;AAExC,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAAA,EAEA,UAAuC;AAErC,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAAA,EAEA,YAAY,QAA+B;AAEzC,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA,EAGA,UAAyB;AAEvB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACF;;;AClGO,IAAM,iBAAN,MAAgD;AAAA,EAGrD,YAAY,SAA+B,CAAC,GAAG;AAC7C,SAAK,SAAS;AAAA,MACZ,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAEA,gBAAO;AAAA,MACL;AAAA,IAGF;AAAA,EACF;AAAA;AAAA,EAGA,UAAU,MAAkC;AAC1C,UAAM,IAAI,MAAM,0CAA0C;AAAA,EAC5D;AAAA,EAEA,OAAO,QAA6C;AAClD,UAAM,IAAI,MAAM,uCAAuC;AAAA,EACzD;AAAA,EAEA,UAAU,QAAgB,QAA6C;AACrE,UAAM,IAAI,MAAM,0CAA0C;AAAA,EAC5D;AAAA,EAEA,SAAS,SAA4C;AACnD,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAAA;AAAA,EAGA,eAAe,QAAgB,aAAwC;AACrE,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACjE;AAAA,EAEA,oBAAoB,QAA4C;AAC9D,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AAAA;AAAA,EAGA,oBAAoB,QAAgB,WAA2C;AAC7E,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AAAA,EAEA,oBAAoB,QAA4C;AAC9D,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AAAA,EAEA,eACE,QACA,aACA,WACe;AACf,UAAM,IAAI,MAAM,+CAA+C;AAAA,EACjE;AAAA;AAAA,EAGA,QAAQ,MAAkC;AAGxC,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAAA,EAEA,UAAuC;AAErC,UAAM,IAAI,MAAM,wCAAwC;AAAA,EAC1D;AAAA,EAEA,YAAY,QAA+B;AAEzC,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA,EAGA,UAAyB;AAEvB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACF;;;ACzEO,IAAM,oBAAN,MAAmD;AAAA,EAGxD,YAAY,SAAkC,CAAC,GAAG;AAChD,SAAK,SAAS;AAAA,MACZ,sBAAsB;AAAA,MACtB,WAAW;AAAA,MACX,cAAc;AAAA,MACd,OAAO;AAAA,MACP,GAAG;AAAA,IACL;AAEA,YAAQ;AAAA,MACN;AAAA,IAGF;AAAA,EACF;AAAA;AAAA,EAGA,UAAU,MAAkC;AAE1C,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,OAAO,QAA6C;AAElD,UAAM,IAAI,MAAM,0CAA0C;AAAA,EAC5D;AAAA,EAEA,UAAU,QAAgB,QAA6C;AAErE,UAAM,IAAI,MAAM,6CAA6C;AAAA,EAC/D;AAAA,EAEA,SAAS,SAA4C;AA
EnD,UAAM,IAAI,MAAM,4CAA4C;AAAA,EAC9D;AAAA;AAAA,EAGA,eAAe,QAAgB,aAAwC;AAErE,UAAM,IAAI,MAAM,kDAAkD;AAAA,EACpE;AAAA,EAEA,oBAAoB,QAA4C;AAC9D,UAAM,IAAI,MAAM,uDAAuD;AAAA,EACzE;AAAA;AAAA,EAGA,oBAAoB,QAAgB,WAA2C;AAC7E,UAAM,IAAI,MAAM,uDAAuD;AAAA,EACzE;AAAA,EAEA,oBAAoB,QAA4C;AAC9D,UAAM,IAAI,MAAM,uDAAuD;AAAA,EACzE;AAAA,EAEA,eACE,QACA,aACA,WACe;AACf,UAAM,IAAI,MAAM,kDAAkD;AAAA,EACpE;AAAA;AAAA,EAGA,QAAQ,MAAkC;AAExC,UAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AAAA,EAEA,UAAuC;AAErC,UAAM,IAAI,MAAM,2CAA2C;AAAA,EAC7D;AAAA,EAEA,YAAY,QAA+B;AAEzC,WAAO,QAAQ,QAAQ;AAAA,EACzB;AAAA;AAAA,EAGA,UAAyB;AAEvB,WAAO,QAAQ,QAAQ;AAAA,EACzB;AACF;;;ACjHA,SAAS,aAAa,WAAW,QAAQ,gBAAgB;AAyElD,SAAS,YAAY,SAAgD;AAC1E,QAAM;AAAA,IACJ;AAAA,IACA,UAAU;AAAA,IACV,eAAe;AAAA,IACf,cAAc;AAAA,IACd;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,QAAM,CAAC,KAAK,MAAM,IAAI,SAA6B,IAAI;AACvD,QAAM,CAAC,WAAW,YAAY,IAAI,SAAS,IAAI;AAC/C,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAuB,IAAI;AAErD,QAAM,oBAAoB,OAA8B,IAAI;AAC5D,QAAM,uBAAuB,OAAoB,oBAAI,IAAI,CAAC;AAC1D,QAAM,qBAAqB,OAA+B,IAAI;AAK9D,QAAM,WAAW,YAAY,YAAY;AACvC,QAAI,CAAC;AAAO;AAEZ,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,SAAS,KAAK,IAAI;AAAA,QACvD,QAAQ,mBAAmB,SAAS;AAAA,MACtC,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,6BAA6B,SAAS,MAAM,EAAE;AAAA,MAChE;AAEA,YAAM,OAAO,MAAM,SAAS,KAAK;AACjC,YAAM,cAAc;AAGpB,UAAI,kBAAkB,WAAW,kBAAkB,YAAY,YAAY,QAAQ;AACjF,yBAAiB,YAAY,QAAQ,kBAAkB,OAAO;AAAA,MAChE;AACA,wBAAkB,UAAU,YAAY;AAGxC,UAAI,YAAY,WAAW,aAAa;AACtC,qBAAa,WAAW;AAAA,MAC1B;AAGA,UAAI,YAAY,WAAW,UAAU;AACnC,cAAM,cAAc,IAAI,MAAM,iBAAiB;AAC/C,kBAAU,aAAa,WAAW;AAAA,MACpC;AAGA,UAAI,YAAY,kBAAkB;AAChC,mBAAW,YAAY,YAAY,kBAAkB;AACnD,cAAI,SAAS,WAAW,aAAa,CAAC,qBAAqB,QAAQ,IAAI,SAAS,EAAE,GAAG;AACnF,iCAAqB,QAAQ,IAAI,SAAS,EAAE;AAC5C,iCAAqB,QAAQ;AAAA,UAC/B;AAAA,QACF;AAAA,MACF;AAEA,aAAO,WAAW;AAClB,eAAS,IAAI;AAAA,IACf,SAAS,KAAK;AACZ,UAAI,eAAe,SAAS,IAAI,SAAS,cAAc;AACrD;AAAA,MACF;AACA,YAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACrE,eAAS,UAAU;AACnB,gBAAU,UAAU;AAAA,IACtB;AAAA,EACF,GAAG,CAAC,OAAO,SAAS,gBAAgB,YAAY,SAAS,kBAAkB,CAAC;AAK5E,YAAU,MAAM;AACd,uBAAmB,UAAU,IAAI,gBAAgB;AAEjD,UAAM,UAAU,YAAY;AAC1B,mBAAa,IAAI;AACjB,YAAM,SAAS;AACf,mBAAa,KAAK;AAAA,IACpB;AAEA,YAAQ;AAGR,QAAI;AACJ,QAAI,aAAa;AACf,mBAAa,YAAY,MAAM;AAE7B,cAAM,gBAAgB,kBAAkB;AACxC,YAAI,iBAAiB,CAAC,CAAC,aAAa,UAAU,WAAW,EAAE,SAAS,aAAa,GAAG;AAClF,mBAAS;AAAA,QACX;AAAA,MACF,GAAG,YAAY;AAAA,IACjB;AAEA,WAAO,MAAM;AACX,yBAAmB,SAAS,MAAM;AAClC,UAAI,YAAY;AACd,sBAAc,UAAU;AAAA,MAC1B;AAAA,IACF;AAAA,EACF,GAAG,CAAC,OAAO,aAAa,cAAc,QAAQ,CAAC;AAK/C,QAAM,UAAU,YAAY,YAAY;AACtC,iBAAa,IAAI;AACjB,UAAM,SAAS;AACf,iBAAa,KAAK;AAAA,EACpB,GAAG,CAAC,QAAQ,CAAC;AAKb,QAAM,SAAS,YAAY,YAAY;AACrC,QAAI,CAAC;AAAO;AAEZ,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,SAAS,KAAK,WAAW;AAAA,QAC9D,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,8BAA8B,SAAS,MAAM,EAAE;AAAA,MACjE;AAEA,YAAM,QAAQ;AAAA,IAChB,SAAS,KAAK;AACZ,YAAM,cAAc,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACtE,eAAS,WAAW;AACpB,YAAM;AAAA,IACR;AAAA,EACF,GAAG,CAAC,OAAO,SAAS,OAAO,CAAC;AAK5B,QAAM,QAAQ,YAAY,YAAY;AACpC,QAAI,CAAC;AAAO;AAEZ,QAAI;AACF,YAAM,WAAW,MAAM,MAAM,GAAG,OAAO,SAAS,KAAK,UAAU;AAAA,QAC7D,QAAQ;AAAA,MACV,CAAC;AAED,UAAI,CAAC,SAAS,IAAI;AAChB,cAAM,IAAI,MAAM,6BAA6B,SAAS,MAAM,EAAE;AAAA,MAChE;AAEA,YAAM,QAAQ;AAAA,IAChB,SAAS,KAAK;AACZ,YAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACrE,eAAS,UAAU;AACnB,YAAM;AAAA,IACR;AAAA,EACF,GAAG,CAAC,OAAO,SAAS,OAAO,CAAC;AAG5B,QAAM,oBAAoB,MAAc;AACtC,QAAI,CAAC,KAAK;AAAY,aAAO;AAE7B,UAAM,SAAS,OAAO,OAAO,IAAI,UAAU;AAC3C,QAAI,OAAO,WAAW;AAAG,aAAO;AAEhC,UAAM,YAAY,OAAO;AAAA,MACvB,CAAC,MAAM,EAAE,WAAW,eAAe,EAAE,WAAW;AAAA,IAClD,EAAE;AAEF,WAAO,KAAK,MAAO,YAA
Y,OAAO,SAAU,GAAG;AAAA,EACrD;AAEA,SAAO;AAAA,IACL;AAAA,IACA,QAAQ,KAAK,UAAU;AAAA,IACvB,UAAU,kBAAkB;AAAA,IAC5B,cAAc,KAAK,gBAAgB,CAAC;AAAA,IACpC,YAAY,KAAK,cAAc,CAAC;AAAA,IAChC,kBAAkB,KAAK,kBAAkB,OAAO,CAAC,MAAM,EAAE,WAAW,SAAS,KAAK,CAAC;AAAA,IACnF;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;ACzQA,SAAS,eAAAC,cAAa,aAAAC,YAAW,YAAAC,iBAAgB;AA6D1C,SAAS,YAAY,SAAgD;AAC1E,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA,UAAU;AAAA,IACV,WAAW;AAAA,IACX;AAAA,IACA;AAAA,EACF,IAAI;AAEJ,QAAM,CAAC,UAAU,WAAW,IAAIA,UAAiC,IAAI;AACrE,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAS,IAAI;AAC/C,QAAM,CAAC,cAAc,eAAe,IAAIA,UAAS,KAAK;AACtD,QAAM,CAAC,OAAO,QAAQ,IAAIA,UAAuB,IAAI;AAKrD,EAAAD,WAAU,MAAM;AACd,UAAM,gBAAgB,YAAY;AAChC,UAAI;AACF,cAAM,WAAW,MAAM;AAAA,UACrB,GAAG,OAAO,SAAS,KAAK,cAAc,UAAU;AAAA,QAClD;AAEA,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,6BAA6B,SAAS,MAAM,EAAE;AAAA,QAChE;AAEA,cAAM,OAAO,MAAM,SAAS,KAAK;AACjC,oBAAY,IAAuB;AACnC,iBAAS,IAAI;AAAA,MACf,SAAS,KAAK;AACZ,cAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACrE,iBAAS,UAAU;AACnB,kBAAU,UAAU;AAAA,MACtB,UAAE;AACA,qBAAa,KAAK;AAAA,MACpB;AAAA,IACF;AAEA,QAAI,SAAS,YAAY;AACvB,oBAAc;AAAA,IAChB;AAAA,EACF,GAAG,CAAC,OAAO,YAAY,SAAS,OAAO,CAAC;AAKxC,QAAM,iBAAiBD;AAAA,IACrB,OAAO,aAA+B;AACpC,UAAI,CAAC,SAAS,CAAC;AAAY;AAE3B,sBAAgB,IAAI;AACpB,eAAS,IAAI;AAEb,UAAI;AACF,cAAM,WAAW,MAAM;AAAA,UACrB,GAAG,OAAO,SAAS,KAAK,cAAc,UAAU;AAAA,UAChD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS;AAAA,cACP,gBAAgB;AAAA,YAClB;AAAA,YACA,MAAM,KAAK,UAAU,QAAQ;AAAA,UAC/B;AAAA,QACF;AAEA,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,8BAA8B,SAAS,MAAM,EAAE;AAAA,QACjE;AAGA;AAAA,UAAY,CAAC,SACX,OACI;AAAA,YACA,GAAG;AAAA,YACH,QAAQ,SAAS,WAAW,aAAa;AAAA,YACzC,YAAY,oBAAI,KAAK;AAAA,YACrB,YAAY,SAAS;AAAA,YACrB,SAAS,SAAS;AAAA,UACpB,IACE;AAAA,QACN;AAEA,qBAAa,QAAQ;AAAA,MACvB,SAAS,KAAK;AACZ,cAAM,cAAc,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACtE,iBAAS,WAAW;AACpB,kBAAU,WAAW;AACrB,cAAM;AAAA,MACR,UAAE;AACA,wBAAgB,KAAK;AAAA,MACvB;AAAA,IACF;AAAA,IACA,CAAC,OAAO,YAAY,SAAS,YAAY,OAAO;AAAA,EAClD;AAKA,QAAM,UAAUA;AAAA,IACd,OAAO,YAAqB;AAC1B,YAAM,eAAe;AAAA,QACnB,UAAU;AAAA,QACV;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,gBAAgB,QAAQ;AAAA,EAC3B;AAKA,QAAM,SAASA;AAAA,IACb,OAAO,YAAqB;AAC1B,YAAM,eAAe;AAAA,QACnB,UAAU;AAAA,QACV;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAAA,IACA,CAAC,gBAAgB,QAAQ;AAAA,EAC3B;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW,UAAU,WAAW;AAAA,IAChC,YAAY,UAAU,WAAW;AAAA,EACnC;AACF;;;ACrMA,SAAS,eAAAG,cAAa,aAAAC,YAAW,YAAAC,iBAAgB;AAmE1C,SAAS,gBACd,UAAkC,CAAC,GACZ;AACvB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,WAAW;AAAA,IACX,UAAU;AAAA,IACV,cAAc;AAAA,IACd,kBAAkB;AAAA,EACpB,IAAI;AAEJ,QAAM,CAAC,MAAM,OAAO,IAAIA,UAAwB,CAAC,CAAC;AAClD,QAAM,CAAC,YAAY,aAAa,IAAIA,UAA6B;AACjE,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAS,IAAI;AAC/C,QAAM,CAAC,OAAO,QAAQ,IAAIA,UAAuB,IAAI;AACrD,QAAM,CAAC,SAAS,UAAU,IAAIA,UAAS,KAAK;AAC5C,QAAM,CAAC,QAAQ,SAAS,IAAIA,UAA6B;AAEzD,QAAM,CAAC,QAAQ,cAAc,IAAIA,UAAoB;AAAA,IACnD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,OAAO;AAAA,EACT,CAAC;AAKD,QAAM,mBAAmBF;AAAA,IACvB,CAAC,aAAwB,gBAAiC;AACxD,YAAM,SAAS,IAAI,gBAAgB;AAEnC,UAAI,YAAY,YAAY;AAC1B,eAAO,IAAI,cAAc,YAAY,UAAU;AAAA,MACjD;AAEA,UAAI,YAAY,QAAQ;AACtB,cAAM,WAAW,MAAM,QAAQ,YAAY,MAAM,IAC7C,YAAY,SACZ,CAAC,YAAY,MAAM;AACvB,iBAAS,QAAQ,CAAC,MAAM,OAAO,OAAO,UAAU,CAAC,CAAC;AAAA,MACpD;AAEA,UAAI,YAAY,cAAc;AAC5B,eAAO,IAAI,gBAAgB,YAAY,aAAa,YAAY,CAAC;AAAA,MACnE;AAEA,UAAI,YAAY,eAAe;AAC7B,eAAO,IAAI,iBAAiB,YAAY,cAAc,YAAY,CAAC;AAAA,MACrE;AAEA,UAAI,YAAY,OAAO;AACrB,eAAO,IAAI,SAAS,OAAO,YAAY,KAAK,CAAC;AAAA,MAC/C;AAEA,UAAI,aAAa;AACf,eAAO,IAAI,UAAU,WAAW;AAAA,MAClC;AAEA,aAAO,OAAO,SAAS;AAAA,IACzB;AA
AA,IACA,CAAC;AAAA,EACH;AAKA,QAAM,YAAYA;AAAA,IAChB,OAAO,SAAkB,UAAU;AACjC,UAAI;AACF,cAAM,cAAc,iBAAiB,QAAQ,SAAS,SAAS,MAAS;AACxE,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,SAAS,WAAW,EAAE;AAE7D,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI,MAAM,yBAAyB,SAAS,MAAM,EAAE;AAAA,QAC5D;AAEA,cAAM,OAAO,MAAM,SAAS,KAAK;AACjC,cAAM,cAAe,KAAK,QAAQ;AAClC,cAAM,aAAa,KAAK;AACxB,cAAM,QAAQ,KAAK;AAEnB,YAAI,QAAQ;AACV,kBAAQ,CAAC,SAAS,CAAC,GAAG,MAAM,GAAG,WAAW,CAAC;AAAA,QAC7C,OAAO;AACL,kBAAQ,WAAW;AAAA,QACrB;AAEA,kBAAU,UAAU;AACpB,mBAAW,CAAC,CAAC,cAAc,YAAY,WAAW,OAAO,KAAK;AAC9D,sBAAc,KAAK;AACnB,iBAAS,IAAI;AAAA,MACf,SAAS,KAAK;AACZ,cAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACrE,iBAAS,UAAU;AAAA,MACrB;AAAA,IACF;AAAA,IACA,CAAC,SAAS,QAAQ,QAAQ,gBAAgB;AAAA,EAC5C;AAKA,EAAAC,WAAU,MAAM;AACd,UAAM,UAAU,YAAY;AAC1B,mBAAa,IAAI;AACjB,YAAM,UAAU,KAAK;AACrB,mBAAa,KAAK;AAAA,IACpB;AAEA,YAAQ;AAAA,EACV,GAAG,CAAC,MAAM,CAAC;AAKX,EAAAA,WAAU,MAAM;AACd,QAAI,CAAC;AAAa;AAElB,UAAM,aAAa,YAAY,MAAM;AACnC,gBAAU,KAAK;AAAA,IACjB,GAAG,eAAe;AAElB,WAAO,MAAM,cAAc,UAAU;AAAA,EACvC,GAAG,CAAC,aAAa,iBAAiB,SAAS,CAAC;AAK5C,QAAM,WAAWD,aAAY,YAAY;AACvC,QAAI,CAAC,WAAW;AAAW;AAC3B,iBAAa,IAAI;AACjB,UAAM,UAAU,IAAI;AACpB,iBAAa,KAAK;AAAA,EACpB,GAAG,CAAC,SAAS,WAAW,SAAS,CAAC;AAKlC,QAAM,UAAUA,aAAY,YAAY;AACtC,cAAU,MAAS;AACnB,iBAAa,IAAI;AACjB,UAAM,UAAU,KAAK;AACrB,iBAAa,KAAK;AAAA,EACpB,GAAG,CAAC,SAAS,CAAC;AAKd,QAAM,YAAYA;AAAA,IAChB,CAAC,cAA+C;AAC9C,gBAAU,MAAS;AACnB,qBAAe,CAAC,UAAU;AAAA,QACxB,GAAG;AAAA,QACH,YAAY,UAAU,cAAc,KAAK;AAAA,QACzC,QAAQ,UAAU,UAAU,KAAK;AAAA,QACjC,cAAc,UAAU,gBAAgB,KAAK;AAAA,QAC7C,eAAe,UAAU,iBAAiB,KAAK;AAAA,QAC/C,OAAO,UAAU,YAAY,KAAK;AAAA,MACpC,EAAE;AAAA,IACJ;AAAA,IACA,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AC9PA,SAAS,eAAAG,cAAa,YAAAC,iBAAgB;AA0C/B,SAAS,iBACd,SACgC;AAChC,QAAM,EAAE,YAAY,UAAU,kBAAkB,SAAS,QAAQ,IAAI;AAErE,QAAM,CAAC,YAAY,aAAa,IAAIA,UAAS,KAAK;AAClD,QAAM,CAAC,WAAW,YAAY,IAAIA,UAAwB,IAAI;AAC9D,QAAM,CAAC,OAAO,QAAQ,IAAIA,UAAuB,IAAI;AAKrD,QAAM,QAAQD;AAAA,IACZ,OAAO,UAAmC;AACxC,oBAAc,IAAI;AAClB,eAAS,IAAI;AAEb,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,GAAG,OAAO,IAAI,UAAU,UAAU;AAAA,UAC7D,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,UAClB;AAAA,UACA,MAAM,KAAK,UAAU,EAAE,MAAM,CAAC;AAAA,QAChC,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,YAAY,MAAM,SAAS,KAAK,EAAE,MAAM,OAAO,CAAC,EAAE;AACxD,gBAAM,IAAI;AAAA,YACR,UAAU,WAAW,6BAA6B,SAAS,MAAM;AAAA,UACnE;AAAA,QACF;AAEA,cAAM,OAAO,MAAM,SAAS,KAAK;AACjC,cAAM,QAAQ,KAAK,SAAS,KAAK;AAEjC,qBAAa,KAAK;AAClB,kBAAU,KAAK;AAEf,eAAO;AAAA,MACT,SAAS,KAAK;AACZ,cAAM,aAAa,eAAe,QAAQ,MAAM,IAAI,MAAM,OAAO,GAAG,CAAC;AACrE,iBAAS,UAAU;AACnB,kBAAU,UAAU;AACpB,cAAM;AAAA,MACR,UAAE;AACA,sBAAc,KAAK;AAAA,MACrB;AAAA,IACF;AAAA,IACA,CAAC,YAAY,SAAS,SAAS,OAAO;AAAA,EACxC;AAKA,QAAM,aAAaA,aAAY,MAAM;AACnC,aAAS,IAAI;AAAA,EACf,GAAG,CAAC,CAAC;AAEL,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;",
6
6
  "names": ["logger", "hasNodeProcess", "hasNodeProcess", "createError", "data", "state", "workflow", "workflow", "useCallback", "useEffect", "useState", "useCallback", "useEffect", "useState", "useCallback", "useState"]
7
7
  }