@uploadista/core 0.0.20 → 0.1.0-beta.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. package/dist/{checksum-DVPe3Db4.cjs → checksum-CTpNXWEL.cjs} +1 -1
  2. package/dist/errors/index.cjs +1 -1
  3. package/dist/errors/index.d.mts +2 -2
  4. package/dist/flow/index.cjs +1 -1
  5. package/dist/flow/index.d.cts +2 -2
  6. package/dist/flow/index.d.mts +6 -6
  7. package/dist/flow/index.mjs +1 -1
  8. package/dist/flow-CA8xO6wP.mjs +2 -0
  9. package/dist/flow-CA8xO6wP.mjs.map +1 -0
  10. package/dist/flow-DKJaCPxL.cjs +1 -0
  11. package/dist/index-9gyMMEIB.d.cts.map +1 -1
  12. package/dist/{index-RuQUCROH.d.mts → index-BNhJmMuC.d.mts} +228 -167
  13. package/dist/index-BNhJmMuC.d.mts.map +1 -0
  14. package/dist/{index-DMqaf28W.d.cts → index-BmkhRBV6.d.cts} +226 -165
  15. package/dist/index-BmkhRBV6.d.cts.map +1 -0
  16. package/dist/{index-B9V5SSxl.d.mts → index-D8MZ6P3o.d.mts} +2 -2
  17. package/dist/{index-B9V5SSxl.d.mts.map → index-D8MZ6P3o.d.mts.map} +1 -1
  18. package/dist/{index-BFSHumky.d.mts → index-DQuMQssI.d.mts} +2 -2
  19. package/dist/{index-BFSHumky.d.mts.map → index-DQuMQssI.d.mts.map} +1 -1
  20. package/dist/index.cjs +1 -1
  21. package/dist/index.d.cts +2 -2
  22. package/dist/index.d.mts +6 -6
  23. package/dist/index.mjs +1 -1
  24. package/dist/{stream-limiter-BvkaZXcz.cjs → stream-limiter-DH0vv46_.cjs} +1 -1
  25. package/dist/streams/index.cjs +1 -1
  26. package/dist/streams/index.d.mts +2 -2
  27. package/dist/streams/index.mjs +1 -1
  28. package/dist/testing/index.cjs +2 -2
  29. package/dist/testing/index.d.cts +1 -1
  30. package/dist/testing/index.d.cts.map +1 -1
  31. package/dist/testing/index.d.mts +5 -5
  32. package/dist/testing/index.d.mts.map +1 -1
  33. package/dist/testing/index.mjs +2 -2
  34. package/dist/testing/index.mjs.map +1 -1
  35. package/dist/types/index.cjs +1 -1
  36. package/dist/types/index.d.cts +2 -2
  37. package/dist/types/index.d.mts +6 -6
  38. package/dist/types/index.mjs +1 -1
  39. package/dist/types-BF_tvkRh.cjs +1 -0
  40. package/dist/types-BRnwrJDg.mjs +2 -0
  41. package/dist/types-BRnwrJDg.mjs.map +1 -0
  42. package/dist/upload/index.cjs +1 -1
  43. package/dist/upload/index.d.cts +1 -1
  44. package/dist/upload/index.d.mts +5 -5
  45. package/dist/upload/index.mjs +1 -1
  46. package/dist/upload-CLHJ1SFS.cjs +1 -0
  47. package/dist/upload-CpsShjP3.mjs +2 -0
  48. package/dist/upload-CpsShjP3.mjs.map +1 -0
  49. package/dist/{uploadista-error-DR0XimpE.d.mts → uploadista-error-B1qbOy9N.d.mts} +1 -1
  50. package/dist/{uploadista-error-DR0XimpE.d.mts.map → uploadista-error-B1qbOy9N.d.mts.map} +1 -1
  51. package/dist/{uploadista-error-BgQU45we.cjs → uploadista-error-CLWoRAAr.cjs} +1 -1
  52. package/dist/uploadista-error-CkSxSyNo.mjs.map +1 -1
  53. package/dist/utils/index.cjs +1 -1
  54. package/dist/utils/index.d.mts +2 -2
  55. package/dist/utils/index.mjs +1 -1
  56. package/dist/{utils-UUJt8ILJ.cjs → utils-CvZJUNEo.cjs} +1 -1
  57. package/dist/{utils-B-ZhQ6b0.mjs → utils-DVwfrVBJ.mjs} +1 -1
  58. package/dist/utils-DVwfrVBJ.mjs.map +1 -0
  59. package/package.json +8 -8
  60. package/src/flow/circuit-breaker-store.ts +7 -8
  61. package/src/flow/flow.ts +6 -5
  62. package/src/flow/nodes/transform-node.ts +15 -1
  63. package/src/flow/plugins/image-plugin.ts +12 -3
  64. package/src/flow/plugins/video-plugin.ts +12 -3
  65. package/src/flow/types/flow-types.ts +75 -6
  66. package/src/flow/types/retry-policy.ts +5 -2
  67. package/src/flow/types/type-utils.ts +4 -6
  68. package/src/flow/utils/file-naming.ts +36 -11
  69. package/src/testing/mock-upload-engine.ts +18 -1
  70. package/src/types/circuit-breaker-store.ts +2 -2
  71. package/src/types/data-store.ts +4 -1
  72. package/src/types/kv-store.ts +13 -12
  73. package/src/types/upload-file.ts +29 -4
  74. package/src/upload/upload-chunk.ts +1 -1
  75. package/dist/flow-BHVkk_6W.cjs +0 -1
  76. package/dist/flow-DlhHOlMk.mjs +0 -2
  77. package/dist/flow-DlhHOlMk.mjs.map +0 -1
  78. package/dist/index-DMqaf28W.d.cts.map +0 -1
  79. package/dist/index-RuQUCROH.d.mts.map +0 -1
  80. package/dist/streams-BiD_pOPH.cjs +0 -0
  81. package/dist/streams-Cqjxk2rI.mjs +0 -1
  82. package/dist/types-Cws60JHC.cjs +0 -1
  83. package/dist/types-DKGQJIEr.mjs +0 -2
  84. package/dist/types-DKGQJIEr.mjs.map +0 -1
  85. package/dist/upload-C-C7hn1-.mjs +0 -2
  86. package/dist/upload-C-C7hn1-.mjs.map +0 -1
  87. package/dist/upload-DWBlRXHh.cjs +0 -1
  88. package/dist/utils-B-ZhQ6b0.mjs.map +0 -1
  89. package/dist/{index-C-svZlpj.d.mts → index-DWe68pTi.d.mts} +0 -0
package/dist/flow-DlhHOlMk.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"flow-DlhHOlMk.mjs","names":["DEFAULT_CIRCUIT_BREAKER_CONFIG: Required<\n Omit<CircuitBreakerConfig, \"fallback\">\n> & { fallback: CircuitBreakerFallback }","updatedState: CircuitBreakerStateData","store: CircuitBreakerStore","defaultTypeChecker: TypeCompatibilityChecker","errors: string[]","expectedSchemas: Record<string, unknown>","actualSchemas: Record<string, unknown>","inputFile: InputFile","DEFAULT_RETRY_POLICY: RetryPolicy","items: DeadLetterItem[]","nodeResults: Record<string, unknown>","nextRetryAt: Date | undefined","item: DeadLetterItem","updatedItem: DeadLetterItem","status: DeadLetterItemStatus","byStatus: Record<DeadLetterItemStatus, number>","byFlow: Record<string, number>","oldestItem: Date | undefined","graph: Record<string, string[]>","inDegree: Record<string, number>","levels: ExecutionLevel[]","resolvedNodes: Array<FlowNode<any, any, UploadistaError>>","graph: Record<string, string[]>","inDegree: Record<string, number>","reverseGraph: Record<string, string[]>","queue: string[]","result: string[]","inputs: Record<string, unknown>","mappedInputs: Record<string, z.infer<TFlowInputSchema>>","flowOutputs: Record<string, unknown>","typedOutputs: TypedOutput[]","transferredFile: UploadFile","lastError: UploadistaError | null","nodeInput: unknown","nodeInputsForExecution: Record<string, unknown>","result","executionOrder: string[]","nodeResults: Map<string, unknown>","startIndex: number","captureTraceContextEffect: Effect.Effect<\n FlowJobTraceContext | undefined\n >","job: FlowJob","stringContext: Record<string, string>","transformResult","outputType","outputFileName","result: UploadFile","outputChunks: Uint8Array[]","outputBytes","updatedMetadata","result","flowEdges: FlowEdge[]"],"sources":["../src/flow/circuit-breaker.ts","../src/flow/circuit-breaker-store.ts","../src/flow/distributed-circuit-breaker.ts","../src/flow/edge.ts","../src/flow/event.ts","../src/flow/input-type-registry.ts","../src/flow/output-type-registry.ts","../src/flow/node.ts","../src/flow/types/flow-types.ts","../src/flow/types/type-validator.ts","../src/flow/utils/resolve-upload-metadata.ts","../src/flow/nodes/input-node.ts","../src/flow/node-types/index.ts","../src/flow/types/retry-policy.ts","../src/flow/dead-letter-queue.ts","../src/flow/parallel-scheduler.ts","../src/flow/type-guards.ts","../src/flow/flow.ts","../src/flow/flow-engine.ts","../src/flow/utils/file-naming.ts","../src/flow/nodes/transform-node.ts","../src/flow/plugins/credential-provider.ts","../src/flow/plugins/document-ai-plugin.ts","../src/flow/plugins/document-plugin.ts","../src/flow/plugins/image-ai-plugin.ts","../src/flow/plugins/image-plugin.ts","../src/flow/plugins/types/describe-image-node.ts","../src/flow/plugins/types/describe-video-node.ts","../src/flow/plugins/types/extract-frame-video-node.ts","../src/flow/plugins/types/optimize-node.ts","../src/flow/plugins/types/remove-background-node.ts","../src/flow/plugins/types/resize-node.ts","../src/flow/plugins/types/resize-video-node.ts","../src/flow/plugins/types/transcode-video-node.ts","../src/flow/plugins/types/transform-image-node.ts","../src/flow/plugins/types/trim-video-node.ts","../src/flow/plugins/video-plugin.ts","../src/flow/plugins/virus-scan-plugin.ts","../src/flow/plugins/zip-plugin.ts","../src/flow/typed-flow.ts","../src/flow/types/run-args.ts"],"sourcesContent":["/**\n * Circuit Breaker Pattern types and configuration for the Uploadista flow engine.\n *\n * This module provides the types, configuration, and constants for the circuit breaker\n * 
pattern. The actual implementation is in {@link DistributedCircuitBreaker} which\n * uses a {@link CircuitBreakerStore} for distributed state management.\n *\n * @module flow/circuit-breaker\n * @see {@link DistributedCircuitBreaker} for the main implementation\n * @see {@link CircuitBreakerStore} for storage interface\n */\n\nimport type { Effect } from \"effect\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\n/**\n * Circuit breaker state machine states.\n *\n * - `closed`: Normal operation, tracking failures in sliding window\n * - `open`: Rejecting all requests immediately, waiting for reset timeout\n * - `half-open`: Allowing limited test requests to probe service health\n */\nexport type CircuitBreakerState = \"closed\" | \"open\" | \"half-open\";\n\n/**\n * Configuration for a circuit breaker.\n *\n * @property enabled - Whether circuit breaker is active (default: false for backward compatibility)\n * @property failureThreshold - Number of failures within window to trip circuit (default: 5)\n * @property resetTimeout - Milliseconds to wait in open state before half-open (default: 30000)\n * @property halfOpenRequests - Number of successful requests in half-open to close (default: 3)\n * @property windowDuration - Sliding window duration in milliseconds (default: 60000)\n * @property fallback - Behavior when circuit is open\n */\nexport interface CircuitBreakerConfig {\n /** Whether circuit breaker is active (default: false) */\n enabled?: boolean;\n /** Number of failures within window to trip circuit (default: 5) */\n failureThreshold?: number;\n /** Milliseconds to wait in open state before half-open (default: 30000) */\n resetTimeout?: number;\n /** Number of successful requests in half-open to close (default: 3) */\n halfOpenRequests?: number;\n /** Sliding window duration in milliseconds (default: 60000) */\n windowDuration?: number;\n /** Behavior when circuit is open */\n fallback?: CircuitBreakerFallback;\n}\n\n/**\n * Fallback behavior when circuit is open.\n *\n * - `fail`: Fail immediately with CIRCUIT_BREAKER_OPEN error (default)\n * - `skip`: Skip node, pass input through as output\n * - `default`: Return a configured default value\n */\nexport type CircuitBreakerFallback =\n | { type: \"fail\" }\n | { type: \"skip\"; passThrough: true }\n | { type: \"default\"; value: unknown };\n\n/**\n * Event emitted when circuit state changes.\n */\nexport interface CircuitBreakerEvent {\n nodeType: string;\n previousState: CircuitBreakerState;\n newState: CircuitBreakerState;\n timestamp: number;\n failureCount?: number;\n}\n\n/**\n * Callback type for circuit state change events.\n */\nexport type CircuitBreakerEventHandler = (\n event: CircuitBreakerEvent,\n) => Effect.Effect<void, never, never>;\n\n// ============================================================================\n// Default Configuration\n// ============================================================================\n\n/**\n * Default circuit breaker configuration values.\n */\nexport const DEFAULT_CIRCUIT_BREAKER_CONFIG: Required<\n Omit<CircuitBreakerConfig, \"fallback\">\n> & { fallback: CircuitBreakerFallback } = {\n enabled: false,\n failureThreshold: 5,\n resetTimeout: 30000,\n halfOpenRequests: 3,\n windowDuration: 60000,\n fallback: { type: \"fail\" },\n};\n","/**\n * Circuit Breaker Store Implementations.\n *\n * Provides implementations of the CircuitBreakerStore interface 
for\n * different storage backends.\n *\n * @module flow/circuit-breaker-store\n */\n\nimport { Effect, Layer } from \"effect\";\n\nimport { UploadistaError } from \"../errors\";\nimport {\n type BaseKvStore,\n BaseKvStoreService,\n jsonSerializer,\n} from \"../types/kv-store\";\nimport {\n type CircuitBreakerStateData,\n type CircuitBreakerStats,\n type CircuitBreakerStore,\n CircuitBreakerStoreService,\n createInitialCircuitBreakerState,\n} from \"../types/circuit-breaker-store\";\n\n// ============================================================================\n// Key Prefix\n// ============================================================================\n\nconst CIRCUIT_BREAKER_KEY_PREFIX = \"uploadista:circuit-breaker:\";\n\n// ============================================================================\n// KV Store Adapter\n// ============================================================================\n\n/**\n * Creates a CircuitBreakerStore backed by any BaseKvStore.\n *\n * This adapter wraps a generic KV store to provide circuit breaker state\n * storage. It handles:\n * - JSON serialization of state data\n * - Sliding window expiry (checked on read/increment)\n * - Read-modify-write for increment operations\n *\n * Note: This implementation uses read-modify-write for increments, which\n * may have race conditions under high concurrency. This is acceptable for\n * circuit breakers as they tolerate eventual consistency.\n *\n * @param baseStore - The underlying KV store\n * @returns A CircuitBreakerStore implementation\n *\n * @example\n * ```typescript\n * const baseStore = makeRedisBaseKvStore({ redis: redisClient });\n * const cbStore = makeKvCircuitBreakerStore(baseStore);\n *\n * // Use the store\n * yield* cbStore.incrementFailures(\"describe-image\", 60000);\n * ```\n */\nexport function makeKvCircuitBreakerStore(\n baseStore: BaseKvStore,\n): CircuitBreakerStore {\n const makeKey = (nodeType: string) =>\n `${CIRCUIT_BREAKER_KEY_PREFIX}${nodeType}`;\n\n const getStateInternal = (\n nodeType: string,\n ): Effect.Effect<CircuitBreakerStateData | null, UploadistaError> =>\n Effect.gen(function* () {\n const key = makeKey(nodeType);\n const raw = yield* baseStore.get(key);\n\n if (raw === null) {\n return null;\n }\n\n try {\n const state = jsonSerializer.deserialize<CircuitBreakerStateData>(raw);\n return state;\n } catch {\n // Corrupted state - delete and return null\n yield* baseStore.delete(key);\n return null;\n }\n });\n\n const setStateInternal = (\n nodeType: string,\n state: CircuitBreakerStateData,\n ): Effect.Effect<void, UploadistaError> => {\n const key = makeKey(nodeType);\n const serialized = jsonSerializer.serialize(state);\n return baseStore.set(key, serialized);\n };\n\n return {\n getState: getStateInternal,\n\n setState: setStateInternal,\n\n incrementFailures: (nodeType: string, windowDuration: number) =>\n Effect.gen(function* () {\n const now = Date.now();\n let state = yield* getStateInternal(nodeType);\n\n if (state === null) {\n // Initialize with default config - will be overwritten by real config on first use\n state = createInitialCircuitBreakerState({\n failureThreshold: 5,\n resetTimeout: 30000,\n halfOpenRequests: 3,\n windowDuration,\n });\n }\n\n // Check if window has expired\n if (now - state.windowStart > windowDuration) {\n // Window expired - reset count and window\n state = {\n ...state,\n failureCount: 1, // This is the first failure in new window\n windowStart: now,\n };\n } else {\n // Window still valid - increment\n state = {\n 
...state,\n failureCount: state.failureCount + 1,\n };\n }\n\n yield* setStateInternal(nodeType, state);\n return state.failureCount;\n }),\n\n resetFailures: (nodeType: string) =>\n Effect.gen(function* () {\n const state = yield* getStateInternal(nodeType);\n if (state !== null) {\n yield* setStateInternal(nodeType, {\n ...state,\n failureCount: 0,\n windowStart: Date.now(),\n });\n }\n }),\n\n incrementHalfOpenSuccesses: (nodeType: string) =>\n Effect.gen(function* () {\n const state = yield* getStateInternal(nodeType);\n if (state === null) {\n return 1;\n }\n\n const newState = {\n ...state,\n halfOpenSuccesses: state.halfOpenSuccesses + 1,\n };\n yield* setStateInternal(nodeType, newState);\n return newState.halfOpenSuccesses;\n }),\n\n getAllStats: () =>\n Effect.gen(function* () {\n const stats = new Map<string, CircuitBreakerStats>();\n\n if (!baseStore.list) {\n // If list is not supported, return empty map\n return stats;\n }\n\n const keys = yield* baseStore.list(CIRCUIT_BREAKER_KEY_PREFIX);\n const now = Date.now();\n\n for (const key of keys) {\n const nodeType = key; // Key is already stripped of prefix by list()\n const state = yield* getStateInternal(nodeType);\n\n if (state !== null) {\n const timeSinceLastStateChange = now - state.lastStateChange;\n\n stats.set(nodeType, {\n nodeType,\n state: state.state,\n failureCount: state.failureCount,\n halfOpenSuccesses: state.halfOpenSuccesses,\n timeSinceLastStateChange,\n timeUntilHalfOpen:\n state.state === \"open\"\n ? Math.max(\n 0,\n state.config.resetTimeout - timeSinceLastStateChange,\n )\n : undefined,\n });\n }\n }\n\n return stats;\n }),\n\n delete: (nodeType: string) => baseStore.delete(makeKey(nodeType)),\n };\n}\n\n// ============================================================================\n// Memory Store (for single-instance / testing)\n// ============================================================================\n\n/**\n * Creates an in-memory CircuitBreakerStore.\n *\n * This implementation keeps all state in memory and is suitable for:\n * - Single-instance deployments\n * - Development and testing\n * - Serverless functions (where state is ephemeral anyway)\n *\n * @returns A CircuitBreakerStore backed by in-memory Map\n *\n * @example\n * ```typescript\n * const cbStore = makeMemoryCircuitBreakerStore();\n *\n * // Use for testing\n * yield* cbStore.incrementFailures(\"test-node\", 60000);\n * const state = yield* cbStore.getState(\"test-node\");\n * ```\n */\nexport function makeMemoryCircuitBreakerStore(): CircuitBreakerStore {\n const store = new Map<string, CircuitBreakerStateData>();\n\n return {\n getState: (nodeType: string) =>\n Effect.succeed(store.get(nodeType) ?? 
null),\n\n setState: (nodeType: string, state: CircuitBreakerStateData) =>\n Effect.sync(() => {\n store.set(nodeType, state);\n }),\n\n incrementFailures: (nodeType: string, windowDuration: number) =>\n Effect.sync(() => {\n const now = Date.now();\n let state = store.get(nodeType);\n\n if (state === undefined) {\n state = createInitialCircuitBreakerState({\n failureThreshold: 5,\n resetTimeout: 30000,\n halfOpenRequests: 3,\n windowDuration,\n });\n }\n\n // Check if window has expired\n if (now - state.windowStart > windowDuration) {\n state = {\n ...state,\n failureCount: 1,\n windowStart: now,\n };\n } else {\n state = {\n ...state,\n failureCount: state.failureCount + 1,\n };\n }\n\n store.set(nodeType, state);\n return state.failureCount;\n }),\n\n resetFailures: (nodeType: string) =>\n Effect.sync(() => {\n const state = store.get(nodeType);\n if (state !== undefined) {\n store.set(nodeType, {\n ...state,\n failureCount: 0,\n windowStart: Date.now(),\n });\n }\n }),\n\n incrementHalfOpenSuccesses: (nodeType: string) =>\n Effect.sync(() => {\n const state = store.get(nodeType);\n if (state === undefined) {\n return 1;\n }\n\n const newState = {\n ...state,\n halfOpenSuccesses: state.halfOpenSuccesses + 1,\n };\n store.set(nodeType, newState);\n return newState.halfOpenSuccesses;\n }),\n\n getAllStats: () =>\n Effect.sync(() => {\n const stats = new Map<string, CircuitBreakerStats>();\n const now = Date.now();\n\n for (const [nodeType, state] of store) {\n const timeSinceLastStateChange = now - state.lastStateChange;\n\n stats.set(nodeType, {\n nodeType,\n state: state.state,\n failureCount: state.failureCount,\n halfOpenSuccesses: state.halfOpenSuccesses,\n timeSinceLastStateChange,\n timeUntilHalfOpen:\n state.state === \"open\"\n ? Math.max(\n 0,\n state.config.resetTimeout - timeSinceLastStateChange,\n )\n : undefined,\n });\n }\n\n return stats;\n }),\n\n delete: (nodeType: string) =>\n Effect.sync(() => {\n store.delete(nodeType);\n }),\n };\n}\n\n// ============================================================================\n// Effect Layers\n// ============================================================================\n\n/**\n * Effect Layer that provides a CircuitBreakerStore backed by the BaseKvStore.\n *\n * Use this layer when you want circuit breaker state to be distributed\n * across multiple instances (e.g., in a cluster).\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const cbStore = yield* CircuitBreakerStoreService;\n * // ...\n * }).pipe(\n * Effect.provide(kvCircuitBreakerStoreLayer),\n * Effect.provide(redisKvStore({ redis: redisClient }))\n * );\n * ```\n */\nexport const kvCircuitBreakerStoreLayer = Layer.effect(\n CircuitBreakerStoreService,\n Effect.gen(function* () {\n const baseStore = yield* BaseKvStoreService;\n return makeKvCircuitBreakerStore(baseStore);\n }),\n);\n\n/**\n * Effect Layer that provides an in-memory CircuitBreakerStore.\n *\n * Use this layer for single-instance deployments, development, or testing.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const cbStore = yield* CircuitBreakerStoreService;\n * // ...\n * }).pipe(\n * Effect.provide(memoryCircuitBreakerStoreLayer)\n * );\n * ```\n */\nexport const memoryCircuitBreakerStoreLayer = Layer.succeed(\n CircuitBreakerStoreService,\n makeMemoryCircuitBreakerStore(),\n);\n","/**\n * Distributed Circuit Breaker implementation.\n *\n * This module provides a circuit breaker that stores state in a distributed\n * 
store, allowing multiple instances in a cluster to share circuit state.\n *\n * @module flow/distributed-circuit-breaker\n */\n\nimport { Effect } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport {\n type CircuitBreakerStateData,\n type CircuitBreakerStateValue,\n type CircuitBreakerStore,\n createInitialCircuitBreakerState,\n} from \"../types/circuit-breaker-store\";\nimport {\n type CircuitBreakerConfig,\n type CircuitBreakerEventHandler,\n type CircuitBreakerFallback,\n DEFAULT_CIRCUIT_BREAKER_CONFIG,\n} from \"./circuit-breaker\";\n\n// ============================================================================\n// Distributed Circuit Breaker\n// ============================================================================\n\n/**\n * Result of checking if a request is allowed.\n */\nexport interface AllowRequestResult {\n allowed: boolean;\n state: CircuitBreakerStateValue;\n failureCount: number;\n}\n\n/**\n * Distributed circuit breaker that uses a store for state persistence.\n *\n * Unlike the in-memory CircuitBreaker, this implementation stores all state\n * in a CircuitBreakerStore, allowing multiple instances to share circuit state.\n *\n * All operations are Effect-based since they may involve I/O.\n *\n * @example\n * ```typescript\n * const breaker = new DistributedCircuitBreaker(\n * \"describe-image\",\n * { enabled: true, failureThreshold: 5 },\n * store\n * );\n *\n * // Check if request is allowed\n * const { allowed, state } = yield* breaker.allowRequest();\n * if (!allowed) {\n * // Handle circuit open\n * }\n *\n * // Record result\n * try {\n * const result = yield* executeNode();\n * yield* breaker.recordSuccess();\n * return result;\n * } catch (error) {\n * yield* breaker.recordFailure(error.message);\n * throw error;\n * }\n * ```\n */\nexport class DistributedCircuitBreaker {\n private eventHandler?: CircuitBreakerEventHandler;\n\n readonly nodeType: string;\n readonly config: Required<Omit<CircuitBreakerConfig, \"fallback\">> & {\n fallback: CircuitBreakerFallback;\n };\n readonly store: CircuitBreakerStore;\n\n constructor(\n nodeType: string,\n config: CircuitBreakerConfig,\n store: CircuitBreakerStore,\n ) {\n this.nodeType = nodeType;\n this.config = {\n enabled: config.enabled ?? DEFAULT_CIRCUIT_BREAKER_CONFIG.enabled,\n failureThreshold:\n config.failureThreshold ??\n DEFAULT_CIRCUIT_BREAKER_CONFIG.failureThreshold,\n resetTimeout:\n config.resetTimeout ?? DEFAULT_CIRCUIT_BREAKER_CONFIG.resetTimeout,\n halfOpenRequests:\n config.halfOpenRequests ??\n DEFAULT_CIRCUIT_BREAKER_CONFIG.halfOpenRequests,\n windowDuration:\n config.windowDuration ?? DEFAULT_CIRCUIT_BREAKER_CONFIG.windowDuration,\n fallback: config.fallback ?? 
DEFAULT_CIRCUIT_BREAKER_CONFIG.fallback,\n };\n this.store = store;\n }\n\n /**\n * Sets the event handler for state change notifications.\n */\n setEventHandler(handler: CircuitBreakerEventHandler): void {\n this.eventHandler = handler;\n }\n\n /**\n * Checks if a request is allowed through the circuit.\n *\n * This method reads state from the store, checks for time-based transitions,\n * and returns whether the request should proceed.\n */\n allowRequest(): Effect.Effect<AllowRequestResult, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n if (!self.config.enabled) {\n return { allowed: true, state: \"closed\" as const, failureCount: 0 };\n }\n\n let state = yield* self.store.getState(self.nodeType);\n const now = Date.now();\n\n // Initialize state if not exists\n if (state === null) {\n state = createInitialCircuitBreakerState({\n failureThreshold: self.config.failureThreshold,\n resetTimeout: self.config.resetTimeout,\n halfOpenRequests: self.config.halfOpenRequests,\n windowDuration: self.config.windowDuration,\n });\n yield* self.store.setState(self.nodeType, state);\n }\n\n // Check for time-based transition: open -> half-open\n if (state.state === \"open\") {\n const timeSinceOpen = now - state.lastStateChange;\n if (timeSinceOpen >= self.config.resetTimeout) {\n // Transition to half-open\n const previousState = state.state;\n state = {\n ...state,\n state: \"half-open\",\n halfOpenSuccesses: 0,\n lastStateChange: now,\n };\n yield* self.store.setState(self.nodeType, state);\n yield* self.emitEvent(previousState, \"half-open\", state.failureCount);\n }\n }\n\n // Determine if request is allowed\n const allowed = state.state !== \"open\";\n\n return {\n allowed,\n state: state.state,\n failureCount: state.failureCount,\n };\n });\n }\n\n /**\n * Gets the current circuit state from the store.\n */\n getState(): Effect.Effect<CircuitBreakerStateValue, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n const state = yield* self.store.getState(self.nodeType);\n return state?.state ?? \"closed\";\n });\n }\n\n /**\n * Gets the current failure count from the store.\n */\n getFailureCount(): Effect.Effect<number, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n const state = yield* self.store.getState(self.nodeType);\n return state?.failureCount ?? 
0;\n });\n }\n\n /**\n * Records a successful execution.\n *\n * In half-open state, tracks successes toward closing the circuit.\n * In closed state, resets the failure count.\n */\n recordSuccess(): Effect.Effect<void, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n if (!self.config.enabled) {\n return;\n }\n\n const state = yield* self.store.getState(self.nodeType);\n if (state === null) {\n return;\n }\n\n if (state.state === \"half-open\") {\n const newSuccessCount = yield* self.store.incrementHalfOpenSuccesses(\n self.nodeType,\n );\n if (newSuccessCount >= self.config.halfOpenRequests) {\n // Transition to closed\n yield* self.transitionTo(\"closed\", state.failureCount);\n }\n } else if (state.state === \"closed\") {\n // Reset failure count on success\n yield* self.store.resetFailures(self.nodeType);\n }\n });\n }\n\n /**\n * Records a failed execution.\n *\n * In closed state, increments failure count and may trip the circuit.\n * In half-open state, immediately reopens the circuit.\n */\n recordFailure(_errorMessage: string): Effect.Effect<void, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n if (!self.config.enabled) {\n return;\n }\n\n const state = yield* self.store.getState(self.nodeType);\n\n if (state === null || state.state === \"closed\") {\n // Increment failures and check threshold\n const newFailureCount = yield* self.store.incrementFailures(\n self.nodeType,\n self.config.windowDuration,\n );\n\n if (newFailureCount >= self.config.failureThreshold) {\n // Trip the circuit\n yield* self.transitionTo(\"open\", newFailureCount);\n }\n } else if (state.state === \"half-open\") {\n // Any failure in half-open reopens the circuit\n yield* self.transitionTo(\"open\", state.failureCount);\n }\n // In open state, failures are ignored (requests shouldn't reach here)\n });\n }\n\n /**\n * Gets the fallback configuration.\n */\n getFallback(): CircuitBreakerFallback {\n return this.config.fallback;\n }\n\n /**\n * Resets the circuit breaker to closed state.\n */\n reset(): Effect.Effect<void, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n const state = yield* self.store.getState(self.nodeType);\n const previousState = state?.state ?? \"closed\";\n\n yield* self.store.setState(\n self.nodeType,\n createInitialCircuitBreakerState({\n failureThreshold: self.config.failureThreshold,\n resetTimeout: self.config.resetTimeout,\n halfOpenRequests: self.config.halfOpenRequests,\n windowDuration: self.config.windowDuration,\n }),\n );\n\n if (previousState !== \"closed\") {\n yield* self.emitEvent(previousState, \"closed\", 0);\n }\n });\n }\n\n /**\n * Transitions to a new state.\n */\n private transitionTo(\n newState: CircuitBreakerStateValue,\n failureCount: number,\n ): Effect.Effect<void, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n const currentState = yield* self.store.getState(self.nodeType);\n const previousState = currentState?.state ?? \"closed\";\n\n if (previousState === newState) {\n return;\n }\n\n const now = Date.now();\n const updatedState: CircuitBreakerStateData = {\n state: newState,\n failureCount: newState === \"closed\" ? 0 : failureCount,\n lastStateChange: now,\n halfOpenSuccesses: 0,\n windowStart:\n newState === \"closed\" ? now : (currentState?.windowStart ?? 
now),\n config: {\n failureThreshold: self.config.failureThreshold,\n resetTimeout: self.config.resetTimeout,\n halfOpenRequests: self.config.halfOpenRequests,\n windowDuration: self.config.windowDuration,\n },\n };\n\n yield* self.store.setState(self.nodeType, updatedState);\n yield* self.emitEvent(previousState, newState, failureCount);\n });\n }\n\n /**\n * Emits a state change event if handler is set.\n */\n private emitEvent(\n previousState: CircuitBreakerStateValue,\n newState: CircuitBreakerStateValue,\n failureCount: number,\n ): Effect.Effect<void, never, never> {\n const self = this;\n return Effect.gen(function* () {\n if (self.eventHandler) {\n yield* self.eventHandler({\n nodeType: self.nodeType,\n previousState,\n newState,\n timestamp: Date.now(),\n failureCount,\n });\n }\n });\n }\n}\n\n// ============================================================================\n// Distributed Circuit Breaker Registry\n// ============================================================================\n\n/**\n * Registry for managing distributed circuit breakers.\n *\n * Unlike the in-memory CircuitBreakerRegistry, this registry creates\n * DistributedCircuitBreaker instances that share state via a store.\n *\n * @example\n * ```typescript\n * const store = makeKvCircuitBreakerStore(baseKvStore);\n * const registry = new DistributedCircuitBreakerRegistry(store);\n *\n * const breaker = registry.getOrCreate(\"describe-image\", {\n * enabled: true,\n * failureThreshold: 5\n * });\n * ```\n */\nexport class DistributedCircuitBreakerRegistry {\n private breakers: Map<string, DistributedCircuitBreaker> = new Map();\n private eventHandler?: CircuitBreakerEventHandler;\n\n constructor(readonly store: CircuitBreakerStore) {}\n\n /**\n * Sets a global event handler for all circuit breakers.\n */\n setEventHandler(handler: CircuitBreakerEventHandler): void {\n this.eventHandler = handler;\n for (const breaker of this.breakers.values()) {\n breaker.setEventHandler(handler);\n }\n }\n\n /**\n * Gets an existing circuit breaker or creates a new one.\n */\n getOrCreate(\n nodeType: string,\n config: CircuitBreakerConfig,\n ): DistributedCircuitBreaker {\n let breaker = this.breakers.get(nodeType);\n if (!breaker) {\n breaker = new DistributedCircuitBreaker(nodeType, config, this.store);\n if (this.eventHandler) {\n breaker.setEventHandler(this.eventHandler);\n }\n this.breakers.set(nodeType, breaker);\n }\n return breaker;\n }\n\n /**\n * Gets an existing circuit breaker if it exists.\n */\n get(nodeType: string): DistributedCircuitBreaker | undefined {\n return this.breakers.get(nodeType);\n }\n\n /**\n * Gets statistics for all circuit breakers from the store.\n */\n getAllStats(): Effect.Effect<\n Map<string, { state: CircuitBreakerStateValue; failureCount: number }>,\n UploadistaError\n > {\n return this.store.getAllStats();\n }\n\n /**\n * Resets all circuit breakers.\n */\n resetAll(): Effect.Effect<void, UploadistaError> {\n const self = this;\n return Effect.gen(function* () {\n for (const breaker of self.breakers.values()) {\n yield* breaker.reset();\n }\n });\n }\n\n /**\n * Clears all circuit breakers from the local cache.\n * Note: This does not clear state from the store.\n */\n clear(): void {\n this.breakers.clear();\n }\n}\n","import type { FlowEdge as EnhancedFlowEdge } from \"./types/flow-types\";\n\n/**\n * Represents a connection between two nodes in a flow, defining the data flow direction.\n *\n * Edges connect the output of a source node to the input of a target node,\n * 
enabling data to flow through the processing pipeline in a directed acyclic graph (DAG).\n */\nexport type FlowEdge = EnhancedFlowEdge;\n\n/**\n * Creates a flow edge connecting two nodes in a processing pipeline.\n *\n * Edges define how data flows between nodes. The data output from the source node\n * becomes the input for the target node. For nodes with multiple inputs/outputs,\n * ports can be specified to route data to specific connections.\n *\n * @param config - Edge configuration\n * @param config.source - ID of the source node (data originates here)\n * @param config.target - ID of the target node (data flows to here)\n * @param config.sourcePort - Optional port name on the source node for multi-output nodes\n * @param config.targetPort - Optional port name on the target node for multi-input nodes\n *\n * @returns A FlowEdge object representing the connection\n *\n * @example\n * ```typescript\n * // Simple edge connecting two nodes\n * const edge = createFlowEdge({\n * source: \"input-1\",\n * target: \"process-1\"\n * });\n *\n * // Edge with ports for multi-input/output nodes\n * const portEdge = createFlowEdge({\n * source: \"multiplex-1\",\n * target: \"merge-1\",\n * sourcePort: \"out-a\",\n * targetPort: \"in-1\"\n * });\n * ```\n */\nexport function createFlowEdge({\n source,\n target,\n sourcePort,\n targetPort,\n}: {\n source: string;\n target: string;\n sourcePort?: string;\n targetPort?: string;\n}): FlowEdge {\n return {\n source,\n target,\n sourcePort,\n targetPort,\n };\n}\n","/**\n * Flow execution event types and definitions.\n *\n * This module defines the event system used to monitor and track flow execution.\n * Events are emitted at various stages of flow and node execution, allowing\n * real-time monitoring, logging, and WebSocket updates to clients.\n *\n * @module flow/event\n * @see {@link FlowEvent} for the union of all event types\n */\n\nimport type { NodeType } from \"./node\";\nimport type { TypedOutput } from \"./types/flow-types\";\n\n/**\n * Enumeration of all possible flow and node execution event types.\n *\n * Events follow a lifecycle pattern:\n * - Job level: JobStart → ... → JobEnd\n * - Flow level: FlowStart → ... → (FlowEnd | FlowError)\n * - Node level: NodeStart → ... 
→ (NodeEnd | NodePause | NodeError)\n *\n * @example\n * ```typescript\n * // Listen for flow completion\n * if (event.eventType === EventType.FlowEnd) {\n * console.log(\"Flow completed:\", event.result);\n * }\n * ```\n */\nexport enum EventType {\n /** Emitted when a job starts execution */\n JobStart = \"job-start\",\n /** Emitted when a job completes (success or failure) */\n JobEnd = \"job-end\",\n /** Emitted when a flow begins execution */\n FlowStart = \"flow-start\",\n /** Emitted when a flow completes successfully */\n FlowEnd = \"flow-end\",\n /** Emitted when a flow encounters an error */\n FlowError = \"flow-error\",\n /** Emitted when a flow is paused by user request */\n FlowPause = \"flow-pause\",\n /** Emitted when a flow is cancelled by user request */\n FlowCancel = \"flow-cancel\",\n /** Emitted when a node starts processing */\n NodeStart = \"node-start\",\n /** Emitted when a node completes successfully */\n NodeEnd = \"node-end\",\n /** Emitted when a node pauses (waiting for additional data) */\n NodePause = \"node-pause\",\n /** Emitted when a paused node resumes execution */\n NodeResume = \"node-resume\",\n /** Emitted when a node encounters an error */\n NodeError = \"node-error\",\n /** Emitted for streaming node data (e.g., progress updates) */\n NodeStream = \"node-stream\",\n /** Emitted for node response data */\n NodeResponse = \"node-response\",\n /** Emitted when a job is added to the Dead Letter Queue */\n DlqItemAdded = \"dlq-item-added\",\n /** Emitted when a DLQ retry attempt starts */\n DlqRetryStart = \"dlq-retry-start\",\n /** Emitted when a DLQ retry succeeds */\n DlqRetrySuccess = \"dlq-retry-success\",\n /** Emitted when a DLQ retry fails */\n DlqRetryFailed = \"dlq-retry-failed\",\n /** Emitted when a DLQ item is exhausted (max retries reached) */\n DlqItemExhausted = \"dlq-item-exhausted\",\n /** Emitted when a DLQ item is resolved */\n DlqItemResolved = \"dlq-item-resolved\",\n}\n\n/**\n * Event emitted when a job starts execution.\n */\nexport type FlowEventJobStart = {\n jobId: string;\n eventType: EventType.JobStart;\n};\n\n/**\n * Event emitted when a job completes (either successfully or with failure).\n */\nexport type FlowEventJobEnd = {\n jobId: string;\n eventType: EventType.JobEnd;\n};\n\n/**\n * Event emitted when a flow begins execution.\n * This is the first event after JobStart in the execution lifecycle.\n */\nexport type FlowEventFlowStart = {\n jobId: string;\n flowId: string;\n eventType: EventType.FlowStart;\n};\n\n/**\n * Event emitted when a flow completes successfully.\n *\n * @property outputs - Array of typed outputs from all output nodes in the flow\n * @property result - Legacy field for backward compatibility (deprecated, use outputs instead)\n *\n * @remarks\n * The `outputs` field contains an array of TypedOutput objects, each with:\n * - nodeId: The specific node that produced the output\n * - nodeType: The registered type ID (e.g., \"storage-output-v1\")\n * - data: The actual output data\n * - timestamp: When the output was produced\n *\n * @example\n * ```typescript\n * // Handle flow completion with typed outputs\n * if (event.eventType === EventType.FlowEnd && event.outputs) {\n * for (const output of event.outputs) {\n * console.log(`${output.nodeId} (${output.nodeType}):`, output.data);\n * }\n * }\n * ```\n */\nexport type FlowEventFlowEnd = {\n jobId: string;\n flowId: string;\n eventType: EventType.FlowEnd;\n outputs?: TypedOutput[]; // Typed outputs from all output nodes\n result?: unknown; // Legacy 
field (deprecated, use outputs instead)\n};\n\n/**\n * Event emitted when a flow encounters an unrecoverable error.\n *\n * @property error - Error message describing what went wrong\n */\nexport type FlowEventFlowError = {\n jobId: string;\n flowId: string;\n eventType: EventType.FlowError;\n error: string;\n};\n\n/**\n * Event emitted when a flow is paused by user request.\n *\n * Unlike NodePause which occurs when a node needs more data,\n * this event is triggered by an explicit user action to pause the flow.\n */\nexport type FlowEventFlowPause = {\n jobId: string;\n flowId: string;\n eventType: EventType.FlowPause;\n pausedAt?: string; // nodeId where execution was paused\n};\n\n/**\n * Event emitted when a flow is cancelled by user request.\n *\n * Cancelled flows will clean up intermediate files and stop execution.\n */\nexport type FlowEventFlowCancel = {\n jobId: string;\n flowId: string;\n eventType: EventType.FlowCancel;\n};\n\n/**\n * Event emitted when a node begins processing.\n *\n * @property nodeName - Human-readable node name\n * @property nodeType - Type of node (input, transform, conditional, output, etc.)\n */\nexport type FlowEventNodeStart = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeStart;\n nodeName: string;\n nodeType: NodeType;\n};\n\n/**\n * Event emitted when a node fails after all retry attempts.\n *\n * @property error - Error message from the failed execution\n * @property retryCount - Number of retry attempts made before giving up\n */\nexport type FlowEventNodeError = {\n jobId: string;\n flowId: string;\n nodeId: string;\n nodeName: string;\n eventType: EventType.NodeError;\n error: string;\n retryCount?: number; // Number of retries attempted before failure\n};\n\n/**\n * Event emitted when a node completes successfully.\n *\n * @property result - The typed output data produced by the node\n *\n * @remarks\n * For output nodes, the result will be a TypedOutput containing type information.\n * For other nodes, it may be untyped (nodeType will be undefined).\n */\nexport type FlowEventNodeEnd = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeEnd;\n nodeName: string;\n result?: TypedOutput | unknown; // Typed output for output nodes, or untyped for others\n};\n\n/**\n * Event emitted when a node pauses execution, waiting for additional data.\n *\n * This typically occurs with input nodes that need more chunks or nodes\n * waiting for external services.\n *\n * @property partialData - Any partial result available before pausing\n */\nexport type FlowEventNodePause = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodePause;\n nodeName: string;\n partialData?: unknown; // Partial result from waiting node\n};\n\n/**\n * Event emitted when a paused node resumes execution.\n *\n * This occurs after providing the additional data needed by a paused node.\n */\nexport type FlowEventNodeResume = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeResume;\n nodeName: string;\n nodeType: NodeType;\n};\n\n/**\n * Event emitted for node-specific response data.\n *\n * Used for streaming intermediate results or progress updates.\n */\nexport type FlowEventNodeResponse = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeResponse;\n nodeName: string;\n data: unknown;\n};\n\n// ============================================================================\n// Dead Letter Queue Events\n// 
============================================================================\n\n/**\n * Event emitted when a job is added to the Dead Letter Queue.\n */\nexport type FlowEventDlqItemAdded = {\n eventType: EventType.DlqItemAdded;\n dlqItemId: string;\n jobId: string;\n flowId: string;\n errorCode: string;\n errorMessage: string;\n retryCount: number;\n maxRetries: number;\n};\n\n/**\n * Event emitted when a DLQ retry attempt starts.\n */\nexport type FlowEventDlqRetryStart = {\n eventType: EventType.DlqRetryStart;\n dlqItemId: string;\n jobId: string;\n flowId: string;\n attemptNumber: number;\n};\n\n/**\n * Event emitted when a DLQ retry succeeds.\n */\nexport type FlowEventDlqRetrySuccess = {\n eventType: EventType.DlqRetrySuccess;\n dlqItemId: string;\n jobId: string;\n flowId: string;\n attemptNumber: number;\n durationMs: number;\n};\n\n/**\n * Event emitted when a DLQ retry fails.\n */\nexport type FlowEventDlqRetryFailed = {\n eventType: EventType.DlqRetryFailed;\n dlqItemId: string;\n jobId: string;\n flowId: string;\n attemptNumber: number;\n error: string;\n durationMs: number;\n nextRetryAt?: string; // ISO 8601 timestamp\n};\n\n/**\n * Event emitted when a DLQ item is exhausted (max retries reached).\n */\nexport type FlowEventDlqItemExhausted = {\n eventType: EventType.DlqItemExhausted;\n dlqItemId: string;\n jobId: string;\n flowId: string;\n totalAttempts: number;\n};\n\n/**\n * Event emitted when a DLQ item is resolved.\n */\nexport type FlowEventDlqItemResolved = {\n eventType: EventType.DlqItemResolved;\n dlqItemId: string;\n jobId: string;\n flowId: string;\n resolvedBy: \"retry\" | \"manual\";\n};\n\n/**\n * Union of all DLQ-related events.\n */\nexport type DlqEvent =\n | FlowEventDlqItemAdded\n | FlowEventDlqRetryStart\n | FlowEventDlqRetrySuccess\n | FlowEventDlqRetryFailed\n | FlowEventDlqItemExhausted\n | FlowEventDlqItemResolved;\n\n/**\n * Union of all possible flow execution events.\n *\n * This discriminated union allows type-safe event handling based on eventType.\n *\n * @example\n * ```typescript\n * function handleFlowEvent(event: FlowEvent) {\n * switch (event.eventType) {\n * case EventType.FlowStart:\n * console.log(\"Flow started:\", event.flowId);\n * break;\n * case EventType.NodeEnd:\n * console.log(\"Node completed:\", event.nodeName, event.result);\n * break;\n * case EventType.FlowError:\n * console.error(\"Flow failed:\", event.error);\n * break;\n * case EventType.FlowCancel:\n * console.log(\"Flow cancelled:\", event.flowId);\n * break;\n * case EventType.DlqItemAdded:\n * console.log(\"Job added to DLQ:\", event.dlqItemId);\n * break;\n * }\n * }\n * ```\n */\nexport type FlowEvent =\n | FlowEventJobStart\n | FlowEventJobEnd\n | FlowEventFlowStart\n | FlowEventFlowEnd\n | FlowEventFlowError\n | FlowEventFlowPause\n | FlowEventFlowCancel\n | FlowEventNodeStart\n | FlowEventNodeEnd\n | FlowEventNodePause\n | FlowEventNodeResume\n | FlowEventNodeError\n | DlqEvent;\n","/**\n * Input type registry for flow entry point nodes.\n *\n * This module provides a registry for input node type definitions. 
Input types\n * describe how data enters the flow system from external sources (e.g., streaming\n * uploads, URL fetches, webhook triggers).\n *\n * Input types are distinct from output types - they describe the external interface\n * that clients use to interact with input nodes, not the data shape that flows\n * through the system.\n *\n * @module flow/input-type-registry\n * @see {@link outputTypeRegistry} for output types\n *\n * @example\n * ```typescript\n * import { inputTypeRegistry } from \"@uploadista/core/flow\";\n * import { z } from \"zod\";\n *\n * // Register a custom input type\n * inputTypeRegistry.register({\n * id: \"webhook-input-v1\",\n * schema: z.object({\n * payload: z.unknown(),\n * headers: z.record(z.string()),\n * }),\n * version: \"1.0.0\",\n * description: \"Webhook-triggered file input\",\n * });\n * ```\n */\n\nimport type { z } from \"zod\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Defines a registered input type with its schema and metadata.\n *\n * Input type definitions describe how external clients interact with input nodes.\n * Unlike output types, input types define the external interface (e.g., init/finalize\n * operations for streaming uploads).\n *\n * @template TSchema - The Zod schema type for this input's data\n *\n * @property id - Unique identifier (e.g., \"streaming-input-v1\")\n * @property schema - Zod schema for validating input data from clients\n * @property version - Semantic version (e.g., \"1.0.0\") for tracking type evolution\n * @property description - Human-readable explanation of what this input type does\n */\nexport interface InputTypeDefinition<TSchema = unknown> {\n id: string;\n schema: z.ZodSchema<TSchema>;\n version: string;\n description: string;\n}\n\n/**\n * Result type for input validation operations.\n *\n * @template T - The expected type on successful validation\n */\nexport type InputValidationResult<T> =\n | { success: true; data: T }\n | { success: false; error: UploadistaError };\n\n/**\n * Registry for input node type definitions.\n *\n * The InputTypeRegistry maintains a global registry of input types with their schemas\n * and metadata. Input types describe how data enters the flow from external sources.\n *\n * @remarks\n * - Use the exported `inputTypeRegistry` singleton instance\n * - Types cannot be unregistered or modified after registration\n * - Duplicate type IDs are rejected\n *\n * @example\n * ```typescript\n * // Register a new input type\n * inputTypeRegistry.register({\n * id: \"form-input-v1\",\n * schema: formInputSchema,\n * version: \"1.0.0\",\n * description: \"Form-based file input\",\n * });\n *\n * // Check if type exists\n * if (inputTypeRegistry.has(\"streaming-input-v1\")) {\n * const def = inputTypeRegistry.get(\"streaming-input-v1\");\n * }\n * ```\n */\nexport class InputTypeRegistry {\n private readonly types: Map<string, InputTypeDefinition<unknown>>;\n\n constructor() {\n this.types = new Map();\n }\n\n /**\n * Register a new input type in the registry.\n *\n * @template T - The TypeScript type inferred from the Zod schema\n * @param definition - The complete type definition including schema and metadata\n * @throws {UploadistaError} If a type with the same ID is already registered\n */\n register<T>(definition: InputTypeDefinition<T>): void {\n if (this.types.has(definition.id)) {\n throw UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Input type \"${definition.id}\" is already registered. 
Types cannot be modified or re-registered.`,\n details: { typeId: definition.id },\n });\n }\n\n this.types.set(definition.id, definition as InputTypeDefinition<unknown>);\n }\n\n /**\n * Retrieve a registered type definition by its ID.\n *\n * @param id - The unique type identifier (e.g., \"streaming-input-v1\")\n * @returns The type definition if found, undefined otherwise\n */\n get(id: string): InputTypeDefinition<unknown> | undefined {\n return this.types.get(id);\n }\n\n /**\n * List all registered input types.\n *\n * @returns Array of all input type definitions\n */\n list(): InputTypeDefinition<unknown>[] {\n return Array.from(this.types.values());\n }\n\n /**\n * Validate data against a registered type's schema.\n *\n * @template T - The expected TypeScript type after validation\n * @param typeId - The ID of the registered type to validate against\n * @param data - The data to validate\n * @returns A result object with either the validated data or an error\n */\n validate<T>(typeId: string, data: unknown): InputValidationResult<T> {\n const typeDef = this.types.get(typeId);\n\n if (!typeDef) {\n return {\n success: false,\n error: UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Input type \"${typeId}\" is not registered`,\n details: { typeId },\n }),\n };\n }\n\n try {\n const parsed = typeDef.schema.parse(data);\n return { success: true, data: parsed as T };\n } catch (error) {\n return {\n success: false,\n error: UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Data validation failed for input type \"${typeId}\"`,\n cause: error,\n details: { typeId, validationErrors: error },\n }),\n };\n }\n }\n\n /**\n * Check if a type is registered.\n *\n * @param id - The unique type identifier to check\n * @returns True if the type is registered, false otherwise\n */\n has(id: string): boolean {\n return this.types.has(id);\n }\n\n /**\n * Get the total number of registered types.\n *\n * @returns The count of registered types\n */\n size(): number {\n return this.types.size;\n }\n}\n\n/**\n * Global singleton instance of the input type registry.\n *\n * Use this instance to register and access input node type definitions.\n * Input types describe how data enters the flow from external sources.\n *\n * @example\n * ```typescript\n * import { inputTypeRegistry } from \"@uploadista/core/flow\";\n *\n * // Register a type\n * inputTypeRegistry.register({\n * id: \"my-input-v1\",\n * schema: myInputSchema,\n * version: \"1.0.0\",\n * description: \"My custom input type\",\n * });\n *\n * // Validate data\n * const result = inputTypeRegistry.validate(\"my-input-v1\", data);\n * ```\n */\nexport const inputTypeRegistry = new InputTypeRegistry();\n\n/**\n * Validates flow input data against a registered input type.\n *\n * @param typeId - The registered type ID (e.g., \"streaming-input-v1\")\n * @param data - The input data to validate\n * @returns A validation result with either the typed data or an error\n */\nexport function validateFlowInput<T = unknown>(\n typeId: string,\n data: unknown,\n): InputValidationResult<T> {\n return inputTypeRegistry.validate<T>(typeId, data);\n}\n","/**\n * Output type registry for flow result nodes.\n *\n * This module provides a registry for output node type definitions. 
Output types\n * describe the data shapes that flow through the system and appear in results\n * (e.g., UploadFile for storage outputs, OcrOutput for OCR results).\n *\n * Output types are distinct from input types - they describe the data that nodes\n * produce, enabling type-safe result consumption in clients.\n *\n * @module flow/output-type-registry\n * @see {@link inputTypeRegistry} for input types\n *\n * @example\n * ```typescript\n * import { outputTypeRegistry } from \"@uploadista/core/flow\";\n * import { z } from \"zod\";\n *\n * // Register a custom output type\n * outputTypeRegistry.register({\n * id: \"thumbnail-output-v1\",\n * schema: z.object({\n * url: z.string(),\n * width: z.number(),\n * height: z.number(),\n * }),\n * version: \"1.0.0\",\n * description: \"Thumbnail generation output\",\n * });\n * ```\n */\n\nimport type { z } from \"zod\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Defines a registered output type with its schema and metadata.\n *\n * Output type definitions describe the data shapes produced by nodes. This enables\n * type-safe result consumption where clients can narrow types based on the\n * `nodeType` field in results.\n *\n * @template TSchema - The Zod schema type for this output's data\n *\n * @property id - Unique identifier (e.g., \"storage-output-v1\", \"ocr-output-v1\")\n * @property schema - Zod schema for validating output data\n * @property version - Semantic version (e.g., \"1.0.0\") for tracking type evolution\n * @property description - Human-readable explanation of what this output type contains\n */\nexport interface OutputTypeDefinition<TSchema = unknown> {\n id: string;\n schema: z.ZodSchema<TSchema>;\n version: string;\n description: string;\n}\n\n/**\n * Result type for output validation operations.\n *\n * @template T - The expected type on successful validation\n */\nexport type OutputValidationResult<T> =\n | { success: true; data: T }\n | { success: false; error: UploadistaError };\n\n/**\n * Registry for output node type definitions.\n *\n * The OutputTypeRegistry maintains a global registry of output types with their schemas\n * and metadata. Output types describe the data shapes that flow through the system\n * and appear in results.\n *\n * @remarks\n * - Use the exported `outputTypeRegistry` singleton instance\n * - Types cannot be unregistered or modified after registration\n * - Duplicate type IDs are rejected\n *\n * @example\n * ```typescript\n * // Register a new output type\n * outputTypeRegistry.register({\n * id: \"metadata-output-v1\",\n * schema: metadataSchema,\n * version: \"1.0.0\",\n * description: \"File metadata extraction output\",\n * });\n *\n * // Validate result data\n * const result = outputTypeRegistry.validate(\"storage-output-v1\", data);\n * if (result.success) {\n * console.log(result.data.url);\n * }\n * ```\n */\nexport class OutputTypeRegistry {\n private readonly types: Map<string, OutputTypeDefinition<unknown>>;\n\n constructor() {\n this.types = new Map();\n }\n\n /**\n * Register a new output type in the registry.\n *\n * @template T - The TypeScript type inferred from the Zod schema\n * @param definition - The complete type definition including schema and metadata\n * @throws {UploadistaError} If a type with the same ID is already registered\n */\n register<T>(definition: OutputTypeDefinition<T>): void {\n if (this.types.has(definition.id)) {\n throw UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Output type \"${definition.id}\" is already registered. 
Types cannot be modified or re-registered.`,\n details: { typeId: definition.id },\n });\n }\n\n this.types.set(definition.id, definition as OutputTypeDefinition<unknown>);\n }\n\n /**\n * Retrieve a registered type definition by its ID.\n *\n * @param id - The unique type identifier (e.g., \"storage-output-v1\")\n * @returns The type definition if found, undefined otherwise\n */\n get(id: string): OutputTypeDefinition<unknown> | undefined {\n return this.types.get(id);\n }\n\n /**\n * List all registered output types.\n *\n * @returns Array of all output type definitions\n */\n list(): OutputTypeDefinition<unknown>[] {\n return Array.from(this.types.values());\n }\n\n /**\n * Validate data against a registered type's schema.\n *\n * @template T - The expected TypeScript type after validation\n * @param typeId - The ID of the registered type to validate against\n * @param data - The data to validate\n * @returns A result object with either the validated data or an error\n */\n validate<T>(typeId: string, data: unknown): OutputValidationResult<T> {\n const typeDef = this.types.get(typeId);\n\n if (!typeDef) {\n return {\n success: false,\n error: UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Output type \"${typeId}\" is not registered`,\n details: { typeId },\n }),\n };\n }\n\n try {\n const parsed = typeDef.schema.parse(data);\n return { success: true, data: parsed as T };\n } catch (error) {\n return {\n success: false,\n error: UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n body: `Data validation failed for output type \"${typeId}\"`,\n cause: error,\n details: { typeId, validationErrors: error },\n }),\n };\n }\n }\n\n /**\n * Check if a type is registered.\n *\n * @param id - The unique type identifier to check\n * @returns True if the type is registered, false otherwise\n */\n has(id: string): boolean {\n return this.types.has(id);\n }\n\n /**\n * Get the total number of registered types.\n *\n * @returns The count of registered types\n */\n size(): number {\n return this.types.size;\n }\n}\n\n/**\n * Global singleton instance of the output type registry.\n *\n * Use this instance to register and access output node type definitions.\n * Output types describe the data shapes produced by nodes and used in results.\n *\n * @example\n * ```typescript\n * import { outputTypeRegistry } from \"@uploadista/core/flow\";\n *\n * // Register a type\n * outputTypeRegistry.register({\n * id: \"my-output-v1\",\n * schema: myOutputSchema,\n * version: \"1.0.0\",\n * description: \"My custom output type\",\n * });\n *\n * // Validate result data\n * const result = outputTypeRegistry.validate(\"my-output-v1\", data);\n * ```\n */\nexport const outputTypeRegistry = new OutputTypeRegistry();\n\n/**\n * Validates flow output data against a registered output type.\n *\n * @param typeId - The registered type ID (e.g., \"storage-output-v1\")\n * @param data - The output data to validate\n * @returns A validation result with either the typed data or an error\n */\nexport function validateFlowOutput<T = unknown>(\n typeId: string,\n data: unknown,\n): OutputValidationResult<T> {\n return outputTypeRegistry.validate<T>(typeId, data);\n}\n","import { Effect } from \"effect\";\nimport type { z } from \"zod\";\nimport { UploadistaError } from \"../errors\";\nimport { inputTypeRegistry } from \"./input-type-registry\";\nimport { outputTypeRegistry } from \"./output-type-registry\";\nimport type {\n FlowCircuitBreakerConfig,\n FlowNode,\n FlowNodeData,\n NodeExecutionResult,\n} from 
\"./types/flow-types\";\n\n/**\n * Defines the type of node in a flow, determining its role in the processing pipeline.\n */\nexport enum NodeType {\n /** Entry point for data into the flow */\n input = \"input\",\n /** Transforms data during flow execution */\n process = \"process\",\n /** Routes data based on conditions */\n conditional = \"conditional\",\n /** Splits data to multiple outputs */\n multiplex = \"multiplex\",\n /** Combines multiple inputs into one output */\n merge = \"merge\",\n}\n\n/**\n * Fields that can be evaluated in conditional node conditions.\n * These fields are typically found in file metadata.\n */\nexport type ConditionField =\n | \"mimeType\"\n | \"size\"\n | \"width\"\n | \"height\"\n | \"extension\";\n\n/**\n * Operators available for comparing values in conditional node conditions.\n */\nexport type ConditionOperator =\n | \"equals\"\n | \"notEquals\"\n | \"greaterThan\"\n | \"lessThan\"\n | \"contains\"\n | \"startsWith\";\n\n/**\n * Value used in conditional node comparisons.\n * Can be either a string or number depending on the field being evaluated.\n */\nexport type ConditionValue = string | number;\n\n/**\n * Creates a flow node with automatic input/output validation and retry logic.\n *\n * Flow nodes are the building blocks of processing pipelines. Each node:\n * - Validates its input against a Zod schema\n * - Executes its processing logic\n * - Validates its output against a Zod schema\n * - Can optionally retry on failure with exponential backoff\n *\n * @template Input - The expected input type for this node\n * @template Output - The output type produced by this node\n *\n * @param config - Node configuration\n * @param config.id - Unique identifier for this node in the flow\n * @param config.name - Human-readable name for the node\n * @param config.description - Description of what this node does\n * @param config.type - The type of node (input, process, conditional, multiplex, merge)\n * @param config.inputSchema - Zod schema for validating input data\n * @param config.outputSchema - Zod schema for validating output data\n * @param config.run - The processing function to execute for this node\n * @param config.condition - Optional condition for conditional nodes to determine if they should execute\n * @param config.multiInput - If true, node receives all inputs as a record instead of a single input\n * @param config.multiOutput - If true, node can output to multiple targets\n * @param config.pausable - If true, node can pause execution and wait for additional data\n * @param config.retry - Optional retry configuration for handling transient failures\n * @param config.retry.maxRetries - Maximum number of retry attempts (default: 0)\n * @param config.retry.retryDelay - Base delay in milliseconds between retries (default: 1000)\n * @param config.retry.exponentialBackoff - Whether to use exponential backoff for retries (default: true)\n * @param config.inputTypeId - Optional input type ID from inputTypeRegistry (e.g., \"streaming-input-v1\"). Used for input nodes to describe external interface.\n * @param config.outputTypeId - Optional output type ID from outputTypeRegistry (e.g., \"storage-output-v1\"). Used for result type tagging.\n * @param config.keepOutput - If true, preserves this node's output even if it has outgoing edges (default: false). 
Useful for flows where intermediate results need to be kept (e.g., preserving the original file when also running OCR on it).\n * @param config.circuitBreaker - Optional circuit breaker configuration for resilience. Overrides flow-level circuit breaker defaults for this node.\n * @param config.nodeTypeId - Stable node type identifier for circuit breaker configuration. Used to share circuit breaker state across nodes of the same type and for nodeTypeOverrides. Example: \"describe-image\", \"remove-background\", \"scan-virus\"\n *\n * @returns An Effect that succeeds with the created FlowNode\n *\n * @example\n * ```typescript\n * const resizeNode = createFlowNode({\n * id: \"resize-1\",\n * name: \"Resize Image\",\n * description: \"Resizes images to 800x600\",\n * type: NodeType.process,\n * inputSchema: z.object({\n * stream: z.instanceof(Uint8Array),\n * metadata: z.object({ width: z.number(), height: z.number() })\n * }),\n * outputSchema: z.object({\n * stream: z.instanceof(Uint8Array),\n * metadata: z.object({ width: z.literal(800), height: z.literal(600) })\n * }),\n * run: ({ data }) => Effect.gen(function* () {\n * const resized = yield* resizeImage(data.stream, 800, 600);\n * return {\n * type: \"complete\",\n * data: { stream: resized, metadata: { width: 800, height: 600 } }\n * };\n * }),\n * retry: {\n * maxRetries: 3,\n * retryDelay: 1000,\n * exponentialBackoff: true\n * }\n * });\n * ```\n */\nexport function createFlowNode<\n Input,\n Output,\n TType extends NodeType = NodeType,\n>({\n id,\n name,\n description,\n type,\n inputSchema,\n outputSchema,\n run,\n condition,\n multiInput = false,\n multiOutput = false,\n pausable = false,\n retry,\n inputTypeId,\n outputTypeId,\n keepOutput = false,\n circuitBreaker,\n nodeTypeId,\n}: {\n id: string;\n name: string;\n description: string;\n type: TType;\n inputSchema: z.ZodSchema<Input>;\n outputSchema: z.ZodSchema<Output>;\n run: (args: {\n data: Input;\n jobId: string;\n storageId: string;\n flowId: string;\n clientId: string | null;\n }) => Effect.Effect<NodeExecutionResult<Output>, UploadistaError>;\n condition?: {\n field: ConditionField;\n operator: ConditionOperator;\n value: ConditionValue;\n };\n multiInput?: boolean;\n multiOutput?: boolean;\n pausable?: boolean;\n retry?: {\n maxRetries?: number;\n retryDelay?: number;\n exponentialBackoff?: boolean;\n };\n /** Input type ID from inputTypeRegistry - for input nodes describing external interface */\n inputTypeId?: string;\n /** Output type ID from outputTypeRegistry - for result type tagging */\n outputTypeId?: string;\n keepOutput?: boolean;\n /** Circuit breaker configuration for resilience (overrides flow defaults) */\n circuitBreaker?: FlowCircuitBreakerConfig;\n /**\n * Stable node type identifier for circuit breaker configuration.\n * Used to share circuit breaker state across nodes of the same type and for nodeTypeOverrides.\n * Example: \"describe-image\", \"remove-background\", \"scan-virus\"\n */\n nodeTypeId?: string;\n}): Effect.Effect<\n FlowNode<Input, Output, UploadistaError> & { type: TType },\n UploadistaError\n> {\n return Effect.gen(function* () {\n // Validate inputTypeId against inputTypeRegistry if provided\n if (inputTypeId) {\n const inputTypeDef = inputTypeRegistry.get(inputTypeId);\n if (!inputTypeDef) {\n return yield* UploadistaError.fromCode(\"INVALID_INPUT_TYPE\", {\n body: `Input type \"${inputTypeId}\" is not registered in inputTypeRegistry`,\n details: { inputTypeId, nodeId: id },\n }).toEffect();\n }\n }\n\n // Validate outputTypeId 
against outputTypeRegistry if provided\n if (outputTypeId) {\n const outputTypeDef = outputTypeRegistry.get(outputTypeId);\n if (!outputTypeDef) {\n return yield* UploadistaError.fromCode(\"INVALID_OUTPUT_TYPE\", {\n body: `Output type \"${outputTypeId}\" is not registered in outputTypeRegistry`,\n details: { outputTypeId, nodeId: id },\n }).toEffect();\n }\n }\n\n return {\n id,\n name,\n description,\n type,\n inputTypeId,\n outputTypeId,\n keepOutput,\n inputSchema,\n outputSchema,\n pausable,\n run: ({\n data,\n jobId,\n flowId,\n storageId,\n clientId,\n }: {\n data: Input;\n jobId: string;\n flowId: string;\n storageId: string;\n clientId: string | null;\n }) =>\n Effect.gen(function* () {\n // Validate input data against schema\n const validatedData = yield* Effect.try({\n try: () => inputSchema.parse(data),\n catch: (error) => {\n const errorMessage =\n error instanceof Error ? error.message : String(error);\n return UploadistaError.fromCode(\"FLOW_INPUT_VALIDATION_ERROR\", {\n body: `Node '${name}' (${id}) input validation failed: ${errorMessage}`,\n cause: error,\n });\n },\n });\n\n // Run the node logic\n const result = yield* run({\n data: validatedData,\n jobId,\n storageId,\n flowId,\n clientId,\n });\n\n // If the node returned waiting state, add type information and pass through\n if (result.type === \"waiting\") {\n return {\n type: \"waiting\" as const,\n partialData: result.partialData,\n nodeType: outputTypeId,\n nodeId: id,\n };\n }\n\n // Validate output data against schema for completed results\n const validatedResult = yield* Effect.try({\n try: () => outputSchema.parse(result.data),\n catch: (error) => {\n const errorMessage =\n error instanceof Error ? error.message : String(error);\n return UploadistaError.fromCode(\"FLOW_OUTPUT_VALIDATION_ERROR\", {\n body: `Node '${name}' (${id}) output validation failed: ${errorMessage}`,\n cause: error,\n });\n },\n });\n\n // Return with type information (using outputTypeId for result typing)\n return {\n type: \"complete\" as const,\n data: validatedResult,\n nodeType: outputTypeId,\n nodeId: id,\n };\n }),\n condition,\n multiInput,\n multiOutput,\n retry,\n circuitBreaker,\n nodeTypeId,\n } as FlowNode<Input, Output, UploadistaError> & { type: TType };\n });\n}\n\n/**\n * Extracts serializable node metadata from a FlowNode instance.\n *\n * This function is useful for serializing flow configurations or\n * transmitting node information over the network without including\n * the executable run function or schemas.\n *\n * @param node - The flow node to extract data from\n * @returns A plain object containing the node's metadata (id, name, description, type, inputTypeId, outputTypeId)\n */\nexport const getNodeData = (\n // biome-ignore lint/suspicious/noExplicitAny: maybe type later\n node: FlowNode<any, any, UploadistaError>,\n): FlowNodeData => {\n return {\n id: node.id,\n name: node.name,\n description: node.description,\n type: node.type,\n inputTypeId: node.inputTypeId,\n outputTypeId: node.outputTypeId,\n nodeTypeId: node.nodeTypeId,\n };\n};\n","/**\n * Core flow type definitions and node specifications.\n *\n * This module defines the fundamental types for the Flow Engine, including node\n * definitions, execution results, edges, and configuration. 
These types form the\n * foundation of the DAG processing system.\n *\n * @module flow/types/flow-types\n * @see {@link FlowNode} for node specification\n * @see {@link FlowConfig} for flow configuration\n */\n\n/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */\n\nimport type { Effect } from \"effect\";\nimport type { z } from \"zod\";\nimport type { UploadistaError } from \"../../errors\";\nimport type { UploadFile } from \"../../types/upload-file\";\nimport type { FlowEvent, FlowEventFlowEnd, FlowEventFlowStart } from \"../event\";\nimport { NodeType } from \"../node\";\nimport type { RetryPolicy } from \"./retry-policy\";\n\n/**\n * Type mapping for node input/output schemas.\n * Used for type-safe node connections in typed flows.\n */\nexport type NodeTypeMap = Record<string, { input: unknown; output: unknown }>;\n\n/**\n * Minimal node data without execution logic.\n * Used for serialization and UI display.\n *\n * @property id - Unique node identifier\n * @property name - Human-readable node name\n * @property description - Explanation of what the node does\n * @property type - Node category (input, process, conditional, multiplex, or merge)\n * @property inputTypeId - Optional input type ID from inputTypeRegistry (for input nodes)\n * @property outputTypeId - Optional output type ID from outputTypeRegistry (for result typing)\n * @property keepOutput - If true, preserves this node's output even if it has outgoing edges (default: false)\n */\nexport type FlowNodeData = {\n id: string;\n name: string;\n description: string;\n type: NodeType;\n /** Input type ID from inputTypeRegistry - describes how external clients interact with this node */\n inputTypeId?: string;\n /** Output type ID from outputTypeRegistry - describes the data shape this node produces */\n outputTypeId?: string;\n keepOutput?: boolean;\n /**\n * Stable node type identifier for circuit breaker configuration.\n * Used to share circuit breaker state across nodes of the same type and for nodeTypeOverrides.\n * Example: \"describe-image\", \"remove-background\", \"scan-virus\"\n */\n nodeTypeId?: string;\n};\n\n/**\n * Built-in typed outputs with automatic TypeScript narrowing.\n *\n * These outputs use discriminated unions to enable automatic type narrowing\n * in switch statements without requiring type guards.\n *\n * @remarks\n * Built-in types automatically narrow when using switch statements:\n * ```typescript\n * switch (output.nodeType) {\n * case 'storage-output-v1':\n * output.data.url // ✅ TypeScript knows data is UploadFile\n * break;\n * }\n * ```\n */\nexport type BuiltInTypedOutput = {\n nodeType: \"storage-output-v1\";\n data: UploadFile;\n nodeId: string;\n timestamp: string;\n};\n\n/**\n * Custom typed output for user-defined node types.\n *\n * Custom outputs require type guards for type narrowing:\n * ```typescript\n * if (isThumbnailOutput(output)) {\n * output.data.width // ✅ Type guard narrows data to ThumbnailOutput\n * }\n * ```\n *\n * @template T - The TypeScript type of the output data\n */\nexport type CustomTypedOutput<T = unknown> = {\n nodeType?: string; // Custom type ID or undefined for untyped nodes\n data: T;\n nodeId: string;\n timestamp: string;\n};\n\n/**\n * Typed output structure from a flow node.\n *\n * This is a discriminated union that provides automatic type narrowing for\n * built-in types while maintaining extensibility for custom types.\n *\n * @template T - The TypeScript type of the output data (for custom outputs)\n *\n * 
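For custom types, narrowing relies on a user-defined type guard. A minimal sketch (the ThumbnailOutput shape and guard are hypothetical, not part of the core API):\n * ```typescript\n * type ThumbnailOutput = { url: string; width: number; height: number };\n *\n * function isThumbnailOutput(\n * output: TypedOutput,\n * ): output is CustomTypedOutput<ThumbnailOutput> {\n * // Narrow on the registered type ID; the data shape was already\n * // validated against the node's output schema during execution.\n * return output.nodeType === \"thumbnail-output-v1\";\n * }\n * ```\n *\n * 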
@property nodeId - Node instance ID that produced this output\n * @property nodeType - Type ID from the registry (e.g., \"storage-output-v1\")\n * @property data - The actual output data from the node\n * @property timestamp - ISO 8601 timestamp when the result was produced\n *\n * @remarks\n * **Built-in types (automatic narrowing):**\n * - `storage-output-v1` - Storage node output (UploadFile)\n *\n * Use switch statements for automatic narrowing:\n * ```typescript\n * for (const output of state.flowOutputs) {\n * switch (output.nodeType) {\n * case 'storage-output-v1':\n * // ✅ output.data is automatically UploadFile\n * console.log(output.data.url);\n * break;\n * }\n * }\n * ```\n *\n * **Custom types (require type guards):**\n * ```typescript\n * import { isThumbnailOutput } from './type-guards';\n *\n * if (isThumbnailOutput(output)) {\n * // ✅ Type guard narrows output.data to ThumbnailOutput\n * console.log(output.data.width);\n * }\n * ```\n *\n * **Untyped nodes (backward compatible):**\n * ```typescript\n * const untypedOutput: TypedOutput = {\n * nodeId: \"custom-node-1\",\n * data: { custom: \"data\" },\n * timestamp: \"2024-01-15T10:30:00Z\"\n * };\n * ```\n *\n * @example\n * ```typescript\n * // Storage output result (built-in, automatic narrowing)\n * const output: TypedOutput = {\n * nodeId: \"storage-1\",\n * nodeType: \"storage-output-v1\",\n * data: {\n * id: \"file-123\",\n * url: \"https://cdn.example.com/file.jpg\",\n * size: 1024000,\n * // ... rest of UploadFile\n * },\n * timestamp: \"2024-01-15T10:30:00Z\"\n * };\n *\n * // Custom output (requires type guard)\n * const thumbnailOutput: TypedOutput<ThumbnailOutput> = {\n * nodeId: \"thumbnail-1\",\n * nodeType: \"thumbnail-output-v1\",\n * data: {\n * url: \"https://cdn.example.com/thumb.jpg\",\n * width: 200,\n * height: 200,\n * format: \"webp\",\n * },\n * timestamp: \"2024-01-15T10:30:00Z\"\n * };\n * ```\n */\nexport type TypedOutput<T = unknown> =\n | BuiltInTypedOutput\n | CustomTypedOutput<T>;\n\n/**\n * Result of a node execution - either complete or waiting for more data.\n *\n * @template TOutput - Type of the node's output data\n *\n * @remarks\n * Nodes can return \"waiting\" to pause flow execution when they need additional\n * data (e.g., chunked uploads, external service responses). The flow can be\n * resumed later with the missing data.\n *\n * Results now include optional type information (`nodeType` and `nodeId`) to\n * enable type-safe result consumption. 
These fields are automatically added\n * by the node execution wrapper when a node is created with an `outputTypeId`.\n *\n * @example\n * ```typescript\n * // Node completes immediately with type information\n * return completeNodeExecution({ processedData });\n * // Result will be wrapped with: { type: \"complete\", data, nodeType, nodeId }\n *\n * // Node waits for more chunks\n * if (needsMoreData) {\n * return waitingNodeExecution({ receivedChunks: 3, totalChunks: 10 });\n * }\n * ```\n */\nexport type NodeExecutionResult<TOutput> =\n | {\n type: \"complete\";\n data: TOutput;\n nodeType?: string;\n nodeId?: string;\n }\n | {\n type: \"waiting\";\n partialData?: unknown;\n nodeType?: string;\n nodeId?: string;\n };\n\n/**\n * Helper function to create a complete node execution result.\n *\n * @template TOutput - Type of the output data\n * @param data - The output data from the node\n * @returns A complete execution result\n *\n * @example\n * ```typescript\n * return completeNodeExecution({\n * url: uploadedFile.url,\n * size: uploadedFile.size\n * });\n * ```\n */\nexport const completeNodeExecution = <TOutput>(data: TOutput) => ({\n type: \"complete\" as const,\n data,\n});\n\n/**\n * Helper function to create a waiting node execution result.\n *\n * @param partialData - Optional partial data available so far\n * @returns A waiting execution result that pauses the flow\n *\n * @example\n * ```typescript\n * // Wait for more upload chunks\n * return waitingNodeExecution({\n * receivedBytes: currentSize,\n * totalBytes: expectedSize\n * });\n * ```\n */\nexport const waitingNodeExecution = (partialData?: unknown) => ({\n type: \"waiting\" as const,\n partialData,\n});\n\n/**\n * A flow node represents a single processing step in the DAG.\n *\n * Nodes are the building blocks of flows. 
Each node has typed inputs/outputs,\n * execution logic, and optional features like conditions, retries, and pausing.\n *\n * @template TInput - Type of data the node accepts\n * @template TOutput - Type of data the node produces\n * @template TError - Type of errors the node can throw\n *\n * @property inputSchema - Zod schema for validating input data\n * @property outputSchema - Zod schema for validating output data\n * @property run - Effect-based execution function\n * @property condition - Optional conditional execution rule\n * @property multiInput - Whether node accepts multiple inputs (default: false)\n * @property multiOutput - Whether node produces multiple outputs (default: false)\n * @property pausable - Whether node can pause execution (default: false)\n * @property retry - Optional retry configuration\n *\n * @remarks\n * - Nodes use Effect for composable error handling and dependency injection\n * - Input/output schemas ensure type safety at runtime\n * - Conditions are evaluated before execution\n * - Retry logic supports exponential backoff\n * - Pausable nodes can halt flow execution and resume later\n *\n * @example\n * ```typescript\n * const resizeNode: FlowNode<InputFile, UploadFile> = {\n * id: \"resize\",\n * name: \"Resize Image\",\n * description: \"Resize image to specified dimensions\",\n * type: NodeType.process,\n * inputSchema: inputFileSchema,\n * outputSchema: uploadFileSchema,\n * run: ({ data, storageId }) => Effect.gen(function* () {\n * const resized = yield* resizeImage(data, { width: 1920, height: 1080 });\n * return completeNodeExecution(resized);\n * }),\n * retry: {\n * maxRetries: 3,\n * retryDelay: 1000,\n * exponentialBackoff: true\n * }\n * };\n * ```\n *\n * @see {@link NodeExecutionResult} for return type options\n * @see {@link FlowCondition} for conditional execution\n */\nexport type FlowNode<\n TInput = unknown,\n TOutput = unknown,\n TError = UploadistaError,\n> = FlowNodeData & {\n inputSchema: z.ZodSchema<TInput>;\n outputSchema: z.ZodSchema<TOutput>;\n run: (args: {\n data: TInput;\n jobId: string;\n storageId: string;\n flowId: string;\n inputs?: Record<string, unknown>;\n clientId: string | null;\n }) => Effect.Effect<NodeExecutionResult<TOutput>, TError>;\n condition?: {\n field: string;\n operator: string;\n value: unknown;\n };\n multiInput?: boolean;\n multiOutput?: boolean;\n pausable?: boolean; // Flag to indicate this node can pause execution\n retry?: {\n maxRetries?: number; // Maximum number of retry attempts (default: 0)\n retryDelay?: number; // Base delay in ms between retries (default: 1000)\n exponentialBackoff?: boolean; // Use exponential backoff (default: true)\n };\n /** Circuit breaker configuration for this node (overrides flow defaults) */\n circuitBreaker?: FlowCircuitBreakerConfig;\n};\n\n/**\n * Represents a directed edge connecting two nodes in the flow graph.\n *\n * Edges define the data flow direction and can specify ports for multi-input/output nodes.\n *\n * @property source - ID of the source node\n * @property target - ID of the target node\n * @property sourcePort - Optional output port name for multi-output nodes\n * @property targetPort - Optional input port name for multi-input nodes\n *\n * @remarks\n * - Edges must not create cycles (DAG constraint)\n * - Source node's output type should be compatible with target node's input type\n * - Ports allow routing specific outputs to specific inputs\n *\n * @example\n * ```typescript\n * // Simple edge\n * const edge: FlowEdge = {\n * source: 
\"resize-node\",\n * target: \"optimize-node\"\n * };\n *\n * // Edge with ports (for multiplex nodes)\n * const multiplexEdge: FlowEdge = {\n * source: \"multiplex-node\",\n * target: \"output-node\",\n * sourcePort: \"image\",\n * targetPort: \"primary\"\n * };\n * ```\n */\nexport type FlowEdge = {\n source: string;\n target: string;\n sourcePort?: string; // For multi-output nodes\n targetPort?: string; // For multi-input nodes\n};\n\n/**\n * Function type for checking schema compatibility between nodes.\n *\n * @param from - Source node's output schema\n * @param to - Target node's input schema\n * @returns true if schemas are compatible\n *\n * @remarks\n * Custom type checkers can implement more sophisticated compatibility rules\n * than the default checker.\n *\n * @see {@link FlowTypeValidator} for the default implementation\n */\nexport type TypeCompatibilityChecker = (\n from: z.ZodSchema<any>,\n to: z.ZodSchema<any>,\n) => boolean;\n\n/**\n * Interface for validating node connections and schema compatibility.\n *\n * @remarks\n * Validators ensure that connected nodes have compatible types,\n * preventing runtime type errors in flow execution.\n *\n * @see {@link FlowTypeValidator} for the implementation\n */\nexport type NodeConnectionValidator = {\n validateConnection: (\n sourceNode: FlowNode<any, any>,\n targetNode: FlowNode<any, any>,\n edge: FlowEdge,\n ) => boolean;\n getCompatibleTypes: (\n sourceSchema: z.ZodSchema<any>,\n targetSchema: z.ZodSchema<any>,\n ) => boolean;\n};\n\n// ============================================================================\n// Circuit Breaker Types (re-exported from circuit-breaker.ts for convenience)\n// ============================================================================\n\n/**\n * Fallback behavior when circuit is open.\n *\n * - `fail`: Fail immediately with CIRCUIT_BREAKER_OPEN error (default)\n * - `skip`: Skip node, pass input through as output\n * - `default`: Return a configured default value\n */\nexport type FlowCircuitBreakerFallback =\n | { type: \"fail\" }\n | { type: \"skip\"; passThrough: true }\n | { type: \"default\"; value: unknown };\n\n/**\n * Configuration for a circuit breaker on a flow or node.\n *\n * @property enabled - Whether circuit breaker is active (default: false for backward compatibility)\n * @property failureThreshold - Number of failures within window to trip circuit (default: 5)\n * @property resetTimeout - Milliseconds to wait in open state before half-open (default: 30000)\n * @property halfOpenRequests - Number of successful requests in half-open to close (default: 3)\n * @property windowDuration - Sliding window duration in milliseconds (default: 60000)\n * @property fallback - Behavior when circuit is open\n *\n * @example\n * ```typescript\n * const config: FlowCircuitBreakerConfig = {\n * enabled: true,\n * failureThreshold: 5,\n * resetTimeout: 30000,\n * halfOpenRequests: 3,\n * windowDuration: 60000,\n * fallback: { type: \"fail\" }\n * };\n * ```\n */\nexport interface FlowCircuitBreakerConfig {\n /** Whether circuit breaker is active (default: false) */\n enabled?: boolean;\n /** Number of failures within window to trip circuit (default: 5) */\n failureThreshold?: number;\n /** Milliseconds to wait in open state before half-open (default: 30000) */\n resetTimeout?: number;\n /** Number of successful requests in half-open to close (default: 3) */\n halfOpenRequests?: number;\n /** Sliding window duration in milliseconds (default: 60000) */\n windowDuration?: number;\n /** Behavior 
when circuit is open */\n fallback?: FlowCircuitBreakerFallback;\n}\n\n// ============================================================================\n// Dead Letter Queue Types\n// ============================================================================\n\n/**\n * Configuration for Dead Letter Queue on a flow.\n *\n * When enabled, failed flow jobs are captured in the DLQ for later retry,\n * debugging, or manual intervention.\n *\n * @property enabled - Whether DLQ is enabled for this flow (default: true when service is provided)\n * @property retryPolicy - Retry policy configuration for automatic retries\n *\n * @example\n * ```typescript\n * // Enable DLQ with custom retry policy\n * const flowConfig = {\n * flowId: \"image-pipeline\",\n * deadLetterQueue: {\n * enabled: true,\n * retryPolicy: {\n * enabled: true,\n * maxRetries: 5,\n * backoff: {\n * type: \"exponential\",\n * initialDelayMs: 1000,\n * maxDelayMs: 60000,\n * multiplier: 2,\n * jitter: true\n * },\n * nonRetryableErrors: [\"VALIDATION_ERROR\"]\n * }\n * }\n * };\n *\n * // Disable DLQ for best-effort flows\n * const bestEffortFlow = {\n * flowId: \"analytics-pipeline\",\n * deadLetterQueue: {\n * enabled: false\n * }\n * };\n * ```\n */\nexport interface FlowDeadLetterQueueConfig {\n /** Whether DLQ is enabled for this flow (default: true when service is provided) */\n enabled?: boolean;\n /** Retry policy configuration for automatic retries */\n retryPolicy?: RetryPolicy;\n}\n\n/**\n * Configuration object for creating a new flow.\n *\n * FlowConfig defines all aspects of a flow including its nodes, connections,\n * schemas, and optional features like event handlers and parallel execution.\n *\n * @template TFlowInputSchema - Zod schema for flow inputs\n * @template TFlowOutputSchema - Zod schema for flow outputs\n * @template TNodeError - Union of possible errors from node Effects\n * @template TNodeRequirements - Union of requirements from node Effects\n *\n * @property flowId - Unique identifier for the flow\n * @property name - Human-readable flow name\n * @property nodes - Array of nodes (can be plain nodes or Effects resolving to nodes)\n * @property edges - Array of edges connecting the nodes\n * @property inputSchema - Zod schema for validating flow inputs\n * @property outputSchema - Zod schema for validating flow outputs\n * @property typeChecker - Optional custom type compatibility checker\n * @property onEvent - Optional event handler for monitoring execution\n * @property parallelExecution - Optional parallel execution configuration\n *\n * @remarks\n * - Nodes can be provided as plain objects or Effect-wrapped for lazy initialization\n * - Event handlers receive all flow and node events\n * - Parallel execution is experimental and disabled by default\n * - Type checker allows custom schema compatibility rules\n *\n * @example\n * ```typescript\n * const config: FlowConfig<\n * z.ZodObject<{ file: z.ZodType<File> }>,\n * z.ZodType<UploadFile>,\n * never,\n * never\n * > = {\n * flowId: \"image-upload\",\n * name: \"Image Upload Pipeline\",\n * nodes: [inputNode, resizeNode, optimizeNode, storageNode],\n * edges: [\n * { source: \"input\", target: \"resize\" },\n * { source: \"resize\", target: \"optimize\" },\n * { source: \"optimize\", target: \"storage\" }\n * ],\n * inputSchema: z.object({ file: z.instanceof(File) }),\n * outputSchema: uploadFileSchema,\n * onEvent: (event) => Effect.gen(function* () {\n * yield* logEvent(event);\n * return { eventId: event.jobId };\n * })\n * };\n * ```\n *\n * 
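Flows that support pausing or cancellation can also provide `checkJobStatus`, which\n * reports whether a job is still \"running\", \"paused\", or \"cancelled\". A minimal\n * sketch, assuming a hypothetical `jobStore` lookup (the error code is illustrative):\n * ```typescript\n * checkJobStatus: (jobId) =>\n * Effect.tryPromise({\n * try: () => jobStore.getStatus(jobId),\n * catch: (error) =>\n * UploadistaError.fromCode(\"VALIDATION_ERROR\", { cause: error }),\n * })\n * ```\n *\n * 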
@see {@link createFlowWithSchema} for creating flows from config\n * @see {@link FlowNode} for node specifications\n * @see {@link FlowEdge} for edge specifications\n */\nexport type FlowConfig<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TNodeError = never,\n TNodeRequirements = never,\n> = {\n flowId: string;\n name: string;\n nodes: Array<\n | FlowNode<any, any, UploadistaError>\n | Effect.Effect<\n FlowNode<any, any, UploadistaError>,\n TNodeError,\n TNodeRequirements\n >\n >;\n edges: FlowEdge[];\n inputSchema: TFlowInputSchema;\n outputSchema: TFlowOutputSchema;\n typeChecker?: TypeCompatibilityChecker;\n onEvent?: (\n event: FlowEvent,\n ) => Effect.Effect<{ eventId: string | null }, UploadistaError>;\n checkJobStatus?: (\n jobId: string,\n ) => Effect.Effect<\"running\" | \"paused\" | \"cancelled\", UploadistaError>;\n parallelExecution?: {\n enabled?: boolean;\n maxConcurrency?: number;\n };\n /**\n * Circuit breaker configuration for the flow.\n *\n * When enabled, the circuit breaker monitors node execution failures and\n * automatically prevents requests to failing services, protecting against\n * cascade failures.\n *\n * @example\n * ```typescript\n * circuitBreaker: {\n * defaults: {\n * enabled: true,\n * failureThreshold: 5,\n * resetTimeout: 30000\n * },\n * nodeTypeOverrides: {\n * \"virus-scan\": { failureThreshold: 3 }\n * }\n * }\n * ```\n */\n circuitBreaker?: {\n /** Default circuit breaker config for all nodes */\n defaults?: FlowCircuitBreakerConfig;\n /** Override circuit breaker config per node type */\n nodeTypeOverrides?: Record<string, FlowCircuitBreakerConfig>;\n };\n /**\n * Dead Letter Queue configuration for the flow.\n *\n * When enabled, failed jobs are captured in the DLQ for later retry,\n * debugging, or manual intervention.\n *\n * @example\n * ```typescript\n * deadLetterQueue: {\n * enabled: true,\n * retryPolicy: {\n * enabled: true,\n * maxRetries: 5,\n * backoff: {\n * type: \"exponential\",\n * initialDelayMs: 1000,\n * maxDelayMs: 60000,\n * multiplier: 2,\n * jitter: true\n * }\n * }\n * }\n * ```\n */\n deadLetterQueue?: FlowDeadLetterQueueConfig;\n hooks?: {\n /**\n * Called when a sink node (terminal node with no outgoing edges) produces an output.\n * This hook runs after auto-persistence for UploadFile outputs.\n *\n * Use this hook to perform additional post-processing such as:\n * - Saving output metadata to a database\n * - Tracking outputs in external systems\n * - Adding custom metadata to outputs\n * - Triggering downstream workflows\n *\n * The hook receives the output and context, and can optionally modify\n * and return the output (e.g., adding metadata fields).\n *\n * **Important**: The hook must not have any service requirements (Effect requirements must be `never`).\n * All necessary services should be captured in the closure when defining the hook.\n *\n * @param context - Output context including the output data, node ID, flow ID, etc.\n * @returns Effect or Promise that resolves to the (optionally modified) output\n *\n * @example\n * ```typescript\n * // Using Effect\n * hooks: {\n * onNodeOutput: ({ output, nodeId, flowId }) =>\n * Effect.gen(function* () {\n * // Save to database\n * yield* Effect.promise(() => db.save(output));\n * // Return output with additional metadata\n * return { ...output, metadata: { ...output.metadata, tracked: true } };\n * })\n * }\n *\n * // Using Promise (simpler for most users)\n * hooks: {\n * onNodeOutput: async ({ output, nodeId, 
flowId }) => {\n * // Save to database\n * await db.save(output);\n * // Return output with additional metadata\n * return { ...output, metadata: { ...output.metadata, tracked: true } };\n * }\n * }\n * ```\n */\n onNodeOutput?: <TOutput>(context: {\n output: TOutput;\n nodeId: string;\n flowId: string;\n jobId: string;\n storageId: string;\n clientId: string | null;\n }) => Effect.Effect<TOutput, UploadistaError, never> | Promise<TOutput>;\n };\n};\n\n// ============================================================================\n// File Naming Types\n// ============================================================================\n\n/**\n * Context provided to file naming functions and templates.\n *\n * Contains all relevant information about the current file, node, and flow\n * execution that can be used to generate dynamic file names.\n *\n * @property baseName - Filename without extension (e.g., \"photo\" from \"photo.jpg\")\n * @property extension - File extension without dot (e.g., \"jpg\")\n * @property fileName - Full original filename (e.g., \"photo.jpg\")\n * @property nodeType - Type of processing node (e.g., \"resize\", \"optimize\")\n * @property nodeId - Specific node instance ID\n * @property flowId - Flow identifier\n * @property jobId - Execution job ID\n * @property timestamp - ISO 8601 timestamp of processing\n * @property width - Output width (image/video nodes only)\n * @property height - Output height (image/video nodes only)\n * @property format - Output format (e.g., \"webp\", \"mp4\")\n * @property quality - Quality setting (e.g., 80)\n *\n * @example\n * ```typescript\n * // Available in templates as {{variable}}\n * const pattern = \"{{baseName}}-{{width}}x{{height}}.{{extension}}\";\n * // Result: \"photo-800x600.jpg\"\n * ```\n */\nexport type NamingContext = {\n /** Filename without extension */\n baseName: string;\n /** File extension without dot */\n extension: string;\n /** Full original filename */\n fileName: string;\n /** Type of processing node */\n nodeType: string;\n /** Specific node instance ID */\n nodeId: string;\n /** Flow identifier */\n flowId: string;\n /** Execution job ID */\n jobId: string;\n /** ISO 8601 timestamp of processing */\n timestamp: string;\n /** Output width (image/video nodes) */\n width?: number;\n /** Output height (image/video nodes) */\n height?: number;\n /** Output format */\n format?: string;\n /** Quality setting */\n quality?: number;\n /** Page number (document nodes) */\n pageNumber?: number;\n /** Additional custom variables */\n [key: string]: string | number | undefined;\n};\n\n/**\n * Function type for custom file naming logic.\n *\n * @param file - The UploadFile being processed\n * @param context - Naming context with all available variables\n * @returns The new filename (including extension)\n *\n * @example\n * ```typescript\n * const customRename: FileNamingFunction = (file, ctx) =>\n * `${ctx.flowId}-${ctx.baseName}-${ctx.timestamp}.${ctx.extension}`;\n * ```\n */\nexport type FileNamingFunction = (\n file: UploadFile,\n context: NamingContext,\n) => string;\n\n/**\n * Function type for generating auto-naming suffixes.\n *\n * Each node type can define its own auto suffix generator that creates\n * a descriptive suffix based on the processing parameters.\n *\n * @param context - Naming context with all available variables\n * @returns The suffix to append (without leading dash)\n *\n * @example\n * ```typescript\n * // Resize node auto suffix\n * const resizeAutoSuffix: AutoNamingSuffixGenerator = (ctx) 
=>\n * `${ctx.width}x${ctx.height}`;\n * // Result: \"photo-800x600.jpg\"\n *\n * // Optimize node auto suffix\n * const optimizeAutoSuffix: AutoNamingSuffixGenerator = (ctx) =>\n * ctx.format ?? 'optimized';\n * // Result: \"photo-webp.webp\"\n * ```\n */\nexport type AutoNamingSuffixGenerator = (context: NamingContext) => string;\n\n/**\n * Configuration for file naming behavior on a node.\n *\n * Supports three modes:\n * - `undefined` or no config: Preserve original filename (backward compatible)\n * - `mode: 'auto'`: Generate smart suffix based on node type\n * - `mode: 'custom'`: Use template pattern or rename function\n *\n * @property mode - Naming mode: 'auto' for smart suffixes, 'custom' for templates/functions\n * @property pattern - Mustache-style template string (for custom mode)\n * @property rename - Custom function for full control (for custom mode, SDK only)\n * @property autoSuffix - Generator function for auto mode suffix\n *\n * @example\n * ```typescript\n * // Auto mode with smart suffix\n * const autoNaming: FileNamingConfig = {\n * mode: 'auto',\n * autoSuffix: (ctx) => `${ctx.width}x${ctx.height}`\n * };\n *\n * // Custom mode with template\n * const templateNaming: FileNamingConfig = {\n * mode: 'custom',\n * pattern: '{{baseName}}-{{nodeType}}.{{extension}}'\n * };\n *\n * // Custom mode with function\n * const functionNaming: FileNamingConfig = {\n * mode: 'custom',\n * rename: (file, ctx) => `processed-${ctx.fileName}`\n * };\n * ```\n */\nexport type FileNamingConfig = {\n /** Naming mode: 'auto' for smart suffixes, 'custom' for templates/functions */\n mode: \"auto\" | \"custom\";\n /** Mustache-style template string (for custom mode) */\n pattern?: string;\n /** Custom function for full control (for custom mode, SDK only) */\n rename?: FileNamingFunction;\n /** Generator function for auto mode suffix */\n autoSuffix?: AutoNamingSuffixGenerator;\n};\n\n// Re-export existing types for compatibility\nexport { NodeType };\nexport type { FlowEvent, FlowEventFlowEnd, FlowEventFlowStart };\n","/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */\nimport type { z } from \"zod\";\n\nimport type {\n FlowEdge,\n FlowNode,\n NodeConnectionValidator,\n TypeCompatibilityChecker,\n} from \"./flow-types\";\n\n// Default type compatibility checker using Zod schemas\nexport const defaultTypeChecker: TypeCompatibilityChecker = (\n fromSchema,\n toSchema,\n) => {\n // Basic schema compatibility rules\n if (fromSchema === toSchema) return true;\n\n // Check if schemas are compatible by comparing their types\n try {\n // For now, assume schemas are compatible if they're both Zod schemas\n // In a more sophisticated system, you'd check actual schema compatibility\n if (\n fromSchema &&\n toSchema &&\n typeof fromSchema === \"object\" &&\n typeof toSchema === \"object\"\n ) {\n return true;\n }\n\n return false;\n } catch {\n // If schema comparison fails, assume compatible\n return true;\n }\n};\n\n// Enhanced type validator with Zod schema support\nexport class FlowTypeValidator implements NodeConnectionValidator {\n private typeChecker: TypeCompatibilityChecker;\n\n constructor(typeChecker: TypeCompatibilityChecker = defaultTypeChecker) {\n this.typeChecker = typeChecker;\n }\n\n validateConnection(\n sourceNode: FlowNode<any, any>,\n targetNode: FlowNode<any, any>,\n _edge: FlowEdge,\n ): boolean {\n // Check if source node output schema is compatible with target node input schema\n return this.getCompatibleTypes(\n 
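// Compatibility is delegated to the injected TypeCompatibilityChecker;\n // the default checker accepts any pair of Zod schemas, so supply a\n // custom checker when stricter structural validation is needed.\n 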
sourceNode.outputSchema,\n targetNode.inputSchema,\n );\n }\n\n getCompatibleTypes(\n sourceSchema: z.ZodSchema<any>,\n targetSchema: z.ZodSchema<any>,\n ): boolean {\n return this.typeChecker(sourceSchema, targetSchema);\n }\n\n // Validate entire flow for type compatibility\n validateFlow(\n nodes: FlowNode<any, any>[],\n edges: FlowEdge[],\n ): {\n isValid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n\n for (const edge of edges) {\n const sourceNode = nodeMap.get(edge.source);\n const targetNode = nodeMap.get(edge.target);\n\n if (!sourceNode) {\n errors.push(`Source node ${edge.source} not found`);\n continue;\n }\n\n if (!targetNode) {\n errors.push(`Target node ${edge.target} not found`);\n continue;\n }\n\n if (!this.validateConnection(sourceNode, targetNode, edge)) {\n errors.push(\n `Schema mismatch: ${sourceNode.id} output schema incompatible with ${targetNode.id} input schema`,\n );\n }\n }\n\n return {\n isValid: errors.length === 0,\n errors,\n };\n }\n\n // Get expected input schemas for a node based on its incoming edges\n getExpectedInputSchemas(\n nodeId: string,\n nodes: FlowNode<any, any>[],\n edges: FlowEdge[],\n ): Record<string, unknown> {\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n const expectedSchemas: Record<string, unknown> = {};\n\n for (const edge of edges) {\n if (edge.target === nodeId) {\n const sourceNode = nodeMap.get(edge.source);\n if (sourceNode) {\n const portKey = edge.sourcePort || edge.source;\n expectedSchemas[portKey] = sourceNode.outputSchema;\n }\n }\n }\n\n return expectedSchemas;\n }\n\n // Get actual output schemas for a node based on its outgoing edges\n getActualOutputSchemas(\n nodeId: string,\n nodes: FlowNode<any, any>[],\n edges: FlowEdge[],\n ): Record<string, unknown> {\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n const actualSchemas: Record<string, unknown> = {};\n\n for (const edge of edges) {\n if (edge.source === nodeId) {\n const targetNode = nodeMap.get(edge.target);\n if (targetNode) {\n const portKey = edge.targetPort || edge.target;\n actualSchemas[portKey] = targetNode.inputSchema;\n }\n }\n }\n\n return actualSchemas;\n }\n\n // Validate data against a schema\n validateData(\n data: unknown,\n schema: unknown,\n ): { isValid: boolean; errors: string[] } {\n try {\n (schema as z.ZodSchema<any>).parse(data);\n return { isValid: true, errors: [] };\n } catch (error) {\n if (error instanceof Error && \"errors\" in error) {\n return {\n isValid: false,\n errors: (\n error as { errors: Array<{ path: string[]; message: string }> }\n ).errors.map((err) => `${err.path.join(\".\")}: ${err.message}`),\n };\n }\n return {\n isValid: false,\n errors: [error instanceof Error ? 
error.message : \"Validation failed\"],\n };\n }\n }\n}\n\n// Utility functions for common type checks\nexport const typeUtils = {\n // Check if a schema is assignable to another\n isAssignable(\n fromSchema: z.ZodSchema<any>,\n toSchema: z.ZodSchema<any>,\n ): boolean {\n return defaultTypeChecker(fromSchema, toSchema);\n },\n\n // Get the most specific common schema\n getCommonSchema(\n schema1: z.ZodSchema<any>,\n schema2: z.ZodSchema<any>,\n ): z.ZodSchema<any> {\n if (schema1 === schema2) return schema1;\n\n // For now, return the more specific schema or schema1\n // In a more sophisticated system, you'd compute the intersection\n return schema1;\n },\n\n // Check if a value matches a schema\n matchesSchema(value: unknown, schema: z.ZodSchema<any>): boolean {\n try {\n schema.parse(value);\n return true;\n } catch {\n return false;\n }\n },\n};\n","import type { UploadFile } from \"../../types\";\n\ntype FileMetadata = UploadFile[\"metadata\"];\n\nexport type ResolvedUploadMetadata = {\n type: string;\n fileName: string;\n metadata: FileMetadata;\n metadataJson: string | undefined;\n};\n\nexport function resolveUploadMetadata(\n metadata: FileMetadata,\n): ResolvedUploadMetadata {\n if (!metadata) {\n return {\n type: \"\",\n fileName: \"\",\n metadata: undefined,\n metadataJson: undefined,\n };\n }\n\n const normalized = { ...metadata };\n const type = String(\n normalized.type || normalized.mimeType || normalized[\"content-type\"] || \"\",\n );\n if (type) {\n normalized.type ||= type;\n normalized.mimeType ||= type;\n }\n\n const fileName = String(\n normalized.fileName || normalized.originalName || normalized.name || \"\",\n );\n if (fileName) {\n normalized.fileName ||= fileName;\n normalized.originalName ||= fileName;\n normalized.name ||= fileName;\n }\n\n return {\n type,\n fileName,\n metadata: normalized,\n metadataJson: JSON.stringify(normalized),\n };\n}\n","import { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport { UploadistaError } from \"../../errors\";\nimport type { InputFile } from \"../../types\";\nimport { uploadFileSchema } from \"../../types\";\nimport { UploadEngine } from \"../../upload\";\nimport { arrayBuffer, fetchFile } from \"../../upload/upload-url\";\nimport { createFlowNode, NodeType } from \"../node\";\nimport { STORAGE_OUTPUT_TYPE_ID, STREAMING_INPUT_TYPE_ID } from \"../node-types\";\nimport { completeNodeExecution, waitingNodeExecution } from \"../types\";\nimport { resolveUploadMetadata } from \"../utils/resolve-upload-metadata\";\n\n/**\n * Schema for initializing a streaming upload operation.\n * Creates a new upload session for chunked file uploads.\n */\nconst initStreamingInputSchema = z.object({\n /** Operation type identifier */\n operation: z.literal(\"init\"),\n /** Storage ID where the file will be stored */\n storageId: z.string(),\n /** Optional metadata for the file */\n metadata: z.record(z.string(), z.any()).optional(),\n});\n\n/**\n * Schema for finalizing a streaming upload operation.\n * Completes the upload process after all chunks have been uploaded.\n */\nconst finalizeStreamingInputSchema = z.object({\n /** Operation type identifier */\n operation: z.literal(\"finalize\"),\n /** Upload ID from the init operation */\n uploadId: z.string(),\n});\n\n/**\n * Schema for fetching a file from a URL.\n * Downloads and processes a file from a remote URL.\n */\nconst urlInputSchema = z.object({\n /** Operation type identifier */\n operation: z.literal(\"url\"),\n /** URL to fetch the file from */\n url: z.string(),\n /** 
Optional storage ID where the file will be stored */\n storageId: z.string().optional(),\n /** Optional metadata for the file */\n metadata: z.record(z.string(), z.any()).optional(),\n});\n\n/**\n * Union schema for all input operations.\n * Defines the possible input data structures for the input node.\n */\nexport const inputDataSchema = z.union([\n initStreamingInputSchema,\n finalizeStreamingInputSchema,\n urlInputSchema,\n]);\n\n/**\n * Type representing the input data for the input node.\n * Can be one of three operation types: init, finalize, or url.\n */\nexport type InputData = z.infer<typeof inputDataSchema>;\n\n/**\n * Schema for input node filtering parameters.\n * Defines validation rules for incoming files.\n */\nexport const inputNodeParamsSchema = z.object({\n /** Array of allowed MIME types (supports wildcards like \"image/*\") */\n allowedMimeTypes: z.array(z.string()).optional(),\n /** Minimum file size in bytes */\n minSize: z.number().positive().optional(),\n /** Maximum file size in bytes */\n maxSize: z.number().positive().optional(),\n});\n\n/**\n * Parameters for configuring input node validation.\n * Controls which files are accepted based on type and size constraints.\n */\nexport type InputNodeParams = z.infer<typeof inputNodeParamsSchema>;\n\n/**\n * Helper function to validate file against input parameters.\n * Performs MIME type and size validation based on the provided parameters.\n *\n * @param file - File information to validate\n * @param params - Validation parameters\n * @returns An Effect that succeeds if validation passes or fails with validation error\n */\nfunction validateFile(\n file: { type: string; size: number },\n params?: InputNodeParams,\n): Effect.Effect<void, UploadistaError> {\n return Effect.gen(function* () {\n if (!params) return;\n\n // Check MIME type\n if (params.allowedMimeTypes && params.allowedMimeTypes.length > 0) {\n const isAllowed = params.allowedMimeTypes.some((allowed) => {\n // Support wildcard patterns like \"image/*\"\n if (allowed.endsWith(\"/*\")) {\n const prefix = allowed.slice(0, -2);\n return file.type.startsWith(prefix);\n }\n return file.type === allowed;\n });\n\n if (!isAllowed) {\n throw yield* UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n cause: new Error(\n `File type \"${\n file.type\n }\" is not allowed. 
Allowed types: ${params.allowedMimeTypes.join(\n \", \",\n )}`,\n ),\n }).toEffect();\n }\n }\n\n // Check minimum size\n if (params.minSize !== undefined && file.size < params.minSize) {\n throw yield* UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n cause: new Error(\n `File size (${file.size} bytes) is below minimum (${params.minSize} bytes)`,\n ),\n }).toEffect();\n }\n\n // Check maximum size\n if (params.maxSize !== undefined && file.size > params.maxSize) {\n throw yield* UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n cause: new Error(\n `File size (${file.size} bytes) exceeds maximum (${params.maxSize} bytes)`,\n ),\n }).toEffect();\n }\n });\n}\n\n/**\n * Creates an input node for handling file input through multiple methods.\n *\n * The input node supports three operation types:\n * - `init`: Initialize a streaming upload session\n * - `finalize`: Complete a streaming upload after all chunks are uploaded\n * - `url`: Fetch a file directly from a URL\n *\n * @param id - Unique identifier for the node\n * @param params - Optional validation parameters for filtering incoming files\n * @returns An Effect that creates a flow node configured for file input\n *\n * @example\n * ```typescript\n * // Create input node with validation\n * const inputNode = yield* createInputNode(\"file-input\", {\n * allowedMimeTypes: [\"image/*\", \"application/pdf\"],\n * maxSize: 10 * 1024 * 1024, // 10MB\n * });\n *\n * // Create input node without validation\n * const openInputNode = yield* createInputNode(\"open-input\");\n * ```\n */\nexport function createInputNode(\n id: string,\n params?: InputNodeParams,\n options?: { keepOutput?: boolean },\n) {\n const keepOutput = options?.keepOutput ?? false;\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n return yield* createFlowNode({\n id,\n name: \"Input\",\n description:\n \"Handles file input through multiple methods - streaming upload (init/finalize) or direct URL fetch\",\n type: NodeType.input,\n nodeTypeId: \"input\",\n inputSchema: inputDataSchema,\n outputSchema: uploadFileSchema,\n keepOutput,\n inputTypeId: STREAMING_INPUT_TYPE_ID,\n outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n run: ({ data, flowId, jobId, clientId }) => {\n return Effect.gen(function* () {\n switch (data.operation) {\n case \"init\": {\n // Create upload using upload server - it handles all state management\n const inputFile: InputFile = {\n storageId: data.storageId,\n size: data.metadata?.size || 0,\n type: data.metadata?.mimeType || \"application/octet-stream\",\n fileName: data.metadata?.originalName,\n lastModified: data.metadata?.size ? Date.now() : undefined,\n metadata: data.metadata\n ? 
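// InputFile transports metadata as a JSON string, so serialize the record here\n 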
JSON.stringify(data.metadata)\n : undefined,\n flow: {\n flowId,\n nodeId: id,\n jobId,\n },\n };\n\n const uploadFile = yield* uploadEngine.createUpload(\n inputFile,\n clientId,\n );\n\n // Return waiting state with the upload file\n // Client will upload chunks directly to the upload API\n return waitingNodeExecution(uploadFile);\n }\n\n case \"finalize\": {\n // Get final upload file from upload server's KV store\n const finalUploadFile = yield* uploadEngine.getUpload(\n data.uploadId,\n );\n\n // Extract type and size from metadata for validation\n const { type } = resolveUploadMetadata(finalUploadFile.metadata);\n const size = finalUploadFile.size || 0;\n\n // Validate file against params\n yield* validateFile({ type, size }, params);\n\n // Complete the node execution with the final upload file\n // Flow can now continue to next nodes (e.g., save to storage, optimize)\n return completeNodeExecution(finalUploadFile);\n }\n\n case \"url\": {\n // Fetch file from URL directly\n const response = yield* fetchFile(data.url);\n const buffer = yield* arrayBuffer(response);\n\n // Extract metadata from response or use provided metadata\n const mimeType =\n data.metadata?.mimeType ||\n response.headers.get(\"content-type\") ||\n \"application/octet-stream\";\n const size =\n data.metadata?.size ||\n Number(response.headers.get(\"content-length\") || 0);\n const fileName =\n data.metadata?.originalName ||\n data.url.split(\"/\").pop() ||\n \"file\";\n\n // Validate file against params\n yield* validateFile({ type: mimeType, size }, params);\n\n // Create a readable stream from the buffer\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n },\n });\n\n // Use upload server to create and store the file\n const inputFile: InputFile = {\n storageId: data.storageId || \"buffer\",\n size,\n type: mimeType,\n fileName,\n lastModified: Date.now(),\n metadata: data.metadata\n ? JSON.stringify(data.metadata)\n : undefined,\n };\n\n const uploadFile = yield* uploadEngine.upload(\n inputFile,\n clientId,\n stream,\n );\n\n // Complete the node execution with the upload file\n return completeNodeExecution({\n ...uploadFile,\n flow: {\n flowId,\n nodeId: id,\n jobId,\n },\n });\n }\n\n default:\n throw yield* UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n cause: new Error(\"Invalid operation\"),\n }).toEffect();\n }\n });\n },\n });\n });\n}\n","/**\n * Built-in node type registrations for the flow engine.\n *\n * This module automatically registers the standard input and output node types\n * when imported. These types enable type-safe result consumption in clients.\n *\n * Input types are registered in `inputTypeRegistry` and describe how data enters\n * the flow from external sources. 
Output types are registered in `outputTypeRegistry`\n * and describe the data shapes produced by nodes.\n *\n * @module flow/node-types\n *\n * @remarks\n * This module should be imported by the flow engine initialization to ensure\n * built-in types are registered before any flows are created.\n *\n * @example\n * ```typescript\n * // Types are automatically registered on import\n * import \"@uploadista/core/flow\";\n * import { inputTypeRegistry, outputTypeRegistry } from \"@uploadista/core/flow\";\n *\n * // Check registered types\n * const inputTypes = inputTypeRegistry.list();\n * console.log(inputTypes.map(t => t.id)); // [\"streaming-input-v1\"]\n *\n * const outputTypes = outputTypeRegistry.list();\n * console.log(outputTypes.map(t => t.id)); // [\"storage-output-v1\", \"ocr-output-v1\", ...]\n * ```\n */\n\nimport { z } from \"zod\";\nimport { uploadFileSchema } from \"../../types/upload-file\";\nimport { inputTypeRegistry } from \"../input-type-registry\";\nimport { inputDataSchema } from \"../nodes/input-node\";\nimport { outputTypeRegistry } from \"../output-type-registry\";\n\n/**\n * Type ID constants for built-in node types.\n *\n * Use these constants when creating nodes with type information to ensure\n * consistency and avoid typos.\n *\n * @example\n * ```typescript\n * import { STREAMING_INPUT_TYPE_ID, STORAGE_OUTPUT_TYPE_ID } from \"@uploadista/core/flow\";\n *\n * const inputNode = createFlowNode({\n * // ... other config\n * inputTypeId: STREAMING_INPUT_TYPE_ID,\n * outputTypeId: STORAGE_OUTPUT_TYPE_ID,\n * });\n * ```\n */\nexport const STORAGE_OUTPUT_TYPE_ID = \"storage-output-v1\";\nexport const OCR_OUTPUT_TYPE_ID = \"ocr-output-v1\";\nexport const IMAGE_DESCRIPTION_OUTPUT_TYPE_ID = \"image-description-output-v1\";\nexport const STREAMING_INPUT_TYPE_ID = \"streaming-input-v1\";\n\n/**\n * OCR output schema - structured text extraction result.\n *\n * @property extractedText - The text extracted from the document\n * @property format - Output format (markdown, plain, or structured)\n * @property taskType - Type of OCR task performed\n * @property confidence - Optional confidence score (0-1)\n */\nexport const ocrOutputSchema = z.object({\n extractedText: z.string(),\n format: z.enum([\"markdown\", \"plain\", \"structured\"]),\n taskType: z.enum([\n \"convertToMarkdown\",\n \"freeOcr\",\n \"parseFigure\",\n \"locateObject\",\n ]),\n confidence: z.number().min(0).max(1).optional(),\n});\n\nexport type OcrOutput = z.infer<typeof ocrOutputSchema>;\n\n/**\n * Image description output schema - AI-generated image analysis result.\n *\n * @property description - Human-readable description of the image\n * @property confidence - Confidence score for the description (0-1)\n * @property metadata - Additional metadata about the description\n */\nexport const imageDescriptionOutputSchema = z.object({\n description: z.string(),\n confidence: z.number().min(0).max(1).optional(),\n metadata: z.record(z.string(), z.unknown()).optional(),\n});\n\nexport type ImageDescriptionOutput = z.infer<\n typeof imageDescriptionOutputSchema\n>;\n\n/**\n * Register streaming input node type in inputTypeRegistry.\n *\n * This is the standard input type for flows that accept file uploads via\n * streaming chunks or direct URL fetches. 
It supports three operations:\n * - init: Initialize a streaming file upload session\n * - finalize: Complete the upload after all chunks are uploaded\n * - url: Fetch a file directly from a URL\n */\ninputTypeRegistry.register({\n id: STREAMING_INPUT_TYPE_ID,\n schema: inputDataSchema,\n version: \"1.0.0\",\n description:\n \"Streaming file input with init/finalize/url operations for flexible file ingestion\",\n});\n\n/**\n * Register storage output node type in outputTypeRegistry.\n *\n * This is the standard output type for flows that save files to storage backends\n * (S3, Azure, GCS, etc.). It produces UploadFile objects with final storage URLs.\n */\noutputTypeRegistry.register({\n id: STORAGE_OUTPUT_TYPE_ID,\n schema: uploadFileSchema,\n version: \"1.0.0\",\n description:\n \"Storage output node that saves files to configured storage backend\",\n});\n\n/**\n * Register OCR output node type in outputTypeRegistry.\n *\n * This output type is for document text extraction nodes that use AI/OCR to\n * extract structured text from images or PDFs.\n */\noutputTypeRegistry.register({\n id: OCR_OUTPUT_TYPE_ID,\n schema: ocrOutputSchema,\n version: \"1.0.0\",\n description:\n \"OCR output node that extracts structured text from documents using AI\",\n});\n\n/**\n * Register image description output node type in outputTypeRegistry.\n *\n * This output type is for AI-powered image analysis nodes that generate\n * textual descriptions of image content.\n */\noutputTypeRegistry.register({\n id: IMAGE_DESCRIPTION_OUTPUT_TYPE_ID,\n schema: imageDescriptionOutputSchema,\n version: \"1.0.0\",\n description:\n \"Image description output node that generates AI-powered descriptions of images\",\n});\n\n// Export the registries for convenience\nexport { inputTypeRegistry } from \"../input-type-registry\";\nexport { outputTypeRegistry } from \"../output-type-registry\";\n","/**\n * Retry policy types for the Dead Letter Queue.\n *\n * Defines configurable retry strategies including immediate, fixed delay,\n * and exponential backoff with jitter.\n *\n * @module flow/types/retry-policy\n * @see {@link DeadLetterQueueService} for DLQ operations\n */\n\n/**\n * Immediate retry strategy - retry as soon as possible.\n *\n * Use for errors that are likely transient and may succeed on immediate retry.\n */\nexport interface ImmediateBackoff {\n type: \"immediate\";\n}\n\n/**\n * Fixed delay retry strategy - wait a fixed duration between retries.\n *\n * @property delayMs - Milliseconds to wait between retries\n *\n * @example\n * ```typescript\n * const fixedBackoff: FixedBackoff = {\n * type: \"fixed\",\n * delayMs: 5000 // Wait 5 seconds between retries\n * };\n * ```\n */\nexport interface FixedBackoff {\n type: \"fixed\";\n /** Milliseconds to wait between retries */\n delayMs: number;\n}\n\n/**\n * Exponential backoff retry strategy - progressively longer delays.\n *\n * Delay = min(initialDelayMs * (multiplier ^ retryCount), maxDelayMs)\n * With optional jitter to prevent thundering herd.\n *\n * @property initialDelayMs - Starting delay in milliseconds (e.g., 1000)\n * @property maxDelayMs - Maximum delay cap in milliseconds (e.g., 300000)\n * @property multiplier - Multiplication factor per retry (e.g., 2)\n * @property jitter - Add randomness to prevent thundering herd\n *\n * @example\n * ```typescript\n * const exponentialBackoff: ExponentialBackoff = {\n * type: \"exponential\",\n * initialDelayMs: 1000, // Start with 1 second\n * maxDelayMs: 300000, // Cap at 5 minutes\n * multiplier: 2, // Double 
each time\n * jitter: true // Add randomness\n * };\n * // Delays: ~1s, ~2s, ~4s, ~8s, ..., capped at ~5min\n * ```\n */\nexport interface ExponentialBackoff {\n type: \"exponential\";\n /** Starting delay in milliseconds */\n initialDelayMs: number;\n /** Maximum delay cap in milliseconds */\n maxDelayMs: number;\n /** Multiplication factor per retry (e.g., 2 for doubling) */\n multiplier: number;\n /** Add randomness to prevent thundering herd */\n jitter: boolean;\n}\n\n/**\n * Union type for all backoff strategies.\n */\nexport type BackoffStrategy = ImmediateBackoff | FixedBackoff | ExponentialBackoff;\n\n/**\n * Configuration for automatic retry behavior.\n *\n * Defines how failed jobs should be retried, including backoff strategy,\n * max attempts, and error filtering.\n *\n * @property enabled - Whether automatic retry is enabled (default: true)\n * @property maxRetries - Maximum retry attempts (default: 3)\n * @property backoff - Backoff strategy configuration\n * @property retryableErrors - Only retry these error codes (default: all)\n * @property nonRetryableErrors - Never retry these error codes\n * @property ttlMs - Auto-delete items after this time (default: 7 days)\n *\n * @example\n * ```typescript\n * // Conservative retry policy for external APIs\n * const apiRetryPolicy: RetryPolicy = {\n * enabled: true,\n * maxRetries: 5,\n * backoff: {\n * type: \"exponential\",\n * initialDelayMs: 1000,\n * maxDelayMs: 60000,\n * multiplier: 2,\n * jitter: true\n * },\n * nonRetryableErrors: [\"VALIDATION_ERROR\", \"AUTH_ERROR\"],\n * ttlMs: 604800000 // 7 days\n * };\n *\n * // Aggressive retry for transient failures\n * const transientRetryPolicy: RetryPolicy = {\n * enabled: true,\n * maxRetries: 3,\n * backoff: { type: \"immediate\" },\n * retryableErrors: [\"NETWORK_ERROR\", \"TIMEOUT_ERROR\"]\n * };\n *\n * // No automatic retry, manual intervention only\n * const manualPolicy: RetryPolicy = {\n * enabled: false,\n * maxRetries: 0,\n * backoff: { type: \"immediate\" }\n * };\n * ```\n */\nexport interface RetryPolicy {\n /** Whether automatic retry is enabled (default: true) */\n enabled: boolean;\n /** Maximum retry attempts (default: 3) */\n maxRetries: number;\n /** Backoff strategy configuration */\n backoff: BackoffStrategy;\n /** Only retry these error codes. If undefined, retry all errors. */\n retryableErrors?: string[];\n /** Never retry these error codes. Takes precedence over retryableErrors. 
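// Sketch of the precedence rule documented above: an error code listed in
// both retryableErrors and nonRetryableErrors is not retried. The helper
// isErrorRetryable is defined later in this module; the import path is an
// assumption.
import { isErrorRetryable, type RetryPolicy } from "@uploadista/core/flow";

const policy: RetryPolicy = {
  enabled: true,
  maxRetries: 3,
  backoff: { type: "fixed", delayMs: 2000 },
  retryableErrors: ["TIMEOUT_ERROR", "NETWORK_ERROR"],
  nonRetryableErrors: ["TIMEOUT_ERROR"],
};

isErrorRetryable("NETWORK_ERROR", policy); // true  — allow-listed
isErrorRetryable("TIMEOUT_ERROR", policy); // false — deny list wins
isErrorRetryable("AUTH_ERROR", policy);    // false — not allow-listed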
*/\n nonRetryableErrors?: string[];\n /** Auto-delete items after this time in milliseconds (default: 7 days) */\n ttlMs?: number;\n}\n\n/**\n * Default retry policy values.\n */\nexport const DEFAULT_RETRY_POLICY: RetryPolicy = {\n enabled: true,\n maxRetries: 3,\n backoff: {\n type: \"exponential\",\n initialDelayMs: 1000,\n maxDelayMs: 300000, // 5 minutes\n multiplier: 2,\n jitter: true,\n },\n ttlMs: 604800000, // 7 days\n};\n\n/**\n * Calculates the next retry delay based on the backoff strategy.\n *\n * @param backoff - The backoff strategy configuration\n * @param retryCount - Current retry attempt number (0-based)\n * @returns Delay in milliseconds before the next retry\n *\n * @example\n * ```typescript\n * const delay = calculateBackoffDelay(\n * { type: \"exponential\", initialDelayMs: 1000, maxDelayMs: 60000, multiplier: 2, jitter: true },\n * 2 // Third attempt\n * );\n * // Returns approximately 4000ms (1000 * 2^2) with jitter\n * ```\n */\nexport function calculateBackoffDelay(\n backoff: BackoffStrategy,\n retryCount: number,\n): number {\n switch (backoff.type) {\n case \"immediate\":\n return 0;\n\n case \"fixed\":\n return backoff.delayMs;\n\n case \"exponential\": {\n const baseDelay =\n backoff.initialDelayMs * Math.pow(backoff.multiplier, retryCount);\n const cappedDelay = Math.min(baseDelay, backoff.maxDelayMs);\n\n if (backoff.jitter) {\n // Add random jitter: 0.5x to 1.5x of the calculated delay\n const jitterFactor = 0.5 + Math.random();\n return Math.floor(cappedDelay * jitterFactor);\n }\n\n return cappedDelay;\n }\n\n default:\n return 0;\n }\n}\n\n/**\n * Determines if an error should be retried based on the retry policy.\n *\n * @param errorCode - The error code to check\n * @param policy - The retry policy configuration\n * @returns true if the error should be retried\n *\n * @example\n * ```typescript\n * const policy: RetryPolicy = {\n * enabled: true,\n * maxRetries: 3,\n * backoff: { type: \"immediate\" },\n * nonRetryableErrors: [\"VALIDATION_ERROR\"]\n * };\n *\n * isErrorRetryable(\"NETWORK_ERROR\", policy); // true\n * isErrorRetryable(\"VALIDATION_ERROR\", policy); // false\n * ```\n */\nexport function isErrorRetryable(\n errorCode: string,\n policy: RetryPolicy,\n): boolean {\n // Check if policy is enabled\n if (!policy.enabled) {\n return false;\n }\n\n // Non-retryable errors take precedence\n if (policy.nonRetryableErrors?.includes(errorCode)) {\n return false;\n }\n\n // If retryableErrors is specified, only those are retryable\n if (policy.retryableErrors && policy.retryableErrors.length > 0) {\n return policy.retryableErrors.includes(errorCode);\n }\n\n // By default, all errors are retryable\n return true;\n}\n\n/**\n * Calculates the expiration date for a DLQ item.\n *\n * @param createdAt - When the item was created\n * @param ttlMs - Time to live in milliseconds\n * @returns The expiration date, or undefined if no TTL\n */\nexport function calculateExpirationDate(\n createdAt: Date,\n ttlMs?: number,\n): Date | undefined {\n if (ttlMs === undefined || ttlMs <= 0) {\n return undefined;\n }\n return new Date(createdAt.getTime() + ttlMs);\n}\n","/**\n * Dead Letter Queue service for capturing and retrying failed flow jobs.\n *\n * This module provides a comprehensive DLQ implementation that:\n * - Captures failed flow jobs with full context for debugging\n * - Supports configurable retry policies with backoff strategies\n * - Enables automatic scheduled retries and manual intervention\n * - Provides admin operations for DLQ management\n 
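// Deterministic walk through the exponential schedule implemented by
// calculateBackoffDelay above (jitter disabled so the numbers are exact).
// A standalone sketch; the import path is an assumption.
import {
  calculateBackoffDelay,
  type BackoffStrategy,
} from "@uploadista/core/flow";

const backoff: BackoffStrategy = {
  type: "exponential",
  initialDelayMs: 1000,
  maxDelayMs: 8000,
  multiplier: 2,
  jitter: false,
};

for (let retry = 0; retry <= 4; retry++) {
  console.log(retry, calculateBackoffDelay(backoff, retry));
}
// 0 → 1000, 1 → 2000, 2 → 4000, 3 → 8000, 4 → 8000 (capped at maxDelayMs)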
*\n * @module flow/dead-letter-queue\n * @see {@link DeadLetterItem} for the DLQ item structure\n * @see {@link RetryPolicy} for retry configuration\n */\n\nimport { Context, Effect, Layer, Option } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport { DeadLetterQueueKVStore } from \"../types/kv-store\";\nimport type {\n DeadLetterCleanupOptions,\n DeadLetterCleanupResult,\n DeadLetterItem,\n DeadLetterItemStatus,\n DeadLetterListOptions,\n DeadLetterQueueStats,\n} from \"./types/dead-letter-item\";\nimport type { FlowJob } from \"./types/flow-job\";\nimport {\n calculateBackoffDelay,\n calculateExpirationDate,\n DEFAULT_RETRY_POLICY,\n isErrorRetryable,\n type RetryPolicy,\n} from \"./types/retry-policy\";\n\n/**\n * Shape of the Dead Letter Queue service.\n *\n * Provides all operations for managing failed flow jobs including\n * adding items, querying, retrying, and cleanup.\n */\nexport interface DeadLetterQueueServiceShape {\n /**\n * Add a failed job to the DLQ with full failure context.\n *\n * @param job - The failed flow job\n * @param error - The error that caused the failure\n * @param retryPolicy - Optional retry policy (uses default if not provided)\n * @returns The created DLQ item\n */\n add(\n job: FlowJob,\n error: UploadistaError,\n retryPolicy?: RetryPolicy,\n ): Effect.Effect<DeadLetterItem, UploadistaError>;\n\n /**\n * Get a specific DLQ item by ID.\n *\n * @param itemId - The DLQ item ID\n * @returns The DLQ item\n */\n get(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;\n\n /**\n * Get a DLQ item by ID, returning None if not found.\n *\n * @param itemId - The DLQ item ID\n * @returns Option of the DLQ item\n */\n getOption(\n itemId: string,\n ): Effect.Effect<Option.Option<DeadLetterItem>, UploadistaError>;\n\n /**\n * Delete a DLQ item.\n *\n * @param itemId - The DLQ item ID to delete\n */\n delete(itemId: string): Effect.Effect<void, UploadistaError>;\n\n /**\n * List DLQ items with optional filtering and pagination.\n *\n * @param options - Filter and pagination options\n * @returns List of items and total count\n */\n list(\n options?: DeadLetterListOptions,\n ): Effect.Effect<{ items: DeadLetterItem[]; total: number }, UploadistaError>;\n\n /**\n * Update a DLQ item.\n *\n * @param itemId - The DLQ item ID\n * @param updates - Partial updates to apply\n * @returns The updated item\n */\n update(\n itemId: string,\n updates: Partial<DeadLetterItem>,\n ): Effect.Effect<DeadLetterItem, UploadistaError>;\n\n /**\n * Mark a DLQ item as being retried.\n *\n * @param itemId - The DLQ item ID\n * @returns The updated item with status \"retrying\"\n */\n markRetrying(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;\n\n /**\n * Record a failed retry attempt.\n *\n * @param itemId - The DLQ item ID\n * @param error - Error message from the failed retry\n * @param durationMs - Duration of the retry attempt\n * @returns The updated item\n */\n recordRetryFailure(\n itemId: string,\n error: string,\n durationMs: number,\n ): Effect.Effect<DeadLetterItem, UploadistaError>;\n\n /**\n * Mark a DLQ item as resolved (successfully retried or manually resolved).\n *\n * @param itemId - The DLQ item ID\n * @returns The updated item with status \"resolved\"\n */\n markResolved(itemId: string): Effect.Effect<DeadLetterItem, UploadistaError>;\n\n /**\n * Get items that are due for scheduled retry.\n *\n * @param limit - Maximum number of items to return\n * @returns List of items ready for retry\n */\n getScheduledRetries(\n 
limit?: number,\n ): Effect.Effect<DeadLetterItem[], UploadistaError>;\n\n /**\n * Cleanup old DLQ items based on options.\n *\n * @param options - Cleanup criteria\n * @returns Number of items deleted\n */\n cleanup(\n options?: DeadLetterCleanupOptions,\n ): Effect.Effect<DeadLetterCleanupResult, UploadistaError>;\n\n /**\n * Get DLQ statistics.\n *\n * @returns Aggregate statistics about the DLQ\n */\n getStats(): Effect.Effect<DeadLetterQueueStats, UploadistaError>;\n}\n\n/**\n * Effect-TS context tag for the Dead Letter Queue service.\n *\n * @example\n * ```typescript\n * const effect = Effect.gen(function* () {\n * const dlq = yield* DeadLetterQueueService;\n * const stats = yield* dlq.getStats();\n * console.log(`DLQ has ${stats.totalItems} items`);\n * });\n * ```\n */\nexport class DeadLetterQueueService extends Context.Tag(\n \"DeadLetterQueueService\",\n)<DeadLetterQueueService, DeadLetterQueueServiceShape>() {\n /**\n * Access the DLQ service optionally (for integration in FlowServer).\n * Returns Option.none if the service is not provided.\n */\n static optional = Effect.serviceOption(DeadLetterQueueService);\n}\n\n/**\n * Creates the Dead Letter Queue service implementation.\n *\n * @returns Effect that creates the DLQ service\n */\nexport function createDeadLetterQueueService(): Effect.Effect<\n DeadLetterQueueServiceShape,\n never,\n DeadLetterQueueKVStore\n> {\n return Effect.gen(function* () {\n const kvStore = yield* DeadLetterQueueKVStore;\n\n /**\n * Generate a unique DLQ item ID.\n */\n const generateId = (): string => `dlq_${crypto.randomUUID()}`;\n\n /**\n * Parse dates from a deserialized DLQ item.\n * JSON serialization converts Date objects to strings.\n */\n const parseDates = (item: DeadLetterItem): DeadLetterItem => ({\n ...item,\n createdAt: new Date(item.createdAt),\n updatedAt: new Date(item.updatedAt),\n expiresAt: item.expiresAt ? new Date(item.expiresAt) : undefined,\n nextRetryAt: item.nextRetryAt ? 
new Date(item.nextRetryAt) : undefined,\n retryHistory: item.retryHistory.map((attempt) => ({\n ...attempt,\n attemptedAt: new Date(attempt.attemptedAt),\n })),\n });\n\n /**\n * Get all items from the KV store (for filtering/stats).\n * Note: This relies on the list() operation being supported.\n */\n const getAllItems = (): Effect.Effect<DeadLetterItem[], UploadistaError> =>\n Effect.gen(function* () {\n if (!kvStore.list) {\n return [];\n }\n const keys = yield* kvStore.list();\n const items: DeadLetterItem[] = [];\n for (const key of keys) {\n const item = yield* Effect.catchAll(kvStore.get(key), () =>\n Effect.succeed(null as DeadLetterItem | null),\n );\n if (item) {\n items.push(parseDates(item));\n }\n }\n return items;\n });\n\n return {\n add: (job, error, retryPolicy = DEFAULT_RETRY_POLICY) =>\n Effect.gen(function* () {\n const id = generateId();\n const now = new Date();\n\n // Extract error details\n const errorDetails = {\n code: error.code || \"UNKNOWN_ERROR\",\n message: error.body || error.message || \"Unknown error\",\n nodeId: undefined as string | undefined,\n stack: error.stack,\n };\n\n // Find the failed node from job tasks\n const failedTask = job.tasks.find((t) => t.status === \"failed\");\n if (failedTask) {\n errorDetails.nodeId = failedTask.nodeId;\n }\n\n // Extract node results from completed tasks\n const nodeResults: Record<string, unknown> = {};\n for (const task of job.tasks) {\n if (task.result !== undefined) {\n nodeResults[task.nodeId] = task.result;\n }\n }\n\n // Determine if error is retryable\n const isRetryable = isErrorRetryable(errorDetails.code, retryPolicy);\n\n // Calculate next retry time if auto-retry is enabled\n let nextRetryAt: Date | undefined;\n if (\n retryPolicy.enabled &&\n isRetryable &&\n retryPolicy.maxRetries > 0\n ) {\n const delay = calculateBackoffDelay(retryPolicy.backoff, 0);\n nextRetryAt = new Date(now.getTime() + delay);\n }\n\n const item: DeadLetterItem = {\n id,\n jobId: job.id,\n flowId: job.flowId,\n storageId: job.storageId,\n clientId: job.clientId,\n error: errorDetails,\n inputs: job.executionState?.inputs || {},\n nodeResults,\n failedAtNodeId: errorDetails.nodeId,\n retryCount: 0,\n maxRetries: retryPolicy.maxRetries,\n nextRetryAt,\n retryHistory: [],\n createdAt: now,\n updatedAt: now,\n expiresAt: calculateExpirationDate(now, retryPolicy.ttlMs),\n status:\n isRetryable && retryPolicy.enabled ? 
\"pending\" : \"exhausted\",\n };\n\n yield* kvStore.set(id, item);\n return item;\n }),\n\n get: (itemId) =>\n Effect.gen(function* () {\n const item = yield* kvStore.get(itemId);\n return parseDates(item);\n }),\n\n getOption: (itemId) =>\n Effect.gen(function* () {\n const result = yield* Effect.either(kvStore.get(itemId));\n if (result._tag === \"Left\") {\n // Check if it's a \"not found\" error\n if (result.left.code === \"FILE_NOT_FOUND\") {\n return Option.none<DeadLetterItem>();\n }\n return yield* Effect.fail(result.left);\n }\n return Option.some(parseDates(result.right));\n }),\n\n delete: (itemId) => kvStore.delete(itemId),\n\n list: (options = {}) =>\n Effect.gen(function* () {\n const allItems = yield* getAllItems();\n const { status, flowId, clientId, limit = 50, offset = 0 } = options;\n\n // Filter items\n let filtered = allItems;\n if (status) {\n filtered = filtered.filter((item) => item.status === status);\n }\n if (flowId) {\n filtered = filtered.filter((item) => item.flowId === flowId);\n }\n if (clientId) {\n filtered = filtered.filter((item) => item.clientId === clientId);\n }\n\n // Sort by createdAt descending (newest first)\n filtered.sort(\n (a, b) => b.createdAt.getTime() - a.createdAt.getTime(),\n );\n\n const total = filtered.length;\n const items = filtered.slice(offset, offset + limit);\n\n return { items, total };\n }),\n\n update: (itemId, updates) =>\n Effect.gen(function* () {\n const rawItem = yield* kvStore.get(itemId);\n const item = parseDates(rawItem);\n const updatedItem: DeadLetterItem = {\n ...item,\n ...updates,\n updatedAt: new Date(),\n };\n yield* kvStore.set(itemId, updatedItem);\n return updatedItem;\n }),\n\n markRetrying: (itemId) =>\n Effect.gen(function* () {\n const rawItem = yield* kvStore.get(itemId);\n const item = parseDates(rawItem);\n const updatedItem: DeadLetterItem = {\n ...item,\n status: \"retrying\",\n updatedAt: new Date(),\n };\n yield* kvStore.set(itemId, updatedItem);\n return updatedItem;\n }),\n\n recordRetryFailure: (itemId, error, durationMs) =>\n Effect.gen(function* () {\n const rawItem = yield* kvStore.get(itemId);\n const item = parseDates(rawItem);\n const now = new Date();\n const newRetryCount = item.retryCount + 1;\n\n // Add to retry history\n const retryHistory = [\n ...item.retryHistory,\n {\n attemptedAt: now,\n error,\n durationMs,\n },\n ];\n\n // Determine new status and next retry time\n let status: DeadLetterItemStatus = \"pending\";\n let nextRetryAt: Date | undefined;\n\n if (newRetryCount >= item.maxRetries) {\n // Max retries reached\n status = \"exhausted\";\n nextRetryAt = undefined;\n } else {\n // Calculate next retry time with backoff\n const delay = calculateBackoffDelay(\n DEFAULT_RETRY_POLICY.backoff,\n newRetryCount,\n );\n nextRetryAt = new Date(now.getTime() + delay);\n }\n\n const updatedItem: DeadLetterItem = {\n ...item,\n retryCount: newRetryCount,\n retryHistory,\n status,\n nextRetryAt,\n updatedAt: now,\n };\n\n yield* kvStore.set(itemId, updatedItem);\n return updatedItem;\n }),\n\n markResolved: (itemId) =>\n Effect.gen(function* () {\n const rawItem = yield* kvStore.get(itemId);\n const item = parseDates(rawItem);\n const updatedItem: DeadLetterItem = {\n ...item,\n status: \"resolved\",\n nextRetryAt: undefined,\n updatedAt: new Date(),\n };\n yield* kvStore.set(itemId, updatedItem);\n return updatedItem;\n }),\n\n getScheduledRetries: (limit = 100) =>\n Effect.gen(function* () {\n const allItems = yield* getAllItems();\n const now = new Date();\n\n // Filter items that 
are:\n // 1. Status is \"pending\"\n // 2. nextRetryAt is in the past or now\n const readyItems = allItems\n .filter(\n (item) =>\n item.status === \"pending\" &&\n item.nextRetryAt &&\n item.nextRetryAt <= now,\n )\n .sort((a, b) => {\n // Sort by nextRetryAt ascending (oldest first)\n const aTime = a.nextRetryAt?.getTime() || 0;\n const bTime = b.nextRetryAt?.getTime() || 0;\n return aTime - bTime;\n })\n .slice(0, limit);\n\n return readyItems;\n }),\n\n cleanup: (options = {}) =>\n Effect.gen(function* () {\n const allItems = yield* getAllItems();\n const { olderThan, status } = options;\n const now = new Date();\n let deleted = 0;\n\n for (const item of allItems) {\n let shouldDelete = false;\n\n // Check expiration\n if (item.expiresAt && item.expiresAt <= now) {\n shouldDelete = true;\n }\n\n // Check age\n if (olderThan && item.createdAt <= olderThan) {\n // If status filter is specified, only delete matching status\n if (status) {\n shouldDelete = item.status === status;\n } else if (\n item.status === \"exhausted\" ||\n item.status === \"resolved\"\n ) {\n // Without status filter, only delete exhausted/resolved\n shouldDelete = true;\n }\n }\n\n if (shouldDelete) {\n yield* Effect.catchAll(kvStore.delete(item.id), () =>\n Effect.succeed(undefined),\n );\n deleted++;\n }\n }\n\n return { deleted };\n }),\n\n getStats: () =>\n Effect.gen(function* () {\n const allItems = yield* getAllItems();\n\n const byStatus: Record<DeadLetterItemStatus, number> = {\n pending: 0,\n retrying: 0,\n exhausted: 0,\n resolved: 0,\n };\n\n const byFlow: Record<string, number> = {};\n let oldestItem: Date | undefined;\n let totalRetryCount = 0;\n\n for (const item of allItems) {\n // Count by status\n byStatus[item.status]++;\n\n // Count by flow\n byFlow[item.flowId] = (byFlow[item.flowId] || 0) + 1;\n\n // Track oldest item\n if (!oldestItem || item.createdAt < oldestItem) {\n oldestItem = item.createdAt;\n }\n\n // Sum retry counts\n totalRetryCount += item.retryCount;\n }\n\n const averageRetryCount =\n allItems.length > 0 ? totalRetryCount / allItems.length : 0;\n\n return {\n totalItems: allItems.length,\n byStatus,\n byFlow,\n oldestItem,\n averageRetryCount,\n };\n }),\n } satisfies DeadLetterQueueServiceShape;\n });\n}\n\n/**\n * Effect Layer that creates the DeadLetterQueueService.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const dlq = yield* DeadLetterQueueService;\n * const stats = yield* dlq.getStats();\n * return stats;\n * }).pipe(\n * Effect.provide(deadLetterQueueService),\n * Effect.provide(deadLetterQueueKvStore),\n * Effect.provide(baseStoreLayer)\n * );\n * ```\n */\nexport const deadLetterQueueService = Layer.effect(\n DeadLetterQueueService,\n createDeadLetterQueueService(),\n);\n","/**\n * Parallel execution scheduler for flow nodes.\n *\n * The ParallelScheduler analyzes flow dependencies and groups nodes into execution\n * levels where nodes at the same level can run in parallel. It manages concurrency\n * using Effect's built-in concurrency control to prevent resource exhaustion.\n *\n * @module flow/parallel-scheduler\n * @see {@link ParallelScheduler} for the main scheduler class\n *\n * @remarks\n * This scheduler groups nodes by execution level (respecting dependencies) and executes\n * each level in parallel with controlled concurrency. 
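// Sketch of a retry worker built on the service shape defined above: claim
// due items, mark them retrying, then record the outcome. runJobAgain is a
// hypothetical re-execution function, and the import path is an assumption.
import { Effect } from "effect";
import { DeadLetterQueueService } from "@uploadista/core/flow";

declare function runJobAgain(jobId: string): Effect.Effect<void, Error>;

const retryDueItems = Effect.gen(function* () {
  const dlq = yield* DeadLetterQueueService;
  const due = yield* dlq.getScheduledRetries(10);
  for (const item of due) {
    yield* dlq.markRetrying(item.id);
    const started = Date.now();
    const outcome = yield* Effect.either(runJobAgain(item.jobId));
    if (outcome._tag === "Right") {
      yield* dlq.markResolved(item.id);
    } else {
      // Re-queues with backoff, or flips to "exhausted" at maxRetries.
      yield* dlq.recordRetryFailure(
        item.id,
        String(outcome.left),
        Date.now() - started,
      );
    }
  }
});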
Levels are executed sequentially\n * to ensure dependencies are satisfied before dependent nodes execute.\n *\n * @example\n * ```typescript\n * const scheduler = new ParallelScheduler({ maxConcurrency: 4 });\n *\n * // Group nodes by execution level\n * const levels = scheduler.groupNodesByExecutionLevel(nodes, edges);\n *\n * // Execute nodes in a level with Effect\n * const results = yield* scheduler.executeNodesInParallel([\n * () => executeNode(\"node1\"),\n * () => executeNode(\"node2\"),\n * () => executeNode(\"node3\")\n * ]);\n * ```\n */\n\nimport { Effect } from \"effect\";\nimport type { FlowNode } from \"./types/flow-types\";\n\n/**\n * Represents a level in the execution hierarchy where all nodes can run in parallel.\n *\n * @property level - The execution level (0 = first to execute, higher = later)\n * @property nodes - Array of node IDs that can execute in parallel at this level\n *\n * @example\n * ```\n * Level 0: [input_node] (no dependencies)\n * Level 1: [resize, optimize] (all depend on level 0)\n * Level 2: [storage] (depends on level 1)\n * ```\n */\nexport interface ExecutionLevel {\n level: number;\n nodes: string[];\n}\n\n/**\n * Configuration options for the ParallelScheduler.\n *\n * @property maxConcurrency - Maximum number of nodes to execute in parallel (default: 4)\n * Controls how many nodes run simultaneously within a level\n *\n * @example\n * ```typescript\n * const scheduler = new ParallelScheduler({ maxConcurrency: 8 });\n * ```\n */\nexport interface ParallelSchedulerConfig {\n maxConcurrency?: number;\n}\n\n/**\n * Scheduler for executing flow nodes in parallel while respecting dependencies.\n *\n * The scheduler performs topological sorting to identify nodes that can run\n * concurrently, groups them into execution levels, and provides methods to\n * execute them with controlled concurrency using Effect.\n *\n * Key responsibilities:\n * - Analyze flow dependencies and detect cycles\n * - Group nodes into parallel execution levels\n * - Execute levels in parallel with concurrency limits\n * - Provide utilities to check parallel execution feasibility\n */\nexport class ParallelScheduler {\n private maxConcurrency: number;\n\n /**\n * Creates a new ParallelScheduler instance.\n *\n * @param config - Configuration for the scheduler\n * @example\n * ```typescript\n * const scheduler = new ParallelScheduler({ maxConcurrency: 4 });\n * ```\n */\n constructor(config: ParallelSchedulerConfig = {}) {\n this.maxConcurrency = config.maxConcurrency ?? 
4;\n }\n\n /**\n * Groups nodes into execution levels where nodes in the same level can run in parallel.\n *\n * Uses Kahn's algorithm to perform topological sorting with level identification.\n * Nodes are grouped by their distance from source nodes (input nodes with no dependencies).\n *\n * @param nodes - Array of flow nodes to analyze\n * @param edges - Array of edges defining dependencies between nodes\n * @returns Array of execution levels, ordered from 0 (no dependencies) onwards\n * @throws Error if a cycle is detected in the flow graph\n *\n * @example\n * ```typescript\n * const levels = scheduler.groupNodesByExecutionLevel(nodes, edges);\n * // levels = [\n * // { level: 0, nodes: ['input_1'] },\n * // { level: 1, nodes: ['resize_1', 'optimize_1'] },\n * // { level: 2, nodes: ['output_1'] }\n * // ]\n * ```\n */\n groupNodesByExecutionLevel(\n nodes: FlowNode<unknown, unknown>[],\n edges: Array<{ source: string; target: string }>,\n ): ExecutionLevel[] {\n // Build dependency graph\n const graph: Record<string, string[]> = {};\n const inDegree: Record<string, number> = {};\n\n // Initialize graph structure\n nodes.forEach((node) => {\n graph[node.id] = [];\n inDegree[node.id] = 0;\n });\n\n // Build edges and calculate in-degrees\n edges.forEach((edge) => {\n graph[edge.source]?.push(edge.target);\n inDegree[edge.target] = (inDegree[edge.target] || 0) + 1;\n });\n\n const levels: ExecutionLevel[] = [];\n const processedNodes = new Set<string>();\n let levelIndex = 0;\n\n // Use Kahn's algorithm to group nodes by level\n while (processedNodes.size < nodes.length) {\n // Find all nodes with zero in-degree that haven't been processed\n const currentLevelNodes = Object.keys(inDegree).filter(\n (nodeId) => inDegree[nodeId] === 0 && !processedNodes.has(nodeId),\n );\n\n if (currentLevelNodes.length === 0) {\n throw new Error(\n \"Cycle detected in flow graph - cannot execute in parallel\",\n );\n }\n\n levels.push({\n level: levelIndex++,\n nodes: currentLevelNodes,\n });\n\n // Remove current level nodes and update in-degrees for dependent nodes\n currentLevelNodes.forEach((nodeId) => {\n processedNodes.add(nodeId);\n delete inDegree[nodeId];\n\n // Decrease in-degree for all nodes that depend on this node\n graph[nodeId]?.forEach((dependentId) => {\n if (inDegree[dependentId] !== undefined) {\n inDegree[dependentId]--;\n }\n });\n });\n }\n\n return levels;\n }\n\n /**\n * Executes a batch of Effect-based node executors in parallel with concurrency control.\n *\n * All executors are run in parallel, but the number of concurrent executions is limited\n * by maxConcurrency. 
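// The grouping above applied to a diamond graph (a → b, a → c, b → d,
// c → d). Only `id` is read from each node, so bare { id } objects are
// enough for this standalone sketch; the cast reflects that shortcut.
import { ParallelScheduler } from "@uploadista/core/flow"; // path assumed

const scheduler = new ParallelScheduler();
const nodes = [{ id: "a" }, { id: "b" }, { id: "c" }, { id: "d" }];
const edges = [
  { source: "a", target: "b" },
  { source: "a", target: "c" },
  { source: "b", target: "d" },
  { source: "c", target: "d" },
];

const levels = scheduler.groupNodesByExecutionLevel(
  nodes as Parameters<ParallelScheduler["groupNodesByExecutionLevel"]>[0],
  edges,
);
// → [ { level: 0, nodes: ["a"] },
//     { level: 1, nodes: ["b", "c"] },
//     { level: 2, nodes: ["d"] } ]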
This prevents resource exhaustion while maximizing parallelism.\n *\n * @template T - The return type of each executor\n * @template E - The error type of the Effects\n * @template R - The requirements type of the Effects\n *\n * @param nodeExecutors - Array of Effect-returning functions to execute in parallel\n * @returns Effect that resolves to array of results in the same order as input\n *\n * @example\n * ```typescript\n * const results = yield* scheduler.executeNodesInParallel([\n * () => executeNode(\"node1\"),\n * () => executeNode(\"node2\"),\n * () => executeNode(\"node3\")\n * ]);\n * // results will be in order: [result1, result2, result3]\n * ```\n */\n executeNodesInParallel<T, E, R>(\n nodeExecutors: Array<() => Effect.Effect<T, E, R>>,\n ): Effect.Effect<T[], E, R> {\n return Effect.all(\n nodeExecutors.map((executor) => executor()),\n {\n concurrency: this.maxConcurrency,\n },\n );\n }\n\n /**\n * Determines if a set of nodes can be safely executed in parallel.\n *\n * Nodes can execute in parallel if all their dependencies have been completed.\n * This is typically called to verify that nodes in an execution level are ready\n * to run given the current node results.\n *\n * @param nodeIds - Array of node IDs to check\n * @param nodeResults - Map of completed node IDs to their results\n * @param reverseGraph - Dependency graph mapping node IDs to their incoming dependencies\n * @returns true if all dependencies for all nodes are in nodeResults, false otherwise\n *\n * @example\n * ```typescript\n * const canRun = scheduler.canExecuteInParallel(\n * ['resize_1', 'optimize_1'],\n * nodeResults,\n * reverseGraph\n * );\n * ```\n */\n canExecuteInParallel(\n nodeIds: string[],\n nodeResults: Map<string, unknown>,\n reverseGraph: Record<string, string[]>,\n ): boolean {\n return nodeIds.every((nodeId) => {\n const dependencies = reverseGraph[nodeId] || [];\n return dependencies.every((depId) => nodeResults.has(depId));\n });\n }\n\n /**\n * Gets execution statistics for monitoring and debugging.\n *\n * @returns Object containing current scheduler configuration\n *\n * @example\n * ```typescript\n * const stats = scheduler.getStats();\n * console.log(`Max concurrency: ${stats.maxConcurrency}`);\n * ```\n */\n getStats() {\n return {\n maxConcurrency: this.maxConcurrency,\n };\n }\n}\n","/**\n * Type guards and helpers for safe type narrowing of flow results and inputs.\n *\n * This module provides runtime type guards for discriminating between different\n * types of flow outputs and input operations. 
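// The readiness check from canExecuteInParallel above, as a standalone
// sketch: a level may start only when every dependency of every node in it
// already has a recorded result. Ids and the reverse graph are illustrative.
import { ParallelScheduler } from "@uploadista/core/flow"; // path assumed

const scheduler = new ParallelScheduler({ maxConcurrency: 4 });
const reverseGraph = { resize_1: ["input_1"], optimize_1: ["input_1"] };
const nodeResults = new Map<string, unknown>([["input_1", { ok: true }]]);

scheduler.canExecuteInParallel(
  ["resize_1", "optimize_1"],
  nodeResults,
  reverseGraph,
); // true

nodeResults.delete("input_1");
scheduler.canExecuteInParallel(
  ["resize_1", "optimize_1"],
  nodeResults,
  reverseGraph,
); // false — input_1 has no result yet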
Type guards validate both the type\n * tag and the data structure against registered schemas.\n *\n * @module flow/type-guards\n *\n * @example\n * ```typescript\n * import { isStorageOutput, filterOutputsByType, isUrlOperation } from \"@uploadista/core/flow\";\n *\n * // Type-safe output result consumption\n * if (result.success && result.flowOutputs) {\n * const storageOutputs = filterOutputsByType(result.flowOutputs, isStorageOutput);\n * for (const output of storageOutputs) {\n * // output.data is typed as UploadFile\n * console.log(\"Stored at:\", output.data.url);\n * }\n * }\n *\n * // Type-safe input operation handling\n * if (isUrlOperation(inputData)) {\n * // TypeScript knows inputData has url property\n * console.log(\"Fetching from\", inputData.url);\n * }\n * ```\n */\n\nimport { Effect } from \"effect\";\nimport { UploadistaError } from \"../errors\";\nimport type { UploadFile } from \"../types\";\nimport { uploadFileSchema } from \"../types\";\nimport {\n IMAGE_DESCRIPTION_OUTPUT_TYPE_ID,\n type ImageDescriptionOutput,\n OCR_OUTPUT_TYPE_ID,\n type OcrOutput,\n} from \"./node-types\";\nimport type { InputData } from \"./nodes/input-node\";\nimport { outputTypeRegistry } from \"./output-type-registry\";\nimport type { TypedOutput } from \"./types/flow-types\";\n\n/**\n * A narrowed typed output with a specific node type and data type.\n * Unlike TypedOutput<T>, this type has a required nodeType field and\n * excludes BuiltInTypedOutput from the union, providing better type narrowing.\n *\n * @template T - The TypeScript type of the output data\n * @template TNodeType - The literal string type of the node type ID\n */\nexport type NarrowedTypedOutput<T, TNodeType extends string = string> = {\n nodeType: TNodeType;\n data: T;\n nodeId: string;\n timestamp: string;\n};\n\n/**\n * Factory function to create type guards for specific node types.\n *\n * Creates a TypeScript type guard that validates both the type tag and\n * the data structure against the registered schema. 
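// Both rejection paths of a generated guard, as a standalone sketch.
// isOcrOutput is defined later in this module; exporting TypedOutput from
// the flow entry point is an assumption.
import { isOcrOutput, type TypedOutput } from "@uploadista/core/flow";

const wrongTag: TypedOutput = {
  nodeId: "storage_1",
  nodeType: "storage-output-v1", // tag mismatch → false before schema check
  data: {},
  timestamp: new Date().toISOString(),
};

const badData: TypedOutput = {
  nodeId: "ocr_1",
  nodeType: "ocr-output-v1",
  data: { extractedText: 42 }, // fails schema: extractedText must be a string
  timestamp: new Date().toISOString(),
};

isOcrOutput(wrongTag); // false
isOcrOutput(badData);  // false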
This enables type-safe\n * narrowing of TypedOutput objects in TypeScript.\n *\n * @template T - The expected TypeScript type after narrowing\n * @template TNodeType - The literal string type of the node type ID\n * @param typeId - The registered type ID to check against (e.g., \"storage-output-v1\")\n * @returns A type guard function that narrows TypedOutput to NarrowedTypedOutput<T, TNodeType>\n *\n * @example\n * ```typescript\n * import { createTypeGuard } from \"@uploadista/core/flow\";\n * import { z } from \"zod\";\n *\n * const descriptionSchema = z.object({\n * description: z.string(),\n * confidence: z.number(),\n * });\n *\n * type DescriptionOutput = z.infer<typeof descriptionSchema>;\n *\n * const isDescriptionOutput = createTypeGuard<DescriptionOutput>(\n * \"description-output-v1\"\n * );\n *\n * // Use in code\n * if (isDescriptionOutput(output)) {\n * // output.data is typed as DescriptionOutput\n * console.log(output.data.description);\n * }\n * ```\n */\nexport function createTypeGuard<T, TNodeType extends string = string>(\n typeId: TNodeType,\n): (output: TypedOutput) => output is NarrowedTypedOutput<T, TNodeType> {\n return (output: TypedOutput): output is NarrowedTypedOutput<T, TNodeType> => {\n // Check type matches\n if (output.nodeType !== typeId) return false;\n\n // Validate against registered schema\n const typeDef = outputTypeRegistry.get(typeId);\n if (!typeDef) return false;\n\n const result = typeDef.schema.safeParse(output.data);\n return result.success;\n };\n}\n\n/**\n * Type guard for UploadFile objects.\n *\n * Validates that a value is a valid UploadFile by checking its structure against the schema.\n * This is useful for determining if a node result is an UploadFile, which affects\n * auto-persistence and intermediate file tracking.\n *\n * @param value - The value to check\n * @returns True if the value is a valid UploadFile\n *\n * @example\n * ```typescript\n * import { isUploadFile } from \"@uploadista/core/flow\";\n *\n * if (isUploadFile(nodeResult)) {\n * // nodeResult is typed as UploadFile\n * console.log(\"File ID:\", nodeResult.id);\n * console.log(\"Storage:\", nodeResult.storage.id);\n * }\n * ```\n */\nexport function isUploadFile(value: unknown): value is UploadFile {\n if (!value || typeof value !== \"object\") return false;\n const result = uploadFileSchema.safeParse(value);\n return result.success;\n}\n\n/**\n * Type guard for storage output nodes.\n *\n * Validates that an output is from a storage node and contains valid UploadFile data.\n *\n * @param output - The output to check\n * @returns True if the output is a storage output with valid UploadFile data\n *\n * @example\n * ```typescript\n * import { isStorageOutput } from \"@uploadista/core/flow\";\n *\n * if (isStorageOutput(output)) {\n * // output.data is typed as UploadFile\n * console.log(\"File URL:\", output.data.url);\n * console.log(\"File size:\", output.data.size);\n * }\n * ```\n */\nexport const isStorageOutput = createTypeGuard<UploadFile>(\"storage-output-v1\");\n\n/**\n * Type guard for OCR output nodes.\n *\n * Validates that an output is from an OCR node and contains valid structured OCR data.\n *\n * @param output - The output to check\n * @returns True if the output is an OCR output with valid structured text data\n *\n * @example\n * ```typescript\n * import { isOcrOutput } from \"@uploadista/core/flow\";\n *\n * if (isOcrOutput(output)) {\n * // output.data is typed as OcrOutput\n * console.log(\"Extracted text:\", output.data.extractedText);\n * 
console.log(\"Format:\", output.data.format);\n * console.log(\"Task type:\", output.data.taskType);\n * }\n * ```\n */\nexport const isOcrOutput = createTypeGuard<OcrOutput>(OCR_OUTPUT_TYPE_ID);\n\n/**\n * Type guard for image description output nodes.\n *\n * Validates that an output is from an image description node and contains valid description data.\n *\n * @param output - The output to check\n * @returns True if the output is an image description output with valid description data\n *\n * @example\n * ```typescript\n * import { isImageDescriptionOutput } from \"@uploadista/core/flow\";\n *\n * if (isImageDescriptionOutput(output)) {\n * // output.data is typed as ImageDescriptionOutput\n * console.log(\"Description:\", output.data.description);\n * console.log(\"Confidence:\", output.data.confidence);\n * }\n * ```\n */\nexport const isImageDescriptionOutput = createTypeGuard<ImageDescriptionOutput>(\n IMAGE_DESCRIPTION_OUTPUT_TYPE_ID,\n);\n\n/**\n * Filter an array of outputs to only those matching a specific type.\n *\n * This helper function filters outputs using a type guard and returns a\n * properly typed array of results. It's useful for extracting specific\n * output types from multi-output flows.\n *\n * @template TOutput - The expected narrowed output type\n * @param outputs - Array of typed outputs to filter\n * @param typeGuard - Type guard function to use for filtering\n * @returns Array of outputs that match the type guard, properly typed\n *\n * @example\n * ```typescript\n * import { filterOutputsByType, isStorageOutput } from \"@uploadista/core/flow\";\n *\n * // Get all storage outputs from a multi-output flow\n * const storageOutputs = filterOutputsByType(\n * flowResult.outputs,\n * isStorageOutput\n * );\n *\n * for (const output of storageOutputs) {\n * // Each output.data is typed as UploadFile\n * console.log(\"Saved file:\", output.data.url);\n * }\n * ```\n */\nexport function filterOutputsByType<TOutput extends TypedOutput>(\n outputs: TypedOutput[],\n typeGuard: (output: TypedOutput) => output is TOutput,\n): TOutput[] {\n return outputs.filter(typeGuard);\n}\n\n/**\n * Get a single output of a specific type from an array of outputs.\n *\n * This helper function finds exactly one output matching the type guard.\n * It throws an error if no outputs match or if multiple outputs match,\n * ensuring the caller receives exactly the expected result.\n *\n * @template TOutput - The expected narrowed output type\n * @param outputs - Array of typed outputs to search\n * @param typeGuard - Type guard function to use for matching\n * @returns The single matching output, properly typed\n * @throws {UploadistaError} If no outputs match (OUTPUT_NOT_FOUND)\n * @throws {UploadistaError} If multiple outputs match (MULTIPLE_OUTPUTS_FOUND)\n *\n * @example\n * ```typescript\n * import { getSingleOutputByType, isStorageOutput } from \"@uploadista/core/flow\";\n *\n * try {\n * const storageOutput = getSingleOutputByType(\n * flowResult.outputs,\n * isStorageOutput\n * );\n * // storageOutput.data is typed as UploadFile\n * console.log(\"File saved at:\", storageOutput.data.url);\n * } catch (error) {\n * if (error.code === \"OUTPUT_NOT_FOUND\") {\n * console.error(\"No storage output found\");\n * } else if (error.code === \"MULTIPLE_OUTPUTS_FOUND\") {\n * console.error(\"Multiple storage outputs found, expected one\");\n * }\n * }\n * ```\n */\nexport function getSingleOutputByType<TOutput extends TypedOutput>(\n outputs: TypedOutput[],\n typeGuard: (output: TypedOutput) => 
output is TOutput,\n): Effect.Effect<TOutput, UploadistaError> {\n return Effect.gen(function* () {\n const filtered = filterOutputsByType(outputs, typeGuard);\n\n if (filtered.length === 0) {\n return yield* UploadistaError.fromCode(\"OUTPUT_NOT_FOUND\", {\n body: \"No output of the specified type was found in the flow results\",\n }).toEffect();\n }\n\n if (filtered.length > 1) {\n return yield* UploadistaError.fromCode(\"MULTIPLE_OUTPUTS_FOUND\", {\n body: `Found ${filtered.length} outputs of the specified type, expected exactly one`,\n details: {\n foundCount: filtered.length,\n nodeIds: filtered.map((o) => o.nodeId),\n },\n }).toEffect();\n }\n\n // TypeScript knows filtered.length is 1 here due to the checks above\n // biome-ignore lint/style/noNonNullAssertion: We've checked the length above\n return filtered[0]!;\n });\n}\n\n/**\n * Get the first output of a specific type, if any exists.\n *\n * Unlike getSingleOutputByType, this function returns undefined if no outputs\n * match, and returns the first match if multiple outputs exist. This is useful\n * when you want a more lenient matching strategy.\n *\n * @template TOutput - The expected narrowed output type\n * @param outputs - Array of typed outputs to search\n * @param typeGuard - Type guard function to use for matching\n * @returns The first matching output, or undefined if none match\n *\n * @example\n * ```typescript\n * import { getFirstOutputByType, isStorageOutput } from \"@uploadista/core/flow\";\n *\n * const storageOutput = getFirstOutputByType(\n * flowResult.outputs,\n * isStorageOutput\n * );\n *\n * if (storageOutput) {\n * console.log(\"First storage output:\", storageOutput.data.url);\n * } else {\n * console.log(\"No storage outputs found\");\n * }\n * ```\n */\nexport function getFirstOutputByType<TOutput extends TypedOutput>(\n outputs: TypedOutput[],\n typeGuard: (output: TypedOutput) => output is TOutput,\n): TOutput | undefined {\n const filtered = filterOutputsByType(outputs, typeGuard);\n return filtered[0];\n}\n\n/**\n * Get an output by its node ID.\n *\n * This helper finds an output produced by a specific node instance,\n * regardless of its type. 
Useful when you know the specific node ID\n * you're looking for.\n *\n * @param outputs - Array of typed outputs to search\n * @param nodeId - The node ID to match\n * @returns The output from the specified node, or undefined if not found\n *\n * @example\n * ```typescript\n * import { getOutputByNodeId } from \"@uploadista/core/flow\";\n *\n * const cdnOutput = getOutputByNodeId(flowResult.outputs, \"cdn-storage\");\n * if (cdnOutput) {\n * console.log(\"CDN output:\", cdnOutput.data);\n * }\n * ```\n */\nexport function getOutputByNodeId(\n outputs: TypedOutput[],\n nodeId: string,\n): TypedOutput | undefined {\n return outputs.find((output) => output.nodeId === nodeId);\n}\n\n/**\n * Check if any outputs match a specific type.\n *\n * Simple predicate function to check if at least one output of a given\n * type exists in the results.\n *\n * @template TOutput - The expected narrowed output type\n * @param outputs - Array of typed outputs to check\n * @param typeGuard - Type guard function to use for checking\n * @returns True if at least one output matches the type guard\n *\n * @example\n * ```typescript\n * import { hasOutputOfType, isStorageOutput } from \"@uploadista/core/flow\";\n *\n * if (hasOutputOfType(flowResult.outputs, isStorageOutput)) {\n * console.log(\"Flow produced at least one storage output\");\n * } else {\n * console.log(\"No storage outputs in this flow\");\n * }\n * ```\n */\nexport function hasOutputOfType<TOutput extends TypedOutput>(\n outputs: TypedOutput[],\n typeGuard: (output: TypedOutput) => output is TOutput,\n): boolean {\n return outputs.some(typeGuard);\n}\n\n// ============================================================================\n// Input Operation Type Guards\n// ============================================================================\n\n/**\n * Type guard for init operation (streaming file upload initialization).\n *\n * Checks if the input data is an init operation that starts a streaming\n * file upload session.\n *\n * @param data - Input data to check\n * @returns True if data is an init operation\n *\n * @example\n * ```typescript\n * if (isInitOperation(inputData)) {\n * console.log(\"Storage ID:\", inputData.storageId);\n * console.log(\"Metadata:\", inputData.metadata);\n * }\n * ```\n */\nexport function isInitOperation(\n data: InputData,\n): data is Extract<InputData, { operation: \"init\" }> {\n return data.operation === \"init\";\n}\n\n/**\n * Type guard for finalize operation (complete streaming upload).\n *\n * Checks if the input data is a finalize operation that completes a\n * previously initialized streaming upload.\n *\n * @param data - Input data to check\n * @returns True if data is a finalize operation\n *\n * @example\n * ```typescript\n * if (isFinalizeOperation(inputData)) {\n * console.log(\"Upload ID:\", inputData.uploadId);\n * }\n * ```\n */\nexport function isFinalizeOperation(\n data: InputData,\n): data is Extract<InputData, { operation: \"finalize\" }> {\n return data.operation === \"finalize\";\n}\n\n/**\n * Type guard for URL operation (direct file fetch from URL).\n *\n * Checks if the input data is a URL operation that fetches a file\n * directly from an external URL.\n *\n * @param data - Input data to check\n * @returns True if data is a URL operation\n *\n * @example\n * ```typescript\n * if (isUrlOperation(inputData)) {\n * console.log(\"Fetching from:\", inputData.url);\n * console.log(\"Optional storage:\", inputData.storageId);\n * }\n * ```\n */\nexport function isUrlOperation(\n data: 
InputData,\n): data is Extract<InputData, { operation: \"url\" }> {\n return data.operation === \"url\";\n}\n\n/**\n * Type guard for upload operations (init or url).\n *\n * Checks if the input data is either an init or URL operation (i.e., operations\n * that trigger new uploads, as opposed to finalize which completes an existing upload).\n *\n * @param data - Input data to check\n * @returns True if data is an init or URL operation\n *\n * @example\n * ```typescript\n * if (isUploadOperation(inputData)) {\n * // This is a new upload, not a finalization\n * if (isInitOperation(inputData)) {\n * console.log(\"Streaming upload\");\n * } else {\n * console.log(\"URL fetch\");\n * }\n * }\n * ```\n */\nexport function isUploadOperation(\n data: InputData,\n): data is Extract<InputData, { operation: \"init\" | \"url\" }> {\n return data.operation === \"init\" || data.operation === \"url\";\n}\n","/**\n * Core Flow Engine implementation using Effect-based DAG execution.\n *\n * This module implements the Flow Engine, which executes directed acyclic graphs (DAGs)\n * of processing nodes. It supports sequential execution with topological sorting,\n * conditional node execution, retry logic, and pausable flows.\n *\n * @module flow\n * @see {@link createFlowWithSchema} for creating new flows\n * @see {@link Flow} for the flow type definition\n */\n\n/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */\n\nimport { Effect, Stream } from \"effect\";\nimport { z } from \"zod\";\n\nimport { UploadistaError } from \"../errors\";\nimport { CircuitBreakerStoreService } from \"../types/circuit-breaker-store\";\nimport { UploadFileDataStores } from \"../types/data-store\";\nimport type { UploadFile } from \"../types/upload-file\";\nimport type { CircuitBreakerConfig } from \"./circuit-breaker\";\nimport { DistributedCircuitBreakerRegistry } from \"./distributed-circuit-breaker\";\nimport type { FlowEdge } from \"./edge\";\nimport { EventType } from \"./event\";\nimport { getNodeData } from \"./node\";\nimport { ParallelScheduler } from \"./parallel-scheduler\";\nimport { isUploadFile } from \"./type-guards\";\nimport type {\n FlowCircuitBreakerConfig,\n FlowConfig,\n FlowNode,\n FlowNodeData,\n TypedOutput,\n} from \"./types/flow-types\";\nimport { FlowTypeValidator } from \"./types/type-validator\";\n\n/**\n * Serialized flow data for storage and transport.\n * Contains the minimal information needed to reconstruct a flow.\n *\n * @property id - Unique flow identifier\n * @property name - Human-readable flow name\n * @property nodes - Array of node data (without execution logic)\n * @property edges - Connections between nodes defining data flow\n */\nexport type FlowData = {\n id: string;\n name: string;\n nodes: FlowNodeData[];\n edges: FlowEdge[];\n};\n\n/**\n * Extracts serializable flow data from a Flow instance.\n * Useful for storing flow definitions or sending them over the network.\n *\n * @template TRequirements - Effect requirements for the flow\n * @param flow - Flow instance to extract data from\n * @returns Serializable flow data without execution logic\n *\n * @example\n * ```typescript\n * const flowData = getFlowData(myFlow);\n * // Store in database or send to client\n * await db.flows.save(flowData);\n * ```\n */\nexport const getFlowData = <TRequirements>(\n flow: Flow<any, any, TRequirements>,\n): FlowData => {\n return {\n id: flow.id,\n name: flow.name,\n nodes: flow.nodes.map(getNodeData),\n edges: flow.edges,\n };\n};\n\n/**\n * Result of a 
flow execution - either completed or paused.\n *\n * @template TOutput - Type of the flow's output data\n *\n * @remarks\n * Flows can pause when a node needs additional data (e.g., waiting for user input\n * or external service). The execution state allows resuming from where it paused.\n *\n * @example\n * ```typescript\n * const result = await Effect.runPromise(flow.run({ inputs, storageId, jobId }));\n *\n * if (result.type === \"completed\") {\n * console.log(\"Flow completed:\", result.result);\n * } else {\n * console.log(\"Flow paused at node:\", result.nodeId);\n * // Can resume later with: flow.resume({ jobId, executionState: result.executionState, ... })\n * }\n * ```\n */\nexport type FlowExecutionResult<TOutput> =\n | {\n type: \"completed\";\n result: TOutput;\n outputs?: TypedOutput[]; // Typed outputs from all output nodes with registered types\n }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, unknown>;\n };\n };\n\n/**\n * A Flow represents a directed acyclic graph (DAG) of processing nodes.\n *\n * Flows execute nodes in topological order, passing data between nodes through edges.\n * They support conditional execution, retry logic, pausable nodes, and event emission.\n *\n * @template TFlowInputSchema - Zod schema defining the shape of input data\n * @template TFlowOutputSchema - Zod schema defining the shape of output data\n * @template TRequirements - Effect requirements (services/contexts) needed by nodes\n *\n * @property id - Unique flow identifier\n * @property name - Human-readable flow name\n * @property nodes - Array of nodes in the flow\n * @property edges - Connections between nodes\n * @property inputSchema - Zod schema for validating flow inputs\n * @property outputSchema - Zod schema for validating flow outputs\n * @property onEvent - Optional callback for flow execution events\n * @property run - Executes the flow from the beginning\n * @property resume - Resumes a paused flow execution\n * @property validateTypes - Validates node type compatibility\n * @property validateInputs - Validates input data against schema\n * @property validateOutputs - Validates output data against schema\n *\n * @remarks\n * Flows are created using {@link createFlowWithSchema}. 
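// Exhaustive handling of the completed/paused union above, as a sketch.
// saveState is a hypothetical persistence helper, the ids are illustrative,
// and `flow` is typed loosely for brevity.
import { Effect } from "effect";

declare function saveState(
  nodeId: string,
  state: unknown,
): Effect.Effect<void>;

const handleRun = (flow: any, inputs: Record<string, unknown>) =>
  Effect.gen(function* () {
    const result = yield* flow.run({
      inputs,
      storageId: "storage-1",
      jobId: "job-123",
      clientId: null,
    });
    switch (result.type) {
      case "completed":
        return result.result;
      case "paused":
        // Persist the execution state; a later flow.resume() call can pick
        // up from result.nodeId with reconstructed nodeResults.
        yield* saveState(result.nodeId, result.executionState);
        return undefined;
    }
  });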
The Effect-based design\n * allows for composable error handling, resource management, and dependency injection.\n *\n * @example\n * ```typescript\n * const flow = yield* createFlowWithSchema({\n * flowId: \"image-pipeline\",\n * name: \"Image Processing Pipeline\",\n * nodes: [inputNode, resizeNode, optimizeNode, storageNode],\n * edges: [\n * { source: \"input\", target: \"resize\" },\n * { source: \"resize\", target: \"optimize\" },\n * { source: \"optimize\", target: \"storage\" }\n * ],\n * inputSchema: z.object({ file: z.instanceof(File) }),\n * outputSchema: uploadFileSchema\n * });\n *\n * const result = yield* flow.run({\n * inputs: { input: { file: myFile } },\n * storageId: \"storage-1\",\n * jobId: \"job-123\"\n * });\n * ```\n */\nexport type Flow<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements,\n> = {\n id: string;\n name: string;\n nodes: FlowNode<any, any, UploadistaError>[];\n edges: FlowEdge[];\n inputSchema: TFlowInputSchema;\n outputSchema: TFlowOutputSchema;\n onEvent?: FlowConfig<\n TFlowInputSchema,\n TFlowOutputSchema,\n TRequirements\n >[\"onEvent\"];\n checkJobStatus?: FlowConfig<\n TFlowInputSchema,\n TFlowOutputSchema,\n TRequirements\n >[\"checkJobStatus\"];\n hooks?: FlowConfig<\n TFlowInputSchema,\n TFlowOutputSchema,\n TRequirements\n >[\"hooks\"];\n run: (args: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId: string;\n clientId: string | null;\n }) => Effect.Effect<\n FlowExecutionResult<Record<string, z.infer<TFlowOutputSchema>>>,\n UploadistaError,\n TRequirements | UploadFileDataStores\n >;\n resume: (args: {\n jobId: string;\n storageId: string;\n nodeResults: Record<string, unknown>; // Reconstructed from tasks\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n clientId: string | null;\n }) => Effect.Effect<\n FlowExecutionResult<Record<string, z.infer<TFlowOutputSchema>>>,\n UploadistaError,\n TRequirements | UploadFileDataStores\n >;\n validateTypes: () => { isValid: boolean; errors: string[] };\n validateInputs: (inputs: unknown) => { isValid: boolean; errors: string[] };\n validateOutputs: (outputs: unknown) => { isValid: boolean; errors: string[] };\n};\n\n/**\n * Creates a new Flow with Zod schema-based type validation.\n *\n * This is the primary way to create flows in Uploadista. 
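// The validation helpers on Flow return { isValid, errors } instead of
// throwing, so wiring problems can be surfaced before anything runs. A
// standalone sketch; `flow` and `myFile` are assumed to exist.
declare const flow: {
  validateTypes(): { isValid: boolean; errors: string[] };
  validateInputs(inputs: unknown): { isValid: boolean; errors: string[] };
};
declare const myFile: File;

const typeCheck = flow.validateTypes();
if (!typeCheck.isValid) {
  console.error("Incompatible node connections:", typeCheck.errors);
}

const inputCheck = flow.validateInputs({ input: { file: myFile } });
if (!inputCheck.isValid) {
  console.error("Inputs rejected by inputSchema:", inputCheck.errors);
}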
It constructs a Flow\n * instance that validates inputs/outputs, executes nodes in topological order,\n * handles errors with retries, and emits events during execution.\n *\n * @template TFlowInputSchema - Zod schema for flow input validation\n * @template TFlowOutputSchema - Zod schema for flow output validation\n * @template TRequirements - Effect requirements/services needed by the flow\n * @template TNodeError - Union of possible errors from nodes\n * @template TNodeRequirements - Union of requirements from nodes\n *\n * @param config - Flow configuration object\n * @param config.flowId - Unique identifier for the flow\n * @param config.name - Human-readable flow name\n * @param config.nodes - Array of nodes (can be plain nodes or Effects resolving to nodes)\n * @param config.edges - Array of edges connecting nodes\n * @param config.inputSchema - Zod schema for validating inputs\n * @param config.outputSchema - Zod schema for validating outputs\n * @param config.typeChecker - Optional custom type compatibility checker\n * @param config.onEvent - Optional event callback for monitoring execution\n *\n * @returns Effect that resolves to a Flow instance\n *\n * @throws {UploadistaError} FLOW_CYCLE_ERROR if the graph contains cycles\n * @throws {UploadistaError} FLOW_NODE_NOT_FOUND if a node is referenced but missing\n * @throws {UploadistaError} FLOW_NODE_ERROR if node execution fails\n * @throws {UploadistaError} FLOW_OUTPUT_VALIDATION_ERROR if outputs don't match schema\n *\n * @remarks\n * - Nodes can be provided as plain objects or as Effects that resolve to nodes\n * - The flow performs topological sorting to determine execution order\n * - Conditional nodes are evaluated before execution\n * - Nodes can specify retry configuration with exponential backoff\n * - Pausable nodes can halt execution and resume later\n *\n * @example\n * ```typescript\n * const flow = yield* createFlowWithSchema({\n * flowId: \"image-upload\",\n * name: \"Image Upload with Processing\",\n * nodes: [\n * inputNode,\n * yield* createResizeNode({ width: 1920, height: 1080 }),\n * optimizeNode,\n * storageNode\n * ],\n * edges: [\n * { source: \"input\", target: \"resize\" },\n * { source: \"resize\", target: \"optimize\" },\n * { source: \"optimize\", target: \"storage\" }\n * ],\n * inputSchema: z.object({\n * file: z.instanceof(File),\n * metadata: z.record(z.string(), z.any()).optional()\n * }),\n * outputSchema: uploadFileSchema,\n * onEvent: (event) => Effect.gen(function* () {\n * console.log(\"Flow event:\", event);\n * return { eventId: event.jobId };\n * })\n * });\n * ```\n *\n * @see {@link Flow} for the returned flow type\n * @see {@link FlowConfig} for configuration options\n */\nexport function createFlowWithSchema<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements = never,\n TNodeError = never,\n TNodeRequirements = never,\n>(\n config: FlowConfig<\n TFlowInputSchema,\n TFlowOutputSchema,\n TNodeError,\n TNodeRequirements\n >,\n): Effect.Effect<\n Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements>,\n TNodeError,\n TNodeRequirements\n> {\n return Effect.gen(function* () {\n // Resolve nodes - handle mixed arrays of pure nodes and Effect nodes\n const resolvedNodes: Array<FlowNode<any, any, UploadistaError>> =\n yield* Effect.all(\n config.nodes.map((node) =>\n Effect.isEffect(node)\n ? 
(node as Effect.Effect<\n FlowNode<any, any, UploadistaError>,\n TNodeError,\n TNodeRequirements\n >)\n : Effect.succeed(node as FlowNode<any, any, UploadistaError>),\n ),\n );\n\n const {\n flowId,\n name,\n onEvent,\n checkJobStatus,\n edges,\n inputSchema,\n outputSchema,\n typeChecker,\n circuitBreaker: circuitBreakerConfig,\n } = config;\n const nodes = resolvedNodes;\n const typeValidator = new FlowTypeValidator(typeChecker);\n\n /**\n * Gets the circuit breaker config for a specific node.\n * Priority: node config > flow nodeTypeOverrides > flow defaults\n */\n const getCircuitBreakerConfigForNode = (\n node: FlowNode<any, any, UploadistaError>,\n ): CircuitBreakerConfig | undefined => {\n // Get node-level config from the resolved node\n const nodeConfig = node.circuitBreaker as\n | FlowCircuitBreakerConfig\n | undefined;\n\n // Get flow-level config for this node type (using nodeTypeId for stable identification)\n const flowNodeTypeConfig = node.nodeTypeId\n ? circuitBreakerConfig?.nodeTypeOverrides?.[node.nodeTypeId]\n : undefined;\n\n // Get flow defaults\n const flowDefaults = circuitBreakerConfig?.defaults;\n\n // If nothing is configured, return undefined (circuit breaker disabled)\n if (!nodeConfig && !flowNodeTypeConfig && !flowDefaults) {\n return undefined;\n }\n\n // Merge configs with priority: node > nodeTypeOverrides > defaults\n return {\n ...flowDefaults,\n ...flowNodeTypeConfig,\n ...nodeConfig,\n } as CircuitBreakerConfig;\n };\n\n // Build adjacency list for topological sorting\n const buildGraph = () => {\n const graph: Record<string, string[]> = {};\n const inDegree: Record<string, number> = {};\n const reverseGraph: Record<string, string[]> = {};\n\n // Initialize\n nodes.forEach((node: any) => {\n graph[node.id] = [];\n reverseGraph[node.id] = [];\n inDegree[node.id] = 0;\n });\n\n // Build edges\n edges.forEach((edge: any) => {\n graph[edge.source]?.push(edge.target);\n reverseGraph[edge.target]?.push(edge.source);\n inDegree[edge.target] = (inDegree[edge.target] || 0) + 1;\n });\n\n return { graph, reverseGraph, inDegree };\n };\n\n // Topological sort to determine execution order\n const topologicalSort = () => {\n const { graph, inDegree } = buildGraph();\n const queue: string[] = [];\n const result: string[] = [];\n\n // Add nodes with no incoming edges\n Object.keys(inDegree).forEach((nodeId) => {\n if (inDegree[nodeId] === 0) {\n queue.push(nodeId);\n }\n });\n\n while (queue.length > 0) {\n const current = queue.shift();\n if (!current) {\n throw new Error(\"No current node found\");\n }\n result.push(current);\n\n graph[current]?.forEach((neighbor: any) => {\n inDegree[neighbor] = (inDegree[neighbor] || 0) - 1;\n if (inDegree[neighbor] === 0) {\n queue.push(neighbor);\n }\n });\n }\n\n return result;\n };\n\n // Evaluate condition for conditional nodes using Effect\n const evaluateCondition = (\n node: FlowNode<any, any, UploadistaError>,\n data: unknown,\n ): Effect.Effect<boolean, never> => {\n if (!node.condition) return Effect.succeed(true);\n\n const { field, operator, value } = node.condition;\n const dataRecord = data as Record<string, unknown>;\n const metadata = dataRecord?.metadata as\n | Record<string, unknown>\n | undefined;\n const fieldValue = metadata?.[field] || dataRecord?.[field];\n\n const result = (() => {\n switch (operator) {\n case \"equals\":\n return fieldValue === value;\n case \"notEquals\":\n return fieldValue !== value;\n case \"greaterThan\":\n return Number(fieldValue) > Number(value);\n case \"lessThan\":\n return 
Number(fieldValue) < Number(value);\n case \"contains\":\n return String(fieldValue).includes(String(value));\n case \"startsWith\":\n return String(fieldValue).startsWith(String(value));\n default:\n return true;\n }\n })();\n\n return Effect.succeed(result);\n };\n\n // Get all inputs for a node\n const getNodeInputs = (\n nodeId: string,\n nodeResults: Map<string, unknown>,\n ) => {\n const { reverseGraph } = buildGraph();\n const incomingNodes = reverseGraph[nodeId] || [];\n const inputs: Record<string, unknown> = {};\n\n incomingNodes.forEach((sourceNodeId: any) => {\n const result = nodeResults.get(sourceNodeId);\n if (result !== undefined) {\n inputs[sourceNodeId] = result;\n }\n });\n\n return inputs;\n };\n\n // Map flow inputs to input nodes\n const mapFlowInputsToNodes = (\n flowInputs: Record<string, z.infer<TFlowInputSchema>>,\n ) => {\n const inputNodes = nodes.filter((node: any) => node.type === \"input\");\n const mappedInputs: Record<string, z.infer<TFlowInputSchema>> = {};\n\n inputNodes.forEach((node: any) => {\n if (\n flowInputs &&\n typeof flowInputs === \"object\" &&\n node.id in flowInputs\n ) {\n mappedInputs[node.id] = inputSchema.parse(flowInputs[node.id]);\n }\n });\n\n return mappedInputs;\n };\n\n // Utility to detect sink nodes (nodes with no outgoing edges)\n const isSink = (nodeId: string): boolean => {\n return !edges.some((edge) => edge.source === nodeId);\n };\n\n // Utility to check if a node should be included in outputs\n // Includes both sink nodes (topology-based) and nodes with keepOutput flag\n const shouldIncludeInOutputs = (nodeId: string): boolean => {\n const node = nodes.find((n: any) => n.id === nodeId);\n return isSink(nodeId) || node?.keepOutput === true;\n };\n\n // Collect outputs from sink nodes (nodes with no outgoing edges) and nodes with keepOutput\n const collectFlowOutputs = (\n nodeResults: Map<string, unknown>,\n ): Record<string, z.infer<TFlowInputSchema>> => {\n const outputNodes = nodes.filter((node: any) =>\n shouldIncludeInOutputs(node.id),\n );\n const flowOutputs: Record<string, unknown> = {};\n\n outputNodes.forEach((node: any) => {\n const result = nodeResults.get(node.id);\n if (result !== undefined) {\n flowOutputs[node.id] = result;\n }\n });\n\n return flowOutputs as Record<string, z.infer<TFlowInputSchema>>;\n };\n\n // Collect typed outputs from sink nodes and keepOutput nodes with metadata\n const collectTypedOutputs = (\n nodeResults: Map<string, unknown>,\n nodeTypesMap: Map<string, string>,\n ): TypedOutput[] => {\n const outputNodes = nodes.filter((node: any) =>\n shouldIncludeInOutputs(node.id),\n );\n const typedOutputs: TypedOutput[] = [];\n\n outputNodes.forEach((node: any) => {\n const result = nodeResults.get(node.id);\n if (result !== undefined) {\n // Get the outputTypeId from the node types map (set from node execution results)\n const outputTypeId = nodeTypesMap.get(node.id);\n\n // Create TypedOutput with metadata\n typedOutputs.push({\n nodeId: node.id,\n nodeType: outputTypeId,\n data: result,\n timestamp: new Date().toISOString(),\n });\n }\n });\n\n return typedOutputs;\n };\n\n // Transfer an UploadFile from one storage to another\n const transferFileToTargetStorage = (\n file: UploadFile,\n targetStorageId: string,\n clientId: string | null,\n ): Effect.Effect<UploadFile, UploadistaError, UploadFileDataStores> => {\n return Effect.gen(function* () {\n // If file is already in target storage, no transfer needed\n if (file.storage.id === targetStorageId) {\n return file;\n }\n\n // Get source 
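The helpers above encode the output-selection rule: a node's result becomes a flow output when the node is a sink (no outgoing edges) or is flagged `keepOutput`. A compact sketch of the same rule, with trimmed-down `Node` and `Edge` shapes:

```typescript
// Minimal sketch of the output-selection rule described above.
type Edge = { source: string; target: string };
type Node = { id: string; keepOutput?: boolean };

const isSink = (nodeId: string, edges: Edge[]): boolean =>
  !edges.some((edge) => edge.source === nodeId);

const outputNodeIds = (nodes: Node[], edges: Edge[]): string[] =>
  nodes
    .filter((n) => isSink(n.id, edges) || n.keepOutput === true)
    .map((n) => n.id);

const edges: Edge[] = [
  { source: "input", target: "resize" },
  { source: "resize", target: "store" },
];
const nodes: Node[] = [
  { id: "input" },
  { id: "resize", keepOutput: true }, // kept despite outgoing edges
  { id: "store" },
];

console.log(outputNodeIds(nodes, edges)); // ["resize", "store"]
```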
and target data stores\n const dataStores = yield* UploadFileDataStores;\n const sourceDataStore = yield* dataStores.getDataStore(\n file.storage.id,\n clientId,\n );\n const targetDataStore = yield* dataStores.getDataStore(\n targetStorageId,\n clientId,\n );\n\n // Read file from source storage\n const fileData = yield* sourceDataStore.read(file.id);\n\n // Create stream from file data\n const dataStream = Stream.make(fileData);\n\n // Create new file record in target storage\n const transferredFile: UploadFile = {\n ...file,\n storage: {\n id: targetStorageId,\n type: file.storage.type, // Keep same type for now\n },\n };\n\n const createdFile = yield* targetDataStore.create(transferredFile);\n\n // Write file data to target storage\n yield* targetDataStore.write(\n {\n file_id: createdFile.id,\n stream: dataStream,\n offset: 0,\n },\n {},\n );\n\n return createdFile;\n });\n };\n\n // Execute a single node using Effect\n const executeNode = (\n nodeId: string,\n storageId: string,\n nodeInputs: Record<string, z.infer<TFlowInputSchema>>,\n nodeResults: Map<string, unknown>,\n nodeMap: Map<string, FlowNode<any, any, UploadistaError>>,\n jobId: string,\n clientId: string | null,\n circuitBreakerRegistry: DistributedCircuitBreakerRegistry | null,\n ): Effect.Effect<\n {\n nodeId: string;\n result: unknown;\n success: boolean;\n waiting: boolean;\n nodeType?: string;\n },\n UploadistaError,\n UploadFileDataStores\n > => {\n return Effect.gen(function* () {\n const node = nodeMap.get(nodeId);\n if (!node) {\n return yield* UploadistaError.fromCode(\n \"FLOW_NODE_NOT_FOUND\",\n ).toEffect();\n }\n\n // Check job status before executing node\n if (checkJobStatus) {\n const status = yield* checkJobStatus(jobId);\n if (status === \"paused\") {\n // Flow was paused by user - stop execution gracefully\n return yield* UploadistaError.fromCode(\"FLOW_PAUSED\", {\n cause: `Flow ${flowId} was paused by user at job ${jobId}`,\n }).toEffect();\n }\n if (status === \"cancelled\") {\n // Flow was cancelled by user - stop execution\n return yield* UploadistaError.fromCode(\"FLOW_CANCELLED\", {\n cause: `Flow ${flowId} was cancelled by user at job ${jobId}`,\n }).toEffect();\n }\n }\n\n // Emit NodeStart event if provided\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeStart,\n nodeName: node.name,\n nodeType: node.type,\n });\n }\n\n // Get retry configuration\n const maxRetries = node.retry?.maxRetries ?? 0;\n const baseDelay = node.retry?.retryDelay ?? 1000;\n const useExponentialBackoff = node.retry?.exponentialBackoff ?? true;\n\n // Get circuit breaker configuration for this node\n const cbConfig = getCircuitBreakerConfigForNode(node);\n const circuitBreaker =\n cbConfig?.enabled && node.nodeTypeId && circuitBreakerRegistry\n ? 
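`getCircuitBreakerConfigForNode` above merges three configuration layers purely by spread order, node config last so it wins. A standalone sketch of that precedence; the field names here are illustrative stand-ins, not the package's actual `CircuitBreakerConfig` keys:

```typescript
// Sketch of the three-level precedence described above:
// node-level config wins over per-node-type overrides, which win over
// flow defaults. Field names are illustrative.
type CircuitBreakerConfig = {
  enabled?: boolean;
  failureThreshold?: number;
  resetTimeoutMs?: number;
};

function mergeCircuitBreakerConfig(
  nodeConfig?: CircuitBreakerConfig,
  nodeTypeOverride?: CircuitBreakerConfig,
  flowDefaults?: CircuitBreakerConfig,
): CircuitBreakerConfig | undefined {
  if (!nodeConfig && !nodeTypeOverride && !flowDefaults) {
    return undefined; // nothing configured: circuit breaker disabled
  }
  // Later spreads override earlier ones: defaults < override < node.
  return { ...flowDefaults, ...nodeTypeOverride, ...nodeConfig };
}

console.log(
  mergeCircuitBreakerConfig(
    { failureThreshold: 3 },
    { failureThreshold: 10, resetTimeoutMs: 30_000 },
    { enabled: true, failureThreshold: 5 },
  ),
); // { enabled: true, failureThreshold: 3, resetTimeoutMs: 30000 }
```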
circuitBreakerRegistry.getOrCreate(node.nodeTypeId, cbConfig)\n : null;\n\n // Check circuit breaker before attempting execution\n if (circuitBreaker) {\n const {\n allowed,\n state: cbState,\n failureCount: cbFailureCount,\n } = yield* circuitBreaker.allowRequest();\n\n if (!allowed) {\n const fallback = circuitBreaker.getFallback();\n\n yield* Effect.logWarning(\n `Circuit breaker OPEN for node type \"${node.nodeTypeId}\" - applying fallback`,\n );\n\n // Handle fallback based on configuration\n if (fallback.type === \"skip\") {\n // Skip the node but continue flow execution\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeEnd,\n nodeName: node.name,\n });\n }\n\n // For skip fallback, we need to pass through some value\n // Get the first input as pass-through data\n const passThruInput = nodeInputs[nodeId];\n return {\n nodeId,\n result: passThruInput,\n success: true,\n waiting: false,\n };\n }\n\n if (fallback.type === \"default\") {\n // Return configured default value\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeEnd,\n nodeName: node.name,\n result: fallback.value,\n });\n }\n return {\n nodeId,\n result: fallback.value,\n success: true,\n waiting: false,\n };\n }\n\n // Default: fail immediately\n return yield* UploadistaError.fromCode(\"CIRCUIT_BREAKER_OPEN\", {\n body: `Circuit breaker is open for node type \"${node.name}\"`,\n details: {\n nodeType: node.name,\n nodeId,\n state: cbState,\n failureCount: cbFailureCount,\n },\n }).toEffect();\n }\n }\n\n let retryCount = 0;\n let lastError: UploadistaError | null = null;\n\n // Retry loop\n while (retryCount <= maxRetries) {\n try {\n // Prepare input data for the node\n let nodeInput: unknown;\n let nodeInputsForExecution: Record<string, unknown> = {};\n\n if (node.type === \"input\") {\n // For input nodes, use the mapped flow input\n nodeInput = nodeInputs[nodeId];\n if (nodeInput === undefined) {\n yield* Effect.logError(\n `Input node ${nodeId} has no input data`,\n );\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(`Input node ${nodeId} has no input data`),\n }).toEffect();\n }\n } else {\n // Get all inputs for the node\n nodeInputsForExecution = getNodeInputs(nodeId, nodeResults);\n\n if (Object.keys(nodeInputsForExecution).length === 0) {\n yield* Effect.logError(`Node ${nodeId} has no input data`);\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(`Node ${nodeId} has no input data`),\n }).toEffect();\n }\n\n // For single input nodes, use the first input\n if (!node.multiInput) {\n const firstInputKey = Object.keys(nodeInputsForExecution)[0];\n if (!firstInputKey) {\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(`Node ${nodeId} has no input data`),\n }).toEffect();\n }\n nodeInput = nodeInputsForExecution[firstInputKey];\n } else {\n // For multi-input nodes, pass all inputs\n nodeInput = nodeInputsForExecution;\n }\n }\n\n // Check condition for conditional nodes\n if (node.type === \"conditional\") {\n const conditionResult = yield* evaluateCondition(node, nodeInput);\n if (!conditionResult) {\n // Skip this node - return success but no result\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeEnd,\n nodeName: node.name,\n });\n }\n return {\n nodeId,\n result: nodeInput,\n success: true,\n waiting: false,\n };\n }\n }\n\n // Execute the node\n const executionResult = yield* node.run({\n 
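When `allowRequest` reports the circuit open, execution above branches on the configured fallback: `skip` passes the node's input through, `default` substitutes a configured value, and anything else fails fast with CIRCUIT_BREAKER_OPEN. A plain-TypeScript model of just that decision; the `Fallback` union and thrown `Error` are stand-ins for the package's registry and `UploadistaError`:

```typescript
// Models the open-circuit fallback branches described above.
type Fallback =
  | { type: "skip" }
  | { type: "default"; value: unknown }
  | { type: "fail" };

function applyFallback(fallback: Fallback, passThroughInput: unknown): unknown {
  switch (fallback.type) {
    case "skip":
      return passThroughInput; // node skipped, flow continues
    case "default":
      return fallback.value; // configured stand-in result
    case "fail":
      throw new Error("CIRCUIT_BREAKER_OPEN"); // stands in for UploadistaError
  }
}

console.log(applyFallback({ type: "default", value: { ok: true } }, null));
// { ok: true }
```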
data: nodeInput,\n inputs: nodeInputsForExecution,\n jobId,\n flowId,\n storageId,\n clientId,\n });\n\n // Handle execution result\n if (executionResult.type === \"waiting\") {\n // Node is waiting for more data - pause execution\n const result = executionResult.partialData;\n\n // Emit NodePause event with partial data result\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodePause,\n nodeName: node.name,\n partialData: result,\n });\n }\n\n return {\n nodeId,\n result,\n success: true,\n waiting: true,\n nodeType: executionResult.nodeType,\n };\n }\n\n // Node completed successfully\n let result = executionResult.data;\n\n // Auto-persistence and hooks for sink nodes and nodes with keepOutput\n if (shouldIncludeInOutputs(nodeId)) {\n // If result is an UploadFile, transfer to target storage if needed\n if (isUploadFile(result) && result.storage.id !== storageId) {\n yield* Effect.logDebug(\n `Auto-persisting output node ${nodeId} output from ${result.storage.id} to ${storageId}`,\n );\n result = yield* transferFileToTargetStorage(\n result,\n storageId,\n clientId,\n );\n }\n\n // Call onNodeOutput hook if provided (for all sink outputs)\n if (config.hooks?.onNodeOutput) {\n yield* Effect.logDebug(\n `Calling onNodeOutput hook for sink node ${nodeId}`,\n );\n const hookResult = config.hooks.onNodeOutput({\n output: result,\n nodeId,\n flowId,\n jobId,\n storageId,\n clientId,\n });\n\n // Support both Effect and Promise\n result = yield* (Effect.isEffect(hookResult)\n ? hookResult\n : Effect.promise(() => hookResult as Promise<unknown>));\n }\n }\n\n // Record success with circuit breaker\n if (circuitBreaker) {\n yield* circuitBreaker.recordSuccess();\n }\n\n // Emit NodeEnd event with result\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeEnd,\n nodeName: node.name,\n result,\n });\n }\n\n return {\n nodeId,\n result,\n success: true,\n waiting: false,\n nodeType: executionResult.nodeType,\n };\n } catch (error) {\n // Store the error\n lastError =\n error instanceof UploadistaError\n ? error\n : UploadistaError.fromCode(\"FLOW_NODE_ERROR\", { cause: error });\n\n // Record failure with circuit breaker (on each retry attempt)\n if (circuitBreaker) {\n yield* circuitBreaker.recordFailure(lastError.body);\n }\n\n // Check if we should retry\n if (retryCount < maxRetries) {\n retryCount++;\n\n // Calculate delay with exponential backoff if enabled\n const delay = useExponentialBackoff\n ? 
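The `onNodeOutput` hook above may return either an Effect or a Promise, and both are folded into the Effect world with `Effect.isEffect`/`Effect.promise`. A minimal sketch of that bridging (`runHook` is our name; note `Effect.promise` assumes the promise does not reject):

```typescript
import { Effect } from "effect";

// Normalize a hook result that may be an Effect or a Promise.
type HookResult<T> = Effect.Effect<T> | Promise<T>;

const runHook = <T>(result: HookResult<T>): Effect.Effect<T> =>
  Effect.isEffect(result)
    ? (result as Effect.Effect<T>)
    : Effect.promise(() => result as Promise<T>);

// Usage: both hooks behave identically from the flow's point of view.
const effectHook = runHook(Effect.succeed("from-effect"));
const promiseHook = runHook(Promise.resolve("from-promise"));

Effect.runPromise(Effect.all([effectHook, promiseHook])).then(console.log);
// ["from-effect", "from-promise"]
```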
baseDelay * 2 ** (retryCount - 1)\n : baseDelay;\n\n // Log retry attempt\n yield* Effect.logWarning(\n `Node ${nodeId} (${node.name}) failed, retrying (${retryCount}/${maxRetries}) after ${delay}ms`,\n );\n\n // Wait before retrying\n yield* Effect.sleep(delay);\n\n // Continue to next iteration of retry loop\n continue;\n }\n\n // No more retries - emit final error event\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeError,\n nodeName: node.name,\n error: lastError.body,\n retryCount,\n });\n }\n\n return yield* lastError.toEffect();\n }\n }\n\n // If we get here, all retries failed\n if (lastError) {\n return yield* lastError.toEffect();\n }\n\n // Should never reach here\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(\"Unexpected error in retry loop\"),\n }).toEffect();\n }).pipe(\n // Wrap node execution in a span for distributed tracing\n // Note: We get node info from the nodeMap since we're outside the Effect.gen scope\n // Use nodeTypeId for more descriptive span names (e.g., \"optimize-image\" vs \"optimize\")\n (() => {\n const node = nodeMap.get(nodeId);\n const spanName = node?.nodeTypeId ?? node?.type ?? \"unknown\";\n return Effect.withSpan(`node-${spanName}`, {\n attributes: {\n \"node.id\": nodeId,\n \"node.type\": node?.type ?? \"unknown\",\n \"node.type_id\": node?.nodeTypeId ?? \"unknown\",\n \"node.name\": node?.name ?? \"unknown\",\n \"flow.id\": flowId,\n \"flow.job_id\": jobId,\n },\n });\n })(),\n );\n };\n\n // Internal execution function that can start fresh or resume\n const executeFlow = ({\n inputs,\n storageId,\n jobId,\n resumeFrom,\n clientId,\n }: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId: string;\n resumeFrom?: {\n executionOrder: string[];\n nodeResults: Map<string, unknown>;\n currentIndex: number;\n };\n clientId: string | null;\n }): Effect.Effect<\n | {\n type: \"completed\";\n result: Record<string, z.infer<TFlowOutputSchema>>;\n outputs?: TypedOutput[];\n }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n },\n UploadistaError,\n UploadFileDataStores\n > => {\n return Effect.gen(function* () {\n // Get circuit breaker store from context (optional - if not provided, circuit breakers are disabled)\n const circuitBreakerStore = yield* Effect.serviceOption(\n CircuitBreakerStoreService,\n );\n const circuitBreakerRegistry = circuitBreakerStore._tag === \"Some\"\n ? 
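The executor reads `CircuitBreakerStoreService` with `Effect.serviceOption`, Effect's optional-service pattern: the program degrades gracefully instead of failing when no layer provides the tag. A self-contained sketch with a hypothetical `Metrics` service:

```typescript
import { Context, Effect, Layer, Option } from "effect";

// Hypothetical optional service used only for this sketch.
class Metrics extends Context.Tag("Metrics")<
  Metrics,
  { count: (name: string) => Effect.Effect<void> }
>() {}

const program = Effect.gen(function* () {
  const metrics = yield* Effect.serviceOption(Metrics);
  if (Option.isSome(metrics)) {
    yield* metrics.value.count("flow.start"); // feature enabled
    return "metrics enabled";
  }
  return "metrics disabled"; // feature gracefully off
});

// Without the layer:
Effect.runPromise(program).then(console.log); // "metrics disabled"

// With the layer:
const live = Layer.succeed(Metrics, {
  count: (name) => Effect.log(`count: ${name}`),
});
Effect.runPromise(Effect.provide(program, live)).then(console.log);
// "metrics enabled"
```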
new DistributedCircuitBreakerRegistry(circuitBreakerStore.value)\n : null;\n\n // Emit FlowStart event only if starting fresh\n if (!resumeFrom && onEvent) {\n yield* onEvent({\n jobId,\n eventType: EventType.FlowStart,\n flowId,\n });\n }\n\n // Map flow inputs to input nodes\n const nodeInputs = mapFlowInputsToNodes(inputs || {});\n\n // Get execution order and results - either fresh or from resume state\n let executionOrder: string[];\n let nodeResults: Map<string, unknown>;\n let startIndex: number;\n\n if (resumeFrom) {\n // Resume from saved state\n executionOrder = resumeFrom.executionOrder;\n nodeResults = resumeFrom.nodeResults;\n startIndex = resumeFrom.currentIndex;\n } else {\n // Start fresh\n executionOrder = topologicalSort();\n nodeResults = new Map<string, unknown>();\n startIndex = 0;\n }\n\n // Track nodeTypes for typed outputs\n const nodeTypes = new Map<string, string>();\n\n // If resuming, restore any nodeTypes from previous execution\n if (resumeFrom) {\n // nodeTypes would need to be restored from job state if implementing pause/resume\n // For now, fresh starts only track types going forward\n }\n\n // Check for cycles\n if (executionOrder.length !== nodes.length) {\n return yield* UploadistaError.fromCode(\"FLOW_CYCLE_ERROR\").toEffect();\n }\n\n // Create node map for quick lookup\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n\n // Determine execution strategy\n const useParallelExecution = config.parallelExecution?.enabled ?? false;\n\n if (useParallelExecution) {\n // Parallel execution using execution levels\n yield* Effect.logDebug(\n `Flow ${flowId}: Executing in parallel mode (maxConcurrency: ${config.parallelExecution?.maxConcurrency ?? 4})`,\n );\n\n const scheduler = new ParallelScheduler({\n maxConcurrency: config.parallelExecution?.maxConcurrency ?? 
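The scheduler instantiated above groups the DAG into execution levels (via `groupNodesByExecutionLevel`, called just below). One plausible way to compute such levels, shown here as a sketch of the idea rather than the scheduler's actual implementation, is longest-path depth: a node's level is one past the deepest of its dependencies, so every level depends only on earlier ones:

```typescript
// Standalone sketch of grouping DAG nodes into execution levels.
type Edge = { source: string; target: string };

function groupByLevel(nodeIds: string[], edges: Edge[]): string[][] {
  const level = new Map<string, number>();
  const depsOf = (id: string) =>
    edges.filter((e) => e.target === id).map((e) => e.source);

  // Recursion assumes an acyclic graph, which the flow's cycle
  // check has already guaranteed at this point.
  const levelOf = (id: string): number => {
    const cached = level.get(id);
    if (cached !== undefined) return cached;
    const deps = depsOf(id);
    const value = deps.length === 0 ? 0 : 1 + Math.max(...deps.map(levelOf));
    level.set(id, value);
    return value;
  };

  const levels: string[][] = [];
  for (const id of nodeIds) {
    const l = levelOf(id);
    (levels[l] ??= []).push(id);
  }
  return levels;
}

console.log(
  groupByLevel(["a", "b", "c", "d"], [
    { source: "a", target: "b" },
    { source: "a", target: "c" },
    { source: "b", target: "d" },
    { source: "c", target: "d" },
  ]),
); // [["a"], ["b", "c"], ["d"]]
```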
4,\n });\n\n // Get execution levels\n const executionLevels = scheduler.groupNodesByExecutionLevel(\n nodes,\n edges,\n );\n\n yield* Effect.logDebug(\n `Flow ${flowId}: Grouped nodes into ${executionLevels.length} execution levels`,\n );\n\n // Build reverse graph for dependency checking\n const reverseGraph: Record<string, string[]> = {};\n nodes.forEach((node) => {\n reverseGraph[node.id] = [];\n });\n edges.forEach((edge) => {\n reverseGraph[edge.target]?.push(edge.source);\n });\n\n // Execute each level sequentially, but nodes within level in parallel\n for (const level of executionLevels) {\n yield* Effect.logDebug(\n `Flow ${flowId}: Executing level ${level.level} with nodes: ${level.nodes.join(\", \")}`,\n );\n\n // Create executor functions for all nodes in this level\n const nodeExecutors = level.nodes.map(\n (nodeId) => () =>\n Effect.gen(function* () {\n // Emit NodeResume event if we're resuming from a paused state at this node\n if (\n resumeFrom &&\n nodeId === resumeFrom.executionOrder[startIndex] &&\n onEvent\n ) {\n const node = nodeMap.get(nodeId);\n if (node) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeResume,\n nodeName: node.name,\n nodeType: node.type,\n });\n }\n }\n\n const nodeResult = yield* executeNode(\n nodeId,\n storageId,\n nodeInputs,\n nodeResults,\n nodeMap,\n jobId,\n clientId,\n circuitBreakerRegistry,\n );\n\n return { nodeId, nodeResult };\n }),\n );\n\n // Execute all nodes in this level in parallel\n const levelResults =\n yield* scheduler.executeNodesInParallel(nodeExecutors);\n\n // Process results and check for waiting nodes\n for (const { nodeId, nodeResult } of levelResults) {\n if (nodeResult.waiting) {\n // Node is waiting - pause execution and return state\n if (nodeResult.result !== undefined) {\n nodeResults.set(nodeId, nodeResult.result);\n if (nodeResult.nodeType) {\n nodeTypes.set(nodeId, nodeResult.nodeType);\n }\n }\n\n return {\n type: \"paused\" as const,\n nodeId,\n executionState: {\n executionOrder,\n currentIndex: executionOrder.indexOf(nodeId),\n inputs: nodeInputs,\n },\n };\n }\n\n if (nodeResult.success) {\n nodeResults.set(nodeId, nodeResult.result);\n if (nodeResult.nodeType) {\n nodeTypes.set(nodeId, nodeResult.nodeType);\n }\n }\n }\n }\n } else {\n // Sequential execution (original behavior)\n yield* Effect.logDebug(\n `Flow ${flowId}: Executing in sequential mode`,\n );\n\n for (let i = startIndex; i < executionOrder.length; i++) {\n const nodeId = executionOrder[i];\n if (!nodeId) {\n return yield* UploadistaError.fromCode(\n \"FLOW_NODE_NOT_FOUND\",\n ).toEffect();\n }\n\n // Emit NodeResume event if we're resuming from a paused state at this node\n if (resumeFrom && i === startIndex && onEvent) {\n const node = nodeMap.get(nodeId);\n if (node) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeResume,\n nodeName: node.name,\n nodeType: node.type,\n });\n }\n }\n\n const nodeResult = yield* executeNode(\n nodeId,\n storageId,\n nodeInputs,\n nodeResults,\n nodeMap,\n jobId,\n clientId,\n circuitBreakerRegistry,\n );\n\n if (nodeResult.waiting) {\n // Node is waiting - pause execution and return state\n if (nodeResult.result !== undefined) {\n nodeResults.set(nodeResult.nodeId, nodeResult.result);\n if (nodeResult.nodeType) {\n nodeTypes.set(nodeResult.nodeId, nodeResult.nodeType);\n }\n }\n\n return {\n type: \"paused\" as const,\n nodeId: nodeResult.nodeId,\n executionState: {\n executionOrder,\n currentIndex: i, // Stay at current index to re-execute this 
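Within a level, the executors above run concurrently up to `maxConcurrency` while levels themselves stay sequential. `Effect.all` with a `concurrency` option is the underlying mechanism; a runnable sketch with simulated node work (the node ids are hypothetical):

```typescript
import { Effect } from "effect";

// One execution level, run with bounded concurrency.
const makeTask = (id: string) =>
  Effect.gen(function* () {
    yield* Effect.sleep(10); // simulate node work
    return { nodeId: id, result: `${id}-done` };
  });

const level = ["resize", "optimize", "watermark"];

const runLevel = Effect.all(level.map(makeTask), { concurrency: 4 });

Effect.runPromise(runLevel).then((results) =>
  console.log(results.map((r) => r.nodeId)),
); // ["resize", "optimize", "watermark"]
```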
node on resume\n inputs: nodeInputs,\n },\n };\n }\n\n if (nodeResult.success) {\n nodeResults.set(nodeResult.nodeId, nodeResult.result);\n if (nodeResult.nodeType) {\n nodeTypes.set(nodeResult.nodeId, nodeResult.nodeType);\n }\n }\n }\n }\n\n // All nodes completed - collect outputs\n const finalResult = collectFlowOutputs(nodeResults);\n const typedOutputs = collectTypedOutputs(nodeResults, nodeTypes);\n\n const finalResultSchema = z.record(z.string(), outputSchema);\n\n // Validate the final result against the output schema\n const parseResult = finalResultSchema.safeParse(finalResult);\n if (!parseResult.success) {\n const validationError = `Flow output validation failed: ${parseResult.error.message}. Expected outputs: ${JSON.stringify(Object.keys(collectFlowOutputs(nodeResults)))}. Output nodes (sinks + keepOutput): ${nodes\n .filter((n: any) => shouldIncludeInOutputs(n.id))\n .map((n: any) => n.id)\n .join(\", \")}`;\n\n // Emit FlowError event for validation failure\n if (onEvent) {\n yield* onEvent({\n jobId,\n eventType: EventType.FlowError,\n flowId,\n error: validationError,\n });\n }\n return yield* UploadistaError.fromCode(\n \"FLOW_OUTPUT_VALIDATION_ERROR\",\n {\n body: validationError,\n cause: parseResult.error,\n },\n ).toEffect();\n }\n const validatedResult = parseResult.data;\n\n // Emit FlowEnd event with typed outputs\n if (onEvent) {\n yield* onEvent({\n jobId,\n eventType: EventType.FlowEnd,\n flowId,\n outputs: typedOutputs,\n result: validatedResult, // Keep for backward compatibility\n });\n }\n\n return {\n type: \"completed\" as const,\n result: validatedResult,\n outputs: typedOutputs,\n };\n });\n };\n\n const run = ({\n inputs,\n storageId,\n jobId,\n clientId,\n }: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId: string;\n clientId: string | null;\n }): Effect.Effect<\n | {\n type: \"completed\";\n result: Record<string, z.infer<TFlowOutputSchema>>;\n outputs?: TypedOutput[];\n }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n },\n UploadistaError,\n TRequirements | UploadFileDataStores\n > => {\n return executeFlow({ inputs, storageId, jobId, clientId });\n };\n\n const resume = ({\n jobId,\n storageId,\n nodeResults,\n executionState,\n clientId,\n }: {\n jobId: string;\n storageId: string;\n nodeResults: Record<string, unknown>;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n clientId: string | null;\n }): Effect.Effect<\n | {\n type: \"completed\";\n result: Record<string, z.infer<TFlowOutputSchema>>;\n outputs?: TypedOutput[];\n }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n },\n UploadistaError,\n TRequirements | UploadFileDataStores\n > => {\n return executeFlow({\n inputs: executionState.inputs,\n storageId,\n jobId,\n resumeFrom: {\n executionOrder: executionState.executionOrder,\n nodeResults: new Map(Object.entries(nodeResults)),\n currentIndex: executionState.currentIndex,\n },\n clientId,\n });\n };\n\n const validateTypes = () => {\n // Convert FlowNode to FlowNode for validation\n const compatibleNodes = nodes as FlowNode<any, any>[];\n return typeValidator.validateFlow(compatibleNodes, edges);\n };\n\n const validateInputs = (inputs: unknown) => {\n return 
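Once all nodes complete, the collected sink outputs are validated against `z.record(z.string(), outputSchema)` before `FlowEnd` is emitted. A standalone sketch of that final check, with an illustrative `outputSchema`:

```typescript
import { z } from "zod";

// Sink results keyed by node id, checked against a record schema.
const outputSchema = z.object({
  id: z.string(),
  url: z.string(),
});

const finalResultSchema = z.record(z.string(), outputSchema);

const parsed = finalResultSchema.safeParse({
  storage: { id: "file_1", url: "https://example.com/file_1" },
});

if (parsed.success) {
  console.log(Object.keys(parsed.data)); // ["storage"]
} else {
  // Upstream this becomes FLOW_OUTPUT_VALIDATION_ERROR with a detailed body.
  console.error(parsed.error.message);
}
```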
typeValidator.validateData(inputs, inputSchema);\n };\n\n const validateOutputs = (outputs: unknown) => {\n return typeValidator.validateData(outputs, outputSchema);\n };\n\n return {\n id: flowId,\n name,\n nodes,\n edges,\n inputSchema,\n outputSchema,\n onEvent,\n checkJobStatus,\n hooks: config.hooks,\n run,\n resume,\n validateTypes,\n validateInputs,\n validateOutputs,\n };\n });\n}\n","import { Context, Effect, Layer, Option, Runtime, Tracer } from \"effect\";\nimport type { z } from \"zod\";\nimport { UploadistaError } from \"../errors\";\nimport {\n createFlowWithSchema,\n EventType,\n type Flow,\n type FlowData,\n type FlowExecutionResult,\n getFlowData,\n runArgsSchema,\n type TypedOutput,\n} from \"../flow\";\nimport type {\n EventEmitter,\n KvStore,\n UploadFile,\n WebSocketConnection,\n} from \"../types\";\nimport type { FlowJobTraceContext } from \"./types/flow-job\";\n\n/**\n * WaitUntil callback type for keeping background tasks alive.\n * Used in serverless environments like Cloudflare Workers to prevent\n * premature termination of background operations.\n *\n * @param promise - Promise representing the background task to keep alive\n */\nexport type WaitUntilCallback = (promise: Promise<unknown>) => void;\n\n/**\n * Optional WaitUntil service for background task management.\n * When provided, allows flows to execute beyond the HTTP response lifecycle.\n *\n * In Cloudflare Workers, use `ctx.executionCtx.waitUntil()`.\n * In other environments, this can be undefined (flows execute normally with Effect.fork).\n *\n * This service uses Effect's optional service pattern. Access it via:\n * ```typescript\n * const waitUntil = yield* FlowWaitUntil.optional;\n * if (Option.isSome(waitUntil)) {\n * // Use waitUntil.value\n * }\n * ```\n *\n * @see https://effect.website/docs/requirements-management/services/#optional-services\n */\nexport class FlowWaitUntil extends Context.Tag(\"FlowWaitUntil\")<\n FlowWaitUntil,\n WaitUntilCallback\n>() {\n static optional = Effect.serviceOption(FlowWaitUntil);\n}\n\nimport { FlowEventEmitter, FlowJobKVStore } from \"../types\";\nimport { UploadEngine } from \"../upload\";\nimport { DeadLetterQueueService } from \"./dead-letter-queue\";\nimport type { FlowEvent } from \"./event\";\nimport type { FlowJob } from \"./types/flow-job\";\n\n/**\n * Flow provider interface that applications must implement.\n *\n * This interface defines how the FlowEngine retrieves flow definitions.\n * Applications provide their own implementation to load flows from a database,\n * configuration files, or any other source.\n *\n * @template TRequirements - Additional Effect requirements for flow execution\n *\n * @property getFlow - Retrieves a flow definition by ID with authorization check\n *\n * @example\n * ```typescript\n * // Implement a flow provider from database\n * const dbFlowProvider: FlowProviderShape = {\n * getFlow: (flowId, clientId) => Effect.gen(function* () {\n * // Load flow from database\n * const flowData = yield* db.getFlow(flowId);\n *\n * // Check authorization\n * if (flowData.ownerId !== clientId) {\n * return yield* Effect.fail(\n * UploadistaError.fromCode(\"FLOW_NOT_AUTHORIZED\")\n * );\n * }\n *\n * // Create flow instance\n * return createFlow(flowData);\n * })\n * };\n *\n * // Provide to FlowEngine\n * const flowProviderLayer = Layer.succeed(FlowProvider, dbFlowProvider);\n * ```\n */\nexport type FlowProviderShape<TRequirements = any> = {\n getFlow: (\n flowId: string,\n clientId: string | null,\n ) => Effect.Effect<Flow<any, any, 
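The `FlowWaitUntil` docs above show reading the service with `Effect.serviceOption`; providing it in a Cloudflare Worker is a single `Layer.succeed`. A sketch assuming the tag shape above, with the class redeclared and the Worker `ctx` typed minimally so the snippet stands alone:

```typescript
import { Context, Effect, Layer } from "effect";

// Redeclared here so the snippet stands alone; matches the tag above.
type WaitUntilCallback = (promise: Promise<unknown>) => void;

class FlowWaitUntil extends Context.Tag("FlowWaitUntil")<
  FlowWaitUntil,
  WaitUntilCallback
>() {}

// A Worker ExecutionContext provides waitUntil; typed minimally here
// (the full type lives in @cloudflare/workers-types).
declare const ctx: { waitUntil: (p: Promise<unknown>) => void };

// Bind the platform callback once, near the fetch handler.
const waitUntilLayer = Layer.succeed(FlowWaitUntil, (promise) =>
  ctx.waitUntil(promise),
);

// Anything provided with this layer can opt in via
// Effect.serviceOption(FlowWaitUntil), as the engine does below.
const program = Effect.log("flow scheduled").pipe(
  Effect.provide(waitUntilLayer),
);
```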
TRequirements>, UploadistaError>;\n};\n\n/**\n * Effect-TS context tag for the FlowProvider service.\n *\n * Applications must provide an implementation of FlowProviderShape\n * to enable the FlowEngine to retrieve flow definitions.\n *\n * @example\n * ```typescript\n * // Access FlowProvider in an Effect\n * const effect = Effect.gen(function* () {\n * const provider = yield* FlowProvider;\n * const flow = yield* provider.getFlow(\"flow123\", \"client456\");\n * return flow;\n * });\n * ```\n */\nexport class FlowProvider extends Context.Tag(\"FlowProvider\")<\n FlowProvider,\n FlowProviderShape<any>\n>() {}\n\n/**\n * FlowServer service interface.\n *\n * This is the core flow processing service that executes DAG-based file processing pipelines.\n * It manages flow execution, job tracking, node processing, pause/resume functionality,\n * and real-time event broadcasting.\n *\n * All operations return Effect types for composable, type-safe error handling.\n *\n * @property getFlow - Retrieves a flow definition by ID\n * @property getFlowData - Retrieves flow metadata (nodes, edges) without full flow instance\n * @property runFlow - Starts a new flow execution and returns immediately with job ID\n * @property resumeFlow - Resumes a paused flow with new data for a specific node\n * @property pauseFlow - Pauses a running flow (user-initiated pause)\n * @property cancelFlow - Cancels a running or paused flow and cleans up resources\n * @property getJobStatus - Retrieves current status and results of a flow job\n * @property subscribeToFlowEvents - Subscribes WebSocket to flow execution events\n * @property unsubscribeFromFlowEvents - Unsubscribes from flow events\n *\n * @example\n * ```typescript\n * // Execute a flow\n * const program = Effect.gen(function* () {\n * const server = yield* FlowEngine;\n *\n * // Start flow execution (returns immediately)\n * const job = yield* server.runFlow({\n * flowId: \"resize-optimize\",\n * storageId: \"s3-production\",\n * clientId: \"client123\",\n * inputs: {\n * input_1: { uploadId: \"upload_abc123\" }\n * }\n * });\n *\n * // Subscribe to events\n * yield* server.subscribeToFlowEvents(job.id, websocket);\n *\n * // Poll for status\n * const status = yield* server.getJobStatus(job.id);\n * console.log(status.status); // \"running\", \"paused\", \"completed\", \"failed\", or \"cancelled\"\n *\n * // User can pause the flow\n * yield* server.pauseFlow(job.id, \"client123\");\n *\n * return job;\n * });\n *\n * // Resume a paused flow\n * const resume = Effect.gen(function* () {\n * const server = yield* FlowEngine;\n *\n * // Flow paused waiting for user input at node \"approval_1\"\n * const job = yield* server.resumeFlow({\n * jobId: \"job123\",\n * nodeId: \"approval_1\",\n * newData: { approved: true },\n * clientId: \"client123\"\n * });\n *\n * return job;\n * });\n *\n * // Cancel a flow\n * const cancel = Effect.gen(function* () {\n * const server = yield* FlowEngine;\n *\n * // Cancel flow and cleanup intermediate files\n * const job = yield* server.cancelFlow(\"job123\", \"client123\");\n *\n * return job;\n * });\n *\n * // Check flow structure before execution\n * const inspect = Effect.gen(function* () {\n * const server = yield* FlowEngine;\n *\n * const flowData = yield* server.getFlowData(\"resize-optimize\", \"client123\");\n * console.log(\"Nodes:\", flowData.nodes);\n * console.log(\"Edges:\", flowData.edges);\n *\n * return flowData;\n * });\n * ```\n */\nexport type FlowEngineShape = {\n getFlow: <TRequirements>(\n flowId: 
string,\n clientId: string | null,\n ) => Effect.Effect<Flow<any, any, TRequirements>, UploadistaError>;\n\n getFlowData: (\n flowId: string,\n clientId: string | null,\n ) => Effect.Effect<FlowData, UploadistaError>;\n\n runFlow: <TRequirements>({\n flowId,\n storageId,\n clientId,\n inputs,\n }: {\n flowId: string;\n storageId: string;\n clientId: string | null;\n inputs: any;\n }) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;\n\n resumeFlow: <TRequirements>({\n jobId,\n nodeId,\n newData,\n clientId,\n }: {\n jobId: string;\n nodeId: string;\n newData: unknown;\n clientId: string | null;\n }) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;\n\n pauseFlow: (\n jobId: string,\n clientId: string | null,\n ) => Effect.Effect<FlowJob, UploadistaError>;\n\n cancelFlow: (\n jobId: string,\n clientId: string | null,\n ) => Effect.Effect<FlowJob, UploadistaError>;\n\n getJobStatus: (jobId: string) => Effect.Effect<FlowJob, UploadistaError>;\n\n subscribeToFlowEvents: (\n jobId: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n\n unsubscribeFromFlowEvents: (\n jobId: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n/**\n * Effect-TS context tag for the FlowEngine service.\n *\n * Use this tag to access the FlowEngine in an Effect context.\n * The server must be provided via a Layer or dependency injection.\n *\n * @example\n * ```typescript\n * // Access FlowEngine in an Effect\n * const flowEffect = Effect.gen(function* () {\n * const server = yield* FlowEngine;\n * const job = yield* server.runFlow({\n * flowId: \"my-flow\",\n * storageId: \"s3\",\n * clientId: null,\n * inputs: {}\n * });\n * return job;\n * });\n *\n * // Provide FlowEngine layer\n * const program = flowEffect.pipe(\n * Effect.provide(flowServer),\n * Effect.provide(flowProviderLayer),\n * Effect.provide(flowJobKvStore)\n * );\n * ```\n */\nexport class FlowEngine extends Context.Tag(\"FlowEngine\")<\n FlowEngine,\n FlowEngineShape\n>() {}\n\n/**\n * Legacy configuration options for FlowEngine.\n *\n * @deprecated Use Effect Layers and FlowProvider instead.\n * This type is kept for backward compatibility.\n *\n * @property getFlow - Function to retrieve flow definitions\n * @property kvStore - KV store for flow job metadata\n */\nexport type FlowEngineOptions = {\n getFlow: <TRequirements>({\n flowId,\n storageId,\n }: {\n flowId: string;\n storageId: string;\n }) => Promise<Flow<any, any, TRequirements>>;\n kvStore: KvStore<FlowJob>;\n};\n\nconst isResultUploadFile = (result: unknown): result is UploadFile => {\n return typeof result === \"object\" && result !== null && \"id\" in result;\n};\n\n// Helper to extract data from TypedOutput or return as-is\nconst extractResultData = (result: unknown): unknown => {\n if (\n typeof result === \"object\" &&\n result !== null &&\n \"nodeId\" in result &&\n \"data\" in result &&\n \"timestamp\" in result\n ) {\n // This looks like a TypedOutput, extract the data\n return (result as TypedOutput).data;\n }\n return result;\n};\n\n// Function to enhance a flow with event emission capabilities\nfunction withFlowEvents<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements,\n>(\n flow: Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements>,\n eventEmitter: EventEmitter<FlowEvent>,\n kvStore: KvStore<FlowJob>,\n): Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements> {\n // Shared helper to create onEvent callback for a given jobId\n const 
createOnEventCallback = (executionJobId: string) => {\n // Helper to update job in KV store\n const updateJobInStore = (updates: Partial<FlowJob>) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n yield* kvStore.set(executionJobId, {\n ...job,\n ...updates,\n updatedAt: new Date(),\n });\n }\n });\n\n // Create the onEvent callback that calls original onEvent, emits to eventEmitter, and updates job\n return (event: FlowEvent) =>\n Effect.gen(function* () {\n // Call the original onEvent from the flow if it exists\n // Catch errors to prevent them from blocking flow execution\n if (flow.onEvent) {\n yield* Effect.catchAll(flow.onEvent(event), (error) => {\n // Log the error but don't fail the flow\n Effect.logError(\"Original onEvent failed\", error);\n return Effect.succeed({ eventId: null });\n });\n }\n\n // Emit event\n yield* eventEmitter.emit(executionJobId, event);\n\n Effect.logInfo(\n `Updating job ${executionJobId} with event ${event.eventType}`,\n );\n\n // Update job based on event type\n switch (event.eventType) {\n case EventType.FlowStart:\n yield* updateJobInStore({ status: \"running\" });\n break;\n\n case EventType.FlowEnd:\n // Store typed outputs in job for client access\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job && event.outputs) {\n yield* kvStore.set(executionJobId, {\n ...job,\n result: event.outputs, // Store typed outputs array\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case EventType.FlowError:\n yield* updateJobInStore({\n status: \"failed\",\n error: event.error,\n });\n break;\n\n case EventType.NodeStart:\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const existingTask = job.tasks.find(\n (t) => t.nodeId === event.nodeId,\n );\n const updatedTasks = existingTask\n ? job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? {\n ...t,\n status: \"running\" as const,\n updatedAt: new Date(),\n }\n : t,\n )\n : [\n ...job.tasks,\n {\n nodeId: event.nodeId,\n status: \"running\" as const,\n createdAt: new Date(),\n updatedAt: new Date(),\n },\n ];\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case EventType.NodePause:\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const existingTask = job.tasks.find(\n (t) => t.nodeId === event.nodeId,\n );\n const updatedTasks = existingTask\n ? job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? {\n ...t,\n status: \"paused\" as const,\n result: event.partialData,\n updatedAt: new Date(),\n }\n : t,\n )\n : [\n ...job.tasks,\n {\n nodeId: event.nodeId,\n status: \"paused\" as const,\n result: event.partialData,\n createdAt: new Date(),\n updatedAt: new Date(),\n },\n ];\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case EventType.NodeResume:\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const updatedTasks = job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? 
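The `NodeStart` handler above upserts a task by `nodeId`: update the existing entry when present, append otherwise, so retries and resumes never duplicate task rows. A data-only sketch of that bookkeeping, with the `Task` shape trimmed to the fields used here:

```typescript
// Update-or-append task bookkeeping, as on NodeStart above.
type Task = {
  nodeId: string;
  status: "running" | "paused" | "completed" | "failed";
  createdAt: Date;
  updatedAt: Date;
};

function upsertTask(tasks: Task[], nodeId: string): Task[] {
  const now = new Date();
  const existing = tasks.find((t) => t.nodeId === nodeId);
  return existing
    ? tasks.map((t): Task =>
        t.nodeId === nodeId ? { ...t, status: "running", updatedAt: now } : t,
      )
    : [
        ...tasks,
        { nodeId, status: "running", createdAt: now, updatedAt: now },
      ];
}

const once = upsertTask([], "resize");
const twice = upsertTask(once, "resize"); // updated in place, not duplicated
console.log(twice.length); // 1
```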
{\n ...t,\n status: \"running\" as const,\n updatedAt: new Date(),\n }\n : t,\n );\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case EventType.NodeEnd:\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const updatedTasks = job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? {\n ...t,\n status: \"completed\" as const,\n result: event.result,\n updatedAt: new Date(),\n }\n : t,\n );\n\n // Track intermediate files for cleanup\n // Check if result is an UploadFile based on topology (sink vs non-sink) and keepOutput flag\n // A sink node is one with no outgoing edges\n const isSinkNode = !flow.edges.some(\n (edge) => edge.source === event.nodeId,\n );\n // Find the node to check if it has keepOutput enabled\n const node = flow.nodes.find((n: any) => n.id === event.nodeId);\n const hasKeepOutput = node?.keepOutput === true;\n\n const result = event.result;\n // Extract data from TypedOutput if present\n const resultData = extractResultData(result);\n\n let intermediateFiles = job.intermediateFiles || [];\n\n // Node should preserve output if: it's a sink OR has keepOutput enabled\n const shouldPreserveOutput = isSinkNode || hasKeepOutput;\n\n if (\n shouldPreserveOutput &&\n isResultUploadFile(resultData) &&\n resultData.id\n ) {\n // If this node should preserve output and it returns a file that was an intermediate file,\n // remove it from the intermediate files list (it's now a final output)\n intermediateFiles = intermediateFiles.filter(\n (fileId) => fileId !== resultData.id,\n );\n\n // Log when files are preserved due to keepOutput\n if (hasKeepOutput && !isSinkNode) {\n Effect.logInfo(\n `Preserving output from node ${event.nodeId} due to keepOutput flag`,\n );\n }\n } else if (\n !shouldPreserveOutput &&\n isResultUploadFile(resultData) &&\n resultData.id\n ) {\n // Only add to intermediate files if it's not a sink and doesn't have keepOutput\n if (!intermediateFiles.includes(resultData.id)) {\n intermediateFiles.push(resultData.id);\n }\n }\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n intermediateFiles,\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case EventType.NodeError:\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const updatedTasks = job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? 
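The `NodeEnd` handler above also maintains the cleanup list: outputs of sink or `keepOutput` nodes are promoted out of `intermediateFiles`, while other file results are queued for deletion when the flow finishes. A data-only sketch of that promotion/queueing rule (`trackIntermediate` is our name):

```typescript
// Intermediate-file bookkeeping, as in the NodeEnd handler above.
function trackIntermediate(
  intermediateFiles: string[],
  fileId: string,
  shouldPreserveOutput: boolean, // sink node OR keepOutput === true
): string[] {
  if (shouldPreserveOutput) {
    // Promote: a file that was intermediate is now a final output.
    return intermediateFiles.filter((id) => id !== fileId);
  }
  // Queue for cleanup when the flow completes, avoiding duplicates.
  return intermediateFiles.includes(fileId)
    ? intermediateFiles
    : [...intermediateFiles, fileId];
}

console.log(trackIntermediate(["f1"], "f1", true)); // [] (preserved)
console.log(trackIntermediate([], "f2", false)); // ["f2"] (cleaned up later)
```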
{\n ...t,\n status: \"failed\" as const,\n error: event.error,\n retryCount: event.retryCount,\n updatedAt: new Date(),\n }\n : t,\n );\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n error: event.error,\n updatedAt: new Date(),\n });\n }\n });\n break;\n }\n\n return { eventId: executionJobId };\n });\n };\n\n // Create checkJobStatus callback that reads from KV store\n const createCheckJobStatusCallback = (executionJobId: string) => {\n return (jobId: string) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n // Return only the statuses we care about for flow control\n if (job.status === \"paused\") return \"paused\" as const;\n if (job.status === \"cancelled\") return \"cancelled\" as const;\n return \"running\" as const;\n });\n };\n\n return {\n ...flow,\n run: (args: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId?: string;\n clientId: string | null;\n }) => {\n return Effect.gen(function* () {\n // Use provided jobId or generate a new one\n const executionJobId = args.jobId || crypto.randomUUID();\n\n const onEventCallback = createOnEventCallback(executionJobId);\n const checkJobStatusCallback =\n createCheckJobStatusCallback(executionJobId);\n\n // Create a new flow with the same configuration but with onEvent callback\n const flowWithEvents = yield* createFlowWithSchema({\n flowId: flow.id,\n name: flow.name,\n nodes: flow.nodes,\n edges: flow.edges,\n inputSchema: flow.inputSchema,\n outputSchema: flow.outputSchema,\n onEvent: onEventCallback,\n checkJobStatus: checkJobStatusCallback,\n });\n\n // Run the enhanced flow with consistent jobId\n const result = yield* flowWithEvents.run({\n ...args,\n jobId: executionJobId,\n clientId: args.clientId,\n });\n\n // Return the result directly (can be completed or paused)\n return result;\n });\n },\n resume: (args: {\n jobId: string;\n storageId: string;\n nodeResults: Record<string, unknown>;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n clientId: string | null;\n }) => {\n return Effect.gen(function* () {\n const executionJobId = args.jobId;\n\n const onEventCallback = createOnEventCallback(executionJobId);\n const checkJobStatusCallback =\n createCheckJobStatusCallback(executionJobId);\n\n // Create a new flow with the same configuration but with onEvent callback\n const flowWithEvents = yield* createFlowWithSchema({\n flowId: flow.id,\n name: flow.name,\n nodes: flow.nodes,\n edges: flow.edges,\n inputSchema: flow.inputSchema,\n outputSchema: flow.outputSchema,\n onEvent: onEventCallback,\n checkJobStatus: checkJobStatusCallback,\n });\n\n // Resume the enhanced flow\n const result = yield* flowWithEvents.resume(args);\n\n // Return the result directly (can be completed or paused)\n return result;\n });\n },\n };\n}\n\n// Core FlowServer implementation\nexport function createFlowEngine() {\n return Effect.gen(function* () {\n const flowProvider = yield* FlowProvider;\n const eventEmitter = yield* FlowEventEmitter;\n const kvStore = yield* FlowJobKVStore;\n const uploadEngine = yield* UploadEngine;\n const dlqOption = yield* DeadLetterQueueService.optional;\n\n const updateJob = (jobId: string, updates: Partial<FlowJob>) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return 
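`createCheckJobStatusCallback` above collapses the stored job status to the three values the execution loop reacts to between nodes. A sketch of that projection, with the `JobStatus` union assembled from the statuses this file uses:

```typescript
// Whatever the stored job status, the executing flow only needs to know
// whether to pause, stop, or carry on.
type JobStatus =
  | "started"
  | "running"
  | "paused"
  | "completed"
  | "failed"
  | "cancelled";

const toFlowControlStatus = (
  status: JobStatus,
): "paused" | "cancelled" | "running" => {
  if (status === "paused") return "paused";
  if (status === "cancelled") return "cancelled";
  return "running"; // everything else lets node execution proceed
};

console.log(toFlowControlStatus("completed")); // "running"
```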
yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n return yield* kvStore.set(jobId, { ...job, ...updates });\n });\n\n // Helper function to cleanup intermediate files\n const cleanupIntermediateFiles = (jobId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(jobId);\n if (\n !job ||\n !job.intermediateFiles ||\n job.intermediateFiles.length === 0\n ) {\n return;\n }\n\n yield* Effect.logInfo(\n `Cleaning up ${job.intermediateFiles.length} intermediate files for job ${jobId}`,\n );\n\n // Delete each intermediate file\n yield* Effect.all(\n job.intermediateFiles.map((fileId) =>\n Effect.gen(function* () {\n yield* uploadEngine.delete(fileId, clientId);\n yield* Effect.logDebug(`Deleted intermediate file ${fileId}`);\n }).pipe(\n Effect.catchAll((error) =>\n Effect.gen(function* () {\n yield* Effect.logWarning(\n `Failed to delete intermediate file ${fileId}: ${error}`,\n );\n return Effect.succeed(undefined);\n }),\n ),\n ),\n ),\n { concurrency: 5 },\n );\n\n // Clear the intermediateFiles array\n yield* updateJob(jobId, {\n intermediateFiles: [],\n });\n });\n\n // Helper function to add failed job to Dead Letter Queue\n const addToDeadLetterQueue = (jobId: string, error: UploadistaError) =>\n Effect.gen(function* () {\n if (Option.isNone(dlqOption)) {\n // DLQ not configured, skip\n yield* Effect.logDebug(\n `[FlowServer] DLQ not configured, skipping for job: ${jobId}`,\n );\n return;\n }\n\n const dlq = dlqOption.value;\n\n // Get the job to add to DLQ\n const job = yield* Effect.catchAll(kvStore.get(jobId), () =>\n Effect.succeed(null as FlowJob | null),\n );\n\n if (!job) {\n yield* Effect.logWarning(\n `[FlowServer] Job ${jobId} not found when adding to DLQ`,\n );\n return;\n }\n\n // Add to DLQ\n yield* Effect.catchAll(dlq.add(job, error), (dlqError) =>\n Effect.gen(function* () {\n yield* Effect.logError(\n `[FlowServer] Failed to add job ${jobId} to DLQ`,\n dlqError,\n );\n return Effect.succeed(undefined);\n }),\n );\n\n yield* Effect.logInfo(\n `[FlowServer] Added job ${jobId} to Dead Letter Queue`,\n );\n });\n\n /**\n * Captures the current Effect trace context for distributed tracing.\n * Uses Effect's `currentSpan` which properly integrates with @effect/opentelemetry.\n */\n const captureTraceContextEffect: Effect.Effect<\n FlowJobTraceContext | undefined\n > = Effect.gen(function* () {\n const spanOption = yield* Effect.currentSpan.pipe(Effect.option);\n return Option.match(spanOption, {\n onNone: () => undefined,\n onSome: (span) => ({\n traceId: span.traceId,\n spanId: span.spanId,\n traceFlags: span.sampled ? 
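`cleanupIntermediateFiles` above wraps every delete in `Effect.catchAll`, so one failure degrades to a warning instead of aborting the batch, and caps parallel deletions with `{ concurrency: 5 }`. A runnable sketch of that fault-tolerant pattern; `deleteFile` is a hypothetical stub:

```typescript
import { Effect } from "effect";

// Stub: one file fails to delete, the rest succeed.
const deleteFile = (fileId: string): Effect.Effect<void, Error> =>
  fileId === "bad"
    ? Effect.fail(new Error(`cannot delete ${fileId}`))
    : Effect.log(`deleted ${fileId}`);

const cleanup = (fileIds: string[]) =>
  Effect.all(
    fileIds.map((fileId) =>
      deleteFile(fileId).pipe(
        // Per-file recovery: a failure becomes a warning, not an abort.
        Effect.catchAll((error) =>
          Effect.logWarning(`Failed to delete ${fileId}: ${error}`),
        ),
      ),
    ),
    { concurrency: 5 }, // bound in-flight deletions
  );

Effect.runPromise(cleanup(["a", "bad", "c"])).then(() => console.log("done"));
```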
1 : 0,\n }),\n });\n });\n\n // Helper function to execute flow in background\n const executeFlowInBackground = ({\n jobId,\n flow,\n storageId,\n clientId,\n inputs,\n }: {\n jobId: string;\n flow: Flow<any, any, any>;\n storageId: string;\n clientId: string | null;\n inputs: Record<string, any>;\n }) =>\n Effect.gen(function* () {\n console.log(\n `[FlowServer] executeFlowInBackground started for job: ${jobId}`,\n );\n\n // Capture the parent \"flow\" span's trace context FIRST\n // This allows flow-execution-resume to be a sibling of flow-execution\n // under the same parent \"flow\" span\n const traceContext = yield* captureTraceContextEffect;\n\n // Update job status to running and store trace context\n yield* updateJob(jobId, {\n status: \"running\",\n traceContext,\n });\n\n // Now run the actual flow execution inside a child span\n const result = yield* Effect.gen(function* () {\n console.log(`[FlowServer] Creating flowWithEvents for job: ${jobId}`);\n const flowWithEvents = withFlowEvents(flow, eventEmitter, kvStore);\n\n console.log(`[FlowServer] Running flow for job: ${jobId}`);\n // Run the flow with the consistent jobId\n const flowResult = yield* flowWithEvents.run({\n inputs,\n storageId,\n jobId,\n clientId,\n });\n\n console.log(\n `[FlowServer] Flow completed for job: ${jobId}, result type: ${flowResult.type}`,\n );\n\n // Handle result based on type\n if (flowResult.type === \"paused\") {\n // Update job as paused (node results are in tasks, not executionState)\n yield* updateJob(jobId, {\n status: \"paused\",\n pausedAt: flowResult.nodeId,\n executionState: flowResult.executionState,\n updatedAt: new Date(),\n });\n } else {\n // Update job as completed\n // Note: result field is already set by FlowEnd event handler with TypedOutput[]\n yield* updateJob(jobId, {\n status: \"completed\",\n updatedAt: new Date(),\n endedAt: new Date(),\n });\n\n // Cleanup intermediate files\n yield* cleanupIntermediateFiles(jobId, clientId);\n }\n\n return flowResult;\n }).pipe(\n // flow-execution is a CHILD span of the parent \"flow\" span\n Effect.withSpan(\"flow-execution\", {\n attributes: {\n \"flow.id\": flow.id,\n \"flow.name\": flow.name,\n \"flow.job_id\": jobId,\n \"flow.storage_id\": storageId,\n \"flow.node_count\": flow.nodes.length,\n },\n }),\n );\n\n return result;\n }).pipe(\n // Parent \"flow\" span wraps the entire flow lifecycle\n // flow-execution and flow-execution-resume will be children of this span\n Effect.withSpan(\"flow\", {\n attributes: {\n \"flow.id\": flow.id,\n \"flow.name\": flow.name,\n \"flow.job_id\": jobId,\n \"flow.storage_id\": storageId,\n \"flow.node_count\": flow.nodes.length,\n },\n }),\n Effect.catchAll((error) =>\n Effect.gen(function* () {\n yield* Effect.logError(\"Flow execution failed\", error);\n\n // Convert error to a proper message\n const errorMessage =\n error instanceof UploadistaError ? 
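`captureTraceContextEffect` above relies on `Effect.currentSpan`, which fails when no span is active, so `Effect.option` folds that failure into `Option.none`, and the sampled flag becomes the W3C `traceFlags` bit. A runnable sketch of the same capture:

```typescript
import { Effect, Option } from "effect";

// Capture the active span's identity, or undefined outside a span.
const captureTraceContext = Effect.gen(function* () {
  const spanOption = yield* Effect.currentSpan.pipe(Effect.option);
  return Option.match(spanOption, {
    onNone: () => undefined,
    onSome: (span) => ({
      traceId: span.traceId,
      spanId: span.spanId,
      traceFlags: span.sampled ? 1 : 0,
    }),
  });
});

// Captured inside a span; would be undefined without Effect.withSpan.
Effect.runPromise(captureTraceContext.pipe(Effect.withSpan("flow"))).then(
  console.log,
); // e.g. { traceId: "...", spanId: "...", traceFlags: 1 }
```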
error.body : String(error);\n\n yield* Effect.logInfo(\n `Updating job ${jobId} to failed status with error: ${errorMessage}`,\n );\n\n // Update job as failed - do this FIRST before cleanup\n yield* updateJob(jobId, {\n status: \"failed\",\n error: errorMessage,\n updatedAt: new Date(),\n }).pipe(\n Effect.catchAll((updateError) =>\n Effect.gen(function* () {\n yield* Effect.logError(\n `Failed to update job ${jobId}`,\n updateError,\n );\n return Effect.succeed(undefined);\n }),\n ),\n );\n\n // Emit FlowError event to notify client via WebSocket\n const job = yield* kvStore.get(jobId);\n if (job) {\n yield* eventEmitter\n .emit(jobId, {\n jobId,\n eventType: EventType.FlowError,\n flowId: job.flowId,\n error: errorMessage,\n })\n .pipe(\n Effect.catchAll((emitError) =>\n Effect.gen(function* () {\n yield* Effect.logError(\n `Failed to emit FlowError event for job ${jobId}`,\n emitError,\n );\n return Effect.succeed(undefined);\n }),\n ),\n );\n }\n\n // Also call flow's onEvent callback to update external databases (like uploadista-cloud)\n if (flow.onEvent) {\n yield* flow\n .onEvent({\n jobId,\n eventType: EventType.FlowError,\n flowId: flow.id,\n error: errorMessage,\n })\n .pipe(\n Effect.catchAll((onEventError) =>\n Effect.gen(function* () {\n yield* Effect.logError(\n `Failed to call flow.onEvent for FlowError event for job ${jobId}`,\n onEventError,\n );\n return Effect.succeed({ eventId: null });\n }),\n ),\n );\n }\n\n // Cleanup intermediate files even on failure (don't let this fail the error handling)\n yield* cleanupIntermediateFiles(jobId, clientId).pipe(\n Effect.catchAll((cleanupError) =>\n Effect.gen(function* () {\n yield* Effect.logWarning(\n `Failed to cleanup intermediate files for job ${jobId}`,\n cleanupError,\n );\n return Effect.succeed(undefined);\n }),\n ),\n );\n\n // Add failed job to Dead Letter Queue for retry/debugging\n const uploadistaError =\n error instanceof UploadistaError\n ? 
error\n : new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: String(error),\n cause: error,\n });\n yield* addToDeadLetterQueue(jobId, uploadistaError);\n\n throw error;\n }),\n ),\n );\n\n return {\n getFlow: (flowId, clientId) =>\n Effect.gen(function* () {\n const flow = yield* flowProvider.getFlow(flowId, clientId);\n return flow;\n }),\n\n getFlowData: (flowId, clientId) =>\n Effect.gen(function* () {\n const flow = yield* flowProvider.getFlow(flowId, clientId);\n return getFlowData(flow);\n }),\n\n runFlow: ({\n flowId,\n storageId,\n clientId,\n inputs,\n }: {\n flowId: string;\n storageId: string;\n clientId: string | null;\n inputs: unknown;\n }) =>\n Effect.gen(function* () {\n const waitUntil = yield* FlowWaitUntil.optional;\n\n const parsedParams = yield* Effect.try({\n try: () => runArgsSchema.parse({ inputs }),\n catch: (error) =>\n UploadistaError.fromCode(\"FLOW_INPUT_VALIDATION_ERROR\", {\n cause: error,\n }),\n });\n\n // Generate a unique jobId\n const jobId = crypto.randomUUID();\n const createdAt = new Date();\n\n // Store initial job metadata\n const job: FlowJob = {\n id: jobId,\n flowId,\n storageId,\n clientId,\n status: \"started\",\n createdAt,\n updatedAt: createdAt,\n tasks: [],\n };\n\n yield* kvStore.set(jobId, job);\n\n // Get the flow and start background execution\n const flow = yield* flowProvider.getFlow(flowId, clientId);\n\n console.log(\n `[FlowServer] About to fork flow execution for job: ${jobId}`,\n );\n\n // Execute flow in background\n // If waitUntil is provided (Cloudflare Workers), use it to keep execution alive\n // Otherwise, use Effect.fork for standard environments\n const flowEffect = executeFlowInBackground({\n jobId,\n flow,\n storageId,\n clientId,\n inputs: parsedParams.inputs,\n }).pipe(\n Effect.tapErrorCause((cause) =>\n Effect.logError(\"Flow execution failed\", cause),\n ),\n ) as Effect.Effect<\n FlowExecutionResult<Record<string, any>>,\n UploadistaError,\n never\n >;\n\n if (Option.isSome(waitUntil)) {\n // Cloudflare Workers: Use waitUntil to keep execution alive\n console.log(`[FlowServer] Using waitUntil for job: ${jobId}`);\n // Get the current runtime to run the effect as a promise\n const runtime = yield* Effect.runtime();\n const runnable = Runtime.runPromise(runtime);\n const promise = runnable(flowEffect);\n waitUntil.value(promise);\n } else {\n // Standard environments: Fork normally\n console.log(\n `[FlowServer] Using Effect.forkDaemon for job: ${jobId}`,\n );\n yield* Effect.forkDaemon(flowEffect);\n }\n\n console.log(`[FlowServer] Flow execution started for job: ${jobId}`);\n\n // Return immediately with jobId\n return job;\n }),\n\n getJobStatus: (jobId: string) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n\n return job;\n }),\n\n resumeFlow: ({\n jobId,\n nodeId,\n newData,\n clientId,\n }: {\n jobId: string;\n nodeId: string;\n newData: unknown;\n clientId: string | null;\n }) =>\n Effect.gen(function* () {\n const waitUntil = yield* FlowWaitUntil.optional;\n\n // Get the current job\n const job = yield* kvStore.get(jobId);\n if (!job) {\n console.error(\"Job not found\");\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n\n // Verify job is paused\n if (job.status !== \"paused\") {\n console.error(\"Job is not paused\");\n return 
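`runFlow` above chooses its backgrounding strategy at the fork point: on Cloudflare Workers the effect is run as a promise and handed to `waitUntil`, elsewhere it is forked as a daemon fiber. A condensed sketch of that decision (`runInBackground` is our name):

```typescript
import { Effect, Option, Runtime } from "effect";

// With a waitUntil callback the effect is handed to the platform;
// otherwise it is forked as a daemon fiber.
const runInBackground = <A, E>(
  task: Effect.Effect<A, E>,
  waitUntil: Option.Option<(p: Promise<unknown>) => void>,
) =>
  Effect.gen(function* () {
    if (Option.isSome(waitUntil)) {
      const runtime = yield* Effect.runtime<never>();
      waitUntil.value(Runtime.runPromise(runtime)(task));
    } else {
      yield* Effect.forkDaemon(task);
    }
  });

// Usage: no waitUntil available, so the task is forked.
const job = Effect.log("flow running in background");
Effect.runPromise(runInBackground(job, Option.none())).then(() =>
  console.log("request returned immediately"),
);
```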
yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} is not paused (status: ${job.status})`,\n }),\n );\n }\n\n // Verify it's paused at the expected node\n if (job.pausedAt !== nodeId) {\n console.error(\"Job is not paused at the expected node\");\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} is paused at node ${job.pausedAt}, not ${nodeId}`,\n }),\n );\n }\n\n // Verify we have execution state\n if (!job.executionState) {\n console.error(\"Job has no execution state\");\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} has no execution state`,\n }),\n );\n }\n\n // Reconstruct nodeResults from tasks\n const nodeResults = job.tasks.reduce(\n (acc, task) => {\n if (task.result !== undefined) {\n acc[task.nodeId] = task.result;\n }\n return acc;\n },\n {} as Record<string, unknown>,\n );\n\n // Update with new data\n const updatedNodeResults = {\n ...nodeResults,\n [nodeId]: newData,\n };\n\n const updatedInputs = {\n ...job.executionState.inputs,\n [nodeId]: newData,\n };\n\n // Update job status to running BEFORE forking background execution\n // This ensures the status is updated synchronously before events start firing\n yield* updateJob(jobId, {\n status: \"running\",\n });\n\n // Get the flow\n const flow = yield* flowProvider.getFlow(job.flowId, job.clientId);\n\n // Create external span from stored trace context if available\n // This links resumed flow to the original flow execution trace\n const parentSpan = job.traceContext\n ? Tracer.externalSpan({\n traceId: job.traceContext.traceId,\n spanId: job.traceContext.spanId,\n sampled: job.traceContext.traceFlags === 1,\n })\n : undefined;\n\n // Helper to resume flow in background\n const resumeFlowInBackground = Effect.gen(function* () {\n const flowWithEvents = withFlowEvents(flow, eventEmitter, kvStore);\n\n if (!job.executionState) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} has no execution state`,\n }),\n );\n }\n\n // Resume the flow with updated state\n const result = yield* flowWithEvents.resume({\n jobId,\n storageId: job.storageId,\n nodeResults: updatedNodeResults,\n executionState: {\n ...job.executionState,\n inputs: updatedInputs,\n },\n clientId: job.clientId,\n });\n\n // Handle result based on type\n if (result.type === \"paused\") {\n // Update job as paused again (node results are in tasks, not executionState)\n yield* updateJob(jobId, {\n status: \"paused\",\n pausedAt: result.nodeId,\n executionState: result.executionState,\n updatedAt: new Date(),\n });\n } else {\n // Update job as completed\n // Note: result field is already set by FlowEnd event handler with TypedOutput[]\n yield* updateJob(jobId, {\n status: \"completed\",\n pausedAt: undefined,\n executionState: undefined,\n updatedAt: new Date(),\n endedAt: new Date(),\n });\n\n // Cleanup intermediate files\n yield* cleanupIntermediateFiles(jobId, clientId);\n }\n\n return result;\n }).pipe(\n // Wrap resumed flow execution in a span for distributed tracing\n // Pass parent directly to link to original flow execution\n Effect.withSpan(\"flow-execution-resume\", {\n attributes: {\n \"flow.id\": flow.id,\n \"flow.name\": flow.name,\n \"flow.job_id\": jobId,\n \"flow.storage_id\": job.storageId,\n \"flow.resumed_from_node\": nodeId,\n },\n parent: parentSpan,\n }),\n );\n\n const resumeFlowInBackgroundWithErrorHandling =\n resumeFlowInBackground.pipe(\n 
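Resuming above rebuilds `nodeResults` from persisted task results and overlays the new data for the node that paused. A data-only sketch, with the `Task` shape trimmed to the fields used:

```typescript
// Fold completed task results back into a nodeId-keyed record, then
// overlay the resumed node's new data before execution continues.
type Task = { nodeId: string; result?: unknown };

function rebuildNodeResults(
  tasks: Task[],
  resumedNodeId: string,
  newData: unknown,
): Record<string, unknown> {
  const fromTasks = tasks.reduce(
    (acc, task) => {
      if (task.result !== undefined) acc[task.nodeId] = task.result;
      return acc;
    },
    {} as Record<string, unknown>,
  );
  return { ...fromTasks, [resumedNodeId]: newData };
}

console.log(
  rebuildNodeResults(
    [{ nodeId: "input", result: { id: "f1" } }, { nodeId: "approval_1" }],
    "approval_1",
    { approved: true },
  ),
); // { input: { id: "f1" }, approval_1: { approved: true } }
```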
Effect.catchAll((error) =>\n Effect.gen(function* () {\n yield* Effect.logError(\"Flow resume failed\", error);\n\n // Convert error to a proper message\n const errorMessage =\n error instanceof UploadistaError\n ? error.body\n : String(error);\n\n yield* Effect.logInfo(\n `Updating job ${jobId} to failed status with error: ${errorMessage}`,\n );\n\n // Update job as failed - do this FIRST before cleanup\n yield* updateJob(jobId, {\n status: \"failed\",\n error: errorMessage,\n updatedAt: new Date(),\n }).pipe(\n Effect.catchAll((updateError) =>\n Effect.gen(function* () {\n yield* Effect.logError(\n `Failed to update job ${jobId}`,\n updateError,\n );\n return Effect.succeed(undefined);\n }),\n ),\n );\n\n // Emit FlowError event to notify client\n const currentJob = yield* kvStore.get(jobId);\n if (currentJob) {\n yield* eventEmitter\n .emit(jobId, {\n jobId,\n eventType: EventType.FlowError,\n flowId: currentJob.flowId,\n error: errorMessage,\n })\n .pipe(\n Effect.catchAll((emitError) =>\n Effect.gen(function* () {\n yield* Effect.logError(\n `Failed to emit FlowError event for job ${jobId}`,\n emitError,\n );\n return Effect.succeed(undefined);\n }),\n ),\n );\n }\n\n // Cleanup intermediate files even on failure (don't let this fail the error handling)\n yield* cleanupIntermediateFiles(jobId, clientId).pipe(\n Effect.catchAll((cleanupError) =>\n Effect.gen(function* () {\n yield* Effect.logWarning(\n `Failed to cleanup intermediate files for job ${jobId}`,\n cleanupError,\n );\n return Effect.succeed(undefined);\n }),\n ),\n );\n\n // Add failed job to Dead Letter Queue for retry/debugging\n const uploadistaError =\n error instanceof UploadistaError\n ? error\n : new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: String(error),\n cause: error,\n });\n yield* addToDeadLetterQueue(jobId, uploadistaError);\n\n throw error;\n }),\n ),\n );\n\n // Fork the resume execution to run in background\n // Use waitUntil if available (Cloudflare Workers), otherwise fork normally\n const resumeEffect = resumeFlowInBackgroundWithErrorHandling.pipe(\n Effect.tapErrorCause((cause) =>\n Effect.logError(\"Flow resume failed\", cause),\n ),\n ) as Effect.Effect<\n FlowExecutionResult<Record<string, unknown>>,\n UploadistaError,\n never\n >;\n\n if (Option.isSome(waitUntil)) {\n // Cloudflare Workers: Use waitUntil to keep execution alive\n console.log(\n `[FlowServer] Using waitUntil for resume job: ${jobId}`,\n );\n const runtime = yield* Effect.runtime();\n const runnable = Runtime.runPromise(runtime);\n const promise = runnable(resumeEffect);\n waitUntil.value(promise);\n } else {\n // Standard environments: Fork normally as daemon\n console.log(\n `[FlowServer] Using Effect.forkDaemon for resume job: ${jobId}`,\n );\n yield* Effect.forkDaemon(resumeEffect);\n }\n\n // Return immediately with updated job\n const updatedJob = yield* kvStore.get(jobId);\n if (!updatedJob) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found after update`,\n }),\n );\n }\n return updatedJob;\n }),\n\n pauseFlow: (jobId: string, clientId: string | null) =>\n Effect.gen(function* () {\n // Get the current job\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n\n // Verify authorization if clientId is provided\n if (clientId !== null && job.clientId !== clientId) {\n return yield* Effect.fail(\n 
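// The caller's clientId does not match the job owner\n 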
UploadistaError.fromCode(\"FLOW_NOT_AUTHORIZED\", {\n cause: `Client ${clientId} is not authorized to pause job ${jobId}`,\n }),\n );\n }\n\n // Verify job can be paused (must be running)\n if (job.status !== \"running\") {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} cannot be paused (current status: ${job.status})`,\n }),\n );\n }\n\n // Find the currently running node (if any)\n const runningTask = job.tasks.find((t) => t.status === \"running\");\n const pausedAtNode = runningTask?.nodeId;\n\n // Update job status to paused\n yield* updateJob(jobId, {\n status: \"paused\",\n pausedAt: pausedAtNode,\n updatedAt: new Date(),\n });\n\n // Emit FlowPause event\n yield* eventEmitter.emit(jobId, {\n jobId,\n flowId: job.flowId,\n eventType: EventType.FlowPause,\n pausedAt: pausedAtNode,\n });\n\n // Return updated job\n const updatedJob = yield* kvStore.get(jobId);\n if (!updatedJob) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found after pause`,\n }),\n );\n }\n return updatedJob;\n }),\n\n cancelFlow: (jobId: string, clientId: string | null) =>\n Effect.gen(function* () {\n // Get the current job\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n\n // Verify authorization if clientId is provided\n if (clientId !== null && job.clientId !== clientId) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_NOT_AUTHORIZED\", {\n cause: `Client ${clientId} is not authorized to cancel job ${jobId}`,\n }),\n );\n }\n\n // Verify job can be cancelled (must be running or paused)\n if (\n job.status !== \"running\" &&\n job.status !== \"paused\" &&\n job.status !== \"started\"\n ) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} cannot be cancelled (current status: ${job.status})`,\n }),\n );\n }\n\n // Update job status to cancelled\n yield* updateJob(jobId, {\n status: \"cancelled\",\n updatedAt: new Date(),\n endedAt: new Date(),\n });\n\n // Emit FlowCancel event\n yield* eventEmitter.emit(jobId, {\n jobId,\n flowId: job.flowId,\n eventType: EventType.FlowCancel,\n });\n\n // Cleanup intermediate files\n yield* cleanupIntermediateFiles(jobId, clientId);\n\n // Return updated job\n const updatedJob = yield* kvStore.get(jobId);\n if (!updatedJob) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found after cancellation`,\n }),\n );\n }\n return updatedJob;\n }),\n\n subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) =>\n Effect.gen(function* () {\n yield* eventEmitter.subscribe(jobId, connection);\n }),\n\n unsubscribeFromFlowEvents: (jobId: string) =>\n Effect.gen(function* () {\n yield* eventEmitter.unsubscribe(jobId);\n }),\n } satisfies FlowEngineShape;\n });\n}\n\n// Export the FlowEngine layer with job store dependency\nexport const flowEngine = Layer.effect(FlowEngine, createFlowEngine());\nexport type FlowEngineLayer = typeof flowEngine;\n","/**\n * File naming utilities for the flow engine.\n *\n * This module provides functions for generating dynamic file names based on\n * templates, auto-suffixes, or custom functions. 
It supports mustache-style\n * template interpolation using micromustache.\n *\n * @module flow/utils/file-naming\n */\n\nimport { render } from \"micromustache\";\nimport type { UploadFile } from \"../../types/upload-file\";\nimport type {\n FileNamingConfig,\n NamingContext,\n} from \"../types/flow-types\";\n\n/**\n * Extracts the base name (without extension) from a filename.\n *\n * @param fileName - The full filename\n * @returns The filename without extension\n *\n * @example\n * ```typescript\n * getBaseName(\"photo.jpg\") // \"photo\"\n * getBaseName(\"document.tar.gz\") // \"document.tar\"\n * getBaseName(\"noextension\") // \"noextension\"\n * ```\n */\nexport function getBaseName(fileName: string): string {\n const lastDotIndex = fileName.lastIndexOf(\".\");\n if (lastDotIndex === -1 || lastDotIndex === 0) {\n return fileName;\n }\n return fileName.substring(0, lastDotIndex);\n}\n\n/**\n * Extracts the extension (without dot) from a filename.\n *\n * @param fileName - The full filename\n * @returns The extension without leading dot, or empty string if none\n *\n * @example\n * ```typescript\n * getExtension(\"photo.jpg\") // \"jpg\"\n * getExtension(\"document.tar.gz\") // \"gz\"\n * getExtension(\"noextension\") // \"\"\n * ```\n */\nexport function getExtension(fileName: string): string {\n const lastDotIndex = fileName.lastIndexOf(\".\");\n if (lastDotIndex === -1 || lastDotIndex === 0) {\n return \"\";\n }\n return fileName.substring(lastDotIndex + 1);\n}\n\n/**\n * Builds a naming context from file and flow execution information.\n *\n * @param file - The UploadFile being processed\n * @param flowContext - Flow execution context (flowId, jobId, nodeId, nodeType)\n * @param extraVars - Additional variables to include (width, height, format, etc.)\n * @returns Complete naming context for template interpolation\n *\n * @example\n * ```typescript\n * const context = buildNamingContext(\n * uploadFile,\n * { flowId: \"flow-1\", jobId: \"job-1\", nodeId: \"resize-1\", nodeType: \"resize\" },\n * { width: 800, height: 600 }\n * );\n * // context.baseName = \"photo\"\n * // context.extension = \"jpg\"\n * // context.width = 800\n * // context.height = 600\n * ```\n */\nexport function buildNamingContext(\n file: UploadFile,\n flowContext: {\n flowId: string;\n jobId: string;\n nodeId: string;\n nodeType: string;\n },\n extraVars?: Record<string, string | number | undefined>,\n): NamingContext {\n // Extract fileName from metadata\n const metadata = file.metadata ?? 
{};\n const fileName =\n (metadata.fileName as string) ??\n (metadata.originalName as string) ??\n (metadata.name as string) ??\n \"unnamed\";\n\n const baseName = getBaseName(fileName);\n const extension = getExtension(fileName);\n\n return {\n baseName,\n extension,\n fileName,\n nodeType: flowContext.nodeType,\n nodeId: flowContext.nodeId,\n flowId: flowContext.flowId,\n jobId: flowContext.jobId,\n timestamp: new Date().toISOString(),\n ...extraVars,\n };\n}\n\n/**\n * Interpolates a mustache-style template with the given context.\n *\n * Uses micromustache for fast, secure template rendering.\n * Unknown variables are preserved as-is (e.g., {{unknown}} stays {{unknown}}).\n *\n * @param pattern - Mustache-style template string\n * @param context - Variables to interpolate\n * @returns Interpolated string\n *\n * @example\n * ```typescript\n * interpolateFileName(\n * \"{{baseName}}-{{width}}x{{height}}.{{extension}}\",\n * { baseName: \"photo\", width: 800, height: 600, extension: \"jpg\" }\n * );\n * // Returns: \"photo-800x600.jpg\"\n * ```\n */\nexport function interpolateFileName(\n pattern: string,\n context: NamingContext,\n): string {\n try {\n // Convert context to string values for micromustache\n const stringContext: Record<string, string> = {};\n for (const [key, value] of Object.entries(context)) {\n if (value !== undefined) {\n stringContext[key] = String(value);\n }\n }\n return render(pattern, stringContext);\n } catch {\n // On error, return the pattern as-is (fallback behavior)\n return pattern;\n }\n}\n\n/**\n * Applies file naming configuration to generate a new filename.\n *\n * Handles three modes:\n * - No config: Returns original filename (backward compatible)\n * - Auto mode: Appends auto-generated suffix based on node type\n * - Custom mode: Uses template pattern or rename function\n *\n * On any error, falls back to the original filename to prevent flow failures.\n *\n * @param file - The UploadFile being processed\n * @param context - Naming context with all available variables\n * @param config - Optional naming configuration\n * @returns The new filename (or original on error/no config)\n *\n * @example\n * ```typescript\n * // Auto mode\n * applyFileNaming(file, context, {\n * mode: 'auto',\n * autoSuffix: (ctx) => `${ctx.width}x${ctx.height}`\n * });\n * // Returns: \"photo-800x600.jpg\"\n *\n * // Custom mode with template\n * applyFileNaming(file, context, {\n * mode: 'custom',\n * pattern: '{{baseName}}-processed.{{extension}}'\n * });\n * // Returns: \"photo-processed.jpg\"\n *\n * // Custom mode with function\n * applyFileNaming(file, context, {\n * mode: 'custom',\n * rename: (file, ctx) => `${ctx.flowId}-${ctx.fileName}`\n * });\n * // Returns: \"flow-1-photo.jpg\"\n * ```\n */\nexport function applyFileNaming(\n file: UploadFile,\n context: NamingContext,\n config?: FileNamingConfig,\n): string {\n const originalFileName = context.fileName;\n\n // No config = preserve original (backward compatible)\n if (!config) {\n return originalFileName;\n }\n\n try {\n if (config.mode === \"auto\") {\n // Auto mode: append auto-generated suffix\n if (config.autoSuffix) {\n const suffix = config.autoSuffix(context);\n if (suffix) {\n const { baseName, extension } = context;\n return extension\n ? 
`${baseName}-${suffix}.${extension}`\n : `${baseName}-${suffix}`;\n }\n }\n // No autoSuffix defined, preserve original\n return originalFileName;\n }\n\n if (config.mode === \"custom\") {\n // Custom mode: use function or template\n if (config.rename) {\n const result = config.rename(file, context);\n return result || originalFileName;\n }\n if (config.pattern) {\n const result = interpolateFileName(config.pattern, context);\n return result || originalFileName;\n }\n }\n\n // Unknown mode, preserve original\n return originalFileName;\n } catch {\n // On any error, fall back to original filename\n return originalFileName;\n }\n}\n\n/**\n * Validates a template pattern for common issues.\n *\n * Checks for:\n * - Balanced braces\n * - Non-empty pattern\n * - Valid variable names\n *\n * @param pattern - Template pattern to validate\n * @returns Object with isValid flag and optional error message\n *\n * @example\n * ```typescript\n * validatePattern(\"{{baseName}}.{{extension}}\");\n * // { isValid: true }\n *\n * validatePattern(\"{{baseName\");\n * // { isValid: false, error: \"Unbalanced braces: 1 opening, 0 closing\" }\n * ```\n */\nexport function validatePattern(pattern: string): {\n isValid: boolean;\n error?: string;\n} {\n if (!pattern || pattern.trim() === \"\") {\n return { isValid: false, error: \"Pattern cannot be empty\" };\n }\n\n // Check for balanced braces\n const openCount = (pattern.match(/\\{\\{/g) || []).length;\n const closeCount = (pattern.match(/\\}\\}/g) || []).length;\n\n if (openCount !== closeCount) {\n return {\n isValid: false,\n error: `Unbalanced braces: ${openCount} opening, ${closeCount} closing`,\n };\n }\n\n // Check for valid variable syntax\n const invalidVars = pattern.match(/\\{\\{[^}]*[^a-zA-Z0-9_}][^}]*\\}\\}/g);\n if (invalidVars) {\n return {\n isValid: false,\n error: `Invalid variable syntax: ${invalidVars[0]}`,\n };\n }\n\n return { isValid: true };\n}\n\n/**\n * List of available template variables for documentation and UI.\n */\nexport const AVAILABLE_TEMPLATE_VARIABLES = [\n { name: \"baseName\", description: \"Filename without extension\", example: \"photo\" },\n { name: \"extension\", description: \"File extension without dot\", example: \"jpg\" },\n { name: \"fileName\", description: \"Full original filename\", example: \"photo.jpg\" },\n { name: \"nodeType\", description: \"Type of processing node\", example: \"resize\" },\n { name: \"nodeId\", description: \"Specific node instance ID\", example: \"resize-1\" },\n { name: \"flowId\", description: \"Flow identifier\", example: \"flow-abc\" },\n { name: \"jobId\", description: \"Execution job ID\", example: \"job-123\" },\n { name: \"timestamp\", description: \"ISO 8601 processing time\", example: \"2024-01-15T10:30:00Z\" },\n { name: \"width\", description: \"Output width (image/video)\", example: \"800\" },\n { name: \"height\", description: \"Output height (image/video)\", example: \"600\" },\n { name: \"format\", description: \"Output format\", example: \"webp\" },\n { name: \"quality\", description: \"Quality setting\", example: \"80\" },\n { name: \"pageNumber\", description: \"Page number (documents)\", example: \"1\" },\n] as const;\n","import { Effect, Stream } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\nimport type { StreamingConfig, UploadFile } from \"../../types\";\nimport { DEFAULT_STREAMING_CONFIG, uploadFileSchema } from \"../../types\";\nimport { UploadEngine } from \"../../upload\";\nimport { createFlowNode, NodeType } from 
\"../node\";\nimport { completeNodeExecution, type FileNamingConfig } from \"../types\";\nimport type { FlowCircuitBreakerConfig } from \"../types/flow-types\";\nimport { applyFileNaming, buildNamingContext } from \"../utils/file-naming\";\nimport { resolveUploadMetadata } from \"../utils/resolve-upload-metadata\";\n\n/**\n * Transform mode for controlling how file data is processed.\n *\n * - `buffered`: Always load entire file into memory before transforming (default, backward compatible)\n * - `streaming`: Process file as a stream of chunks for memory efficiency\n * - `auto`: Automatically select mode based on file size and DataStore capabilities\n */\nexport type TransformMode = \"buffered\" | \"streaming\" | \"auto\";\n\n/**\n * Result type for streaming transforms.\n * Can return just the transformed stream, or include metadata changes.\n */\nexport type StreamingTransformResult =\n | Stream.Stream<Uint8Array, UploadistaError>\n | {\n stream: Stream.Stream<Uint8Array, UploadistaError>;\n type?: string;\n fileName?: string;\n /** Estimated output size in bytes (for progress tracking) */\n estimatedSize?: number;\n };\n\n/**\n * Function type for streaming transforms.\n * Receives an input stream and file metadata, returns a transformed stream.\n */\nexport type StreamingTransformFn = (\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n file: UploadFile,\n) => Effect.Effect<StreamingTransformResult, UploadistaError>;\n\n/**\n * Configuration object for creating a transform node.\n */\nexport interface TransformNodeConfig {\n /** Unique identifier for the node */\n id: string;\n /** Human-readable name for the node */\n name: string;\n /** Description of what the node does */\n description: string;\n /** Optional output type ID from outputTypeRegistry for result type registration */\n outputTypeId?: string;\n /**\n * Whether to keep this node's output as a flow result even if it has outgoing edges.\n * When true, the node's output will be included in the final flow outputs alongside topology sinks.\n * Defaults to false.\n */\n keepOutput?: boolean;\n /**\n * Optional file naming configuration.\n * - undefined: Preserve original filename (backward compatible)\n * - mode: 'auto': Generate smart suffix based on node type\n * - mode: 'custom': Use template pattern or rename function\n */\n naming?: FileNamingConfig;\n /**\n * Node type identifier used for auto-naming context.\n * Defaults to \"transform\" if not specified.\n */\n nodeType?: string;\n /**\n * Stable node type identifier for circuit breaker configuration.\n * Used to share circuit breaker state across nodes of the same type\n * and for nodeTypeOverrides in flow config.\n * Example: \"describe-image\", \"remove-background\", \"scan-virus\"\n */\n nodeTypeId?: string;\n /**\n * Additional variables to include in the naming context.\n * These are merged with the base context (flowId, jobId, etc.)\n * and can be used in templates.\n */\n namingVars?: Record<string, string | number | undefined>;\n /**\n * Circuit breaker configuration for resilience against external service failures.\n * Overrides flow-level circuit breaker defaults for this node.\n */\n circuitBreaker?: FlowCircuitBreakerConfig;\n /**\n * Transform mode controlling how file data is processed.\n * - `buffered`: Always load entire file into memory\n * - `streaming`: Process file as a stream of chunks\n * - `auto`: Select mode based on file size and DataStore capabilities (default)\n *\n * @default \"auto\"\n */\n mode?: TransformMode;\n /**\n * Configuration 
for streaming mode (file size threshold, chunk size).\n * Only used when mode is \"streaming\" or \"auto\".\n */\n streamingConfig?: StreamingConfig;\n /**\n * Function that transforms file bytes (buffered mode).\n * Required when mode is \"buffered\"; in \"auto\" mode, at least one of\n * transform and streamingTransform must be provided.\n */\n transform?: (\n bytes: Uint8Array,\n file: UploadFile,\n ) => Effect.Effect<\n Uint8Array | { bytes: Uint8Array; type?: string; fileName?: string },\n UploadistaError\n >;\n /**\n * Function that transforms file as a stream (streaming mode).\n * For memory-efficient processing of large files.\n * Used when mode is \"streaming\" or when \"auto\" selects streaming.\n */\n streamingTransform?: StreamingTransformFn;\n}\n\n/**\n * Helper to check if a StreamingTransformResult is a stream or an object with metadata.\n */\nfunction isStreamResult(\n result: StreamingTransformResult,\n): result is Stream.Stream<Uint8Array, UploadistaError> {\n // Check if it has the 'stream' property (object form) vs is a Stream directly\n return !(\"stream\" in result);\n}\n\n/**\n * Creates a transform node that handles the common pattern of:\n * 1. Reading bytes from an UploadFile\n * 2. Transforming the bytes\n * 3. Uploading the result as a new UploadFile\n *\n * This simplifies nodes that just need to transform file bytes without\n * worrying about upload server interactions.\n *\n * Supports both buffered and streaming modes:\n * - **Buffered mode**: Loads entire file into memory, transforms, uploads\n * - **Streaming mode**: Processes file as chunks for memory efficiency with large files\n * - **Auto mode** (default): Selects mode based on file size and DataStore capabilities\n *\n * @param config - Configuration object for the transform node\n * @returns An Effect that creates a flow node configured for file transformation\n *\n * @example\n * ```typescript\n * // Create a transform node with auto mode (default) - uses streaming for large files\n * const resizeNode = yield* createTransformNode({\n * id: \"resize-image\",\n * name: \"Resize Image\",\n * description: \"Resizes images to specified dimensions\",\n * transform: (bytes, file) => {\n * // Your transformation logic here\n * return Effect.succeed(transformedBytes);\n * },\n * streamingTransform: (stream, file) => {\n * const transformed = Stream.map(stream, (chunk) => processChunk(chunk));\n * return Effect.succeed(transformed);\n * }\n * });\n *\n * // Force buffered mode for specific use cases\n * const bufferedNode = yield* createTransformNode({\n * id: \"optimize-small\",\n * name: \"Optimize Small Files\",\n * description: \"Optimizes small files with buffered mode\",\n * mode: \"buffered\",\n * transform: (bytes, file) => Effect.succeed(transformBytes(bytes)),\n * });\n *\n * // Force streaming mode for memory efficiency\n * const streamingNode = yield* createTransformNode({\n * id: \"optimize-large\",\n * name: \"Optimize Large Files\",\n * description: \"Optimizes large files with streaming\",\n * mode: \"streaming\",\n * streamingTransform: (stream, file) => {\n * const transformed = Stream.map(stream, (chunk) => processChunk(chunk));\n * return Effect.succeed(transformed);\n * }\n * });\n * ```\n */\nexport function createTransformNode({\n id,\n name,\n description,\n outputTypeId,\n keepOutput,\n naming,\n nodeType: namingNodeType = \"transform\",\n nodeTypeId,\n namingVars,\n circuitBreaker,\n mode = \"auto\",\n streamingConfig,\n transform,\n streamingTransform,\n}: TransformNodeConfig) {\n // Validate configuration\n if (mode === 
\"streaming\" && !streamingTransform) {\n throw new Error(\n `Transform node \"${id}\": mode is \"streaming\" but no streamingTransform function provided`,\n );\n }\n if (mode === \"buffered\" && !transform) {\n throw new Error(\n `Transform node \"${id}\": mode is \"buffered\" but no transform function provided`,\n );\n }\n if (mode === \"auto\" && !transform && !streamingTransform) {\n throw new Error(\n `Transform node \"${id}\": mode is \"auto\" but neither transform nor streamingTransform provided`,\n );\n }\n\n // Merge streaming config with defaults\n const effectiveStreamingConfig = {\n ...DEFAULT_STREAMING_CONFIG,\n ...streamingConfig,\n };\n\n return Effect.gen(function* () {\n const uploadEngine = yield* UploadEngine;\n\n return yield* createFlowNode<UploadFile, UploadFile>({\n id,\n name,\n description,\n type: NodeType.process,\n outputTypeId,\n keepOutput,\n nodeTypeId,\n circuitBreaker,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n run: ({ data: file, storageId, flowId, jobId, clientId }) => {\n return Effect.gen(function* () {\n const flow = {\n flowId,\n nodeId: id,\n jobId,\n };\n\n // Determine which mode to use\n const shouldUseStreaming = yield* Effect.gen(function* () {\n if (mode === \"buffered\") return false;\n if (mode === \"streaming\") return true;\n\n // Auto mode: check file size and capabilities\n const fileSize = file.size ?? 0;\n const threshold = effectiveStreamingConfig.fileSizeThreshold;\n\n // If file is smaller than threshold, use buffered\n if (fileSize > 0 && fileSize < threshold) {\n yield* Effect.logDebug(\n `File ${file.id} (${fileSize} bytes) below threshold (${threshold}), using buffered mode`,\n );\n return false;\n }\n\n // Check if we have the required functions\n if (!streamingTransform) {\n yield* Effect.logDebug(\n `No streamingTransform function, using buffered mode`,\n );\n return false;\n }\n\n // Check DataStore capabilities via UploadEngine\n const capabilities = yield* uploadEngine.getCapabilities(\n storageId,\n clientId,\n );\n if (!capabilities.supportsStreamingRead) {\n yield* Effect.logDebug(\n `DataStore doesn't support streaming read, using buffered mode`,\n );\n return false;\n }\n\n yield* Effect.logDebug(\n `File ${file.id} qualifies for streaming mode`,\n );\n return true;\n });\n\n const { type, fileName, metadata, metadataJson } =\n resolveUploadMetadata(file.metadata);\n\n if (shouldUseStreaming && streamingTransform) {\n // STREAMING PATH - True end-to-end streaming\n yield* Effect.logDebug(`Using streaming transform for ${file.id}`);\n\n // Get input stream\n const inputStream = yield* uploadEngine.readStream(\n file.id,\n clientId,\n effectiveStreamingConfig,\n );\n\n // Transform the stream\n const transformResult = yield* streamingTransform(\n inputStream,\n file,\n );\n\n // Extract stream and metadata from result\n const outputStream = isStreamResult(transformResult)\n ? transformResult\n : transformResult.stream;\n const outputType = isStreamResult(transformResult)\n ? undefined\n : transformResult.type;\n const estimatedSize = isStreamResult(transformResult)\n ? undefined\n : transformResult.estimatedSize;\n\n // Get fileName from transform result or apply naming config\n let outputFileName = isStreamResult(transformResult)\n ? 
undefined\n : transformResult.fileName;\n\n if (!outputFileName && naming) {\n const namingContext = buildNamingContext(\n file,\n { flowId, jobId, nodeId: id, nodeType: namingNodeType },\n namingVars,\n );\n outputFileName = applyFileNaming(file, namingContext, naming);\n }\n\n // Check if DataStore supports streaming writes\n const capabilities = yield* uploadEngine.getCapabilities(\n storageId,\n clientId,\n );\n\n let result: UploadFile;\n\n if (capabilities.supportsStreamingWrite) {\n // True end-to-end streaming: pipe transform output directly to storage\n yield* Effect.logDebug(\n `Using streaming write for ${file.id} - no intermediate buffering`,\n );\n\n result = yield* uploadEngine.uploadStream(\n {\n storageId,\n uploadLengthDeferred: true,\n sizeHint: estimatedSize,\n type: outputType ?? type,\n fileName: outputFileName ?? fileName,\n lastModified: 0,\n metadata: metadataJson,\n flow,\n },\n clientId,\n outputStream,\n );\n } else {\n // Fallback: buffer the output before uploading\n // This path is for DataStores that don't support streaming writes\n yield* Effect.logDebug(\n `Falling back to buffered upload for ${file.id} (streaming write not supported)`,\n );\n\n const outputChunks: Uint8Array[] = [];\n yield* Stream.runForEach(outputStream, (chunk) =>\n Effect.sync(() => {\n outputChunks.push(chunk);\n }),\n );\n\n // Concatenate chunks into a single Uint8Array\n const totalLength = outputChunks.reduce(\n (sum, chunk) => sum + chunk.byteLength,\n 0,\n );\n const outputBytes = new Uint8Array(totalLength);\n let offset = 0;\n for (const chunk of outputChunks) {\n outputBytes.set(chunk, offset);\n offset += chunk.byteLength;\n }\n\n // Create a ReadableStream for upload\n const bufferedUploadStream = new ReadableStream({\n start(controller) {\n controller.enqueue(outputBytes);\n controller.close();\n },\n });\n\n result = yield* uploadEngine.upload(\n {\n storageId,\n size: outputBytes.byteLength,\n type: outputType ?? type,\n fileName: outputFileName ?? fileName,\n lastModified: 0,\n metadata: metadataJson,\n flow,\n },\n clientId,\n bufferedUploadStream,\n );\n }\n\n // Merge updated metadata\n const updatedMetadata = metadata\n ? {\n ...metadata,\n ...(outputType && {\n mimeType: outputType,\n type: outputType,\n \"content-type\": outputType,\n }),\n ...(outputFileName && {\n fileName: outputFileName,\n originalName: outputFileName,\n name: outputFileName,\n extension:\n outputFileName.split(\".\").pop() || metadata.extension,\n }),\n }\n : result.metadata;\n\n return completeNodeExecution(\n updatedMetadata\n ? { ...result, metadata: updatedMetadata }\n : result,\n );\n }\n\n // BUFFERED PATH (default, backward compatible)\n if (!transform) {\n throw new Error(\n `Transform node \"${id}\": buffered mode selected but no transform function provided`,\n );\n }\n\n // Read input bytes from upload server\n const inputBytes = yield* uploadEngine.read(file.id, clientId);\n\n // Transform the bytes using the provided function\n const transformResult = yield* transform(inputBytes, file);\n\n // Handle both simple Uint8Array and object with metadata\n const outputBytes =\n transformResult instanceof Uint8Array\n ? transformResult\n : transformResult.bytes;\n\n const outputType =\n transformResult instanceof Uint8Array\n ? undefined\n : transformResult.type;\n\n // Get fileName from transform result (if provided) or apply naming config\n let outputFileName =\n transformResult instanceof Uint8Array\n ? 
undefined\n : transformResult.fileName;\n\n // Apply file naming if configured and no explicit fileName from transform\n if (!outputFileName && naming) {\n const namingContext = buildNamingContext(\n file,\n {\n flowId,\n jobId,\n nodeId: id,\n nodeType: namingNodeType,\n },\n namingVars,\n );\n outputFileName = applyFileNaming(file, namingContext, naming);\n }\n\n // Create a stream from the output bytes\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(outputBytes);\n controller.close();\n },\n });\n\n // Upload the transformed bytes back to the upload server\n // Use output metadata if provided, otherwise fall back to original\n const result = yield* uploadEngine.upload(\n {\n storageId,\n size: outputBytes.byteLength,\n type: outputType ?? type,\n fileName: outputFileName ?? fileName,\n lastModified: 0,\n metadata: metadataJson,\n flow,\n },\n clientId,\n stream,\n );\n\n // Merge updated metadata with result\n const updatedMetadata = metadata\n ? {\n ...metadata,\n // Update mimeType and related fields if type changed\n ...(outputType && {\n mimeType: outputType,\n type: outputType,\n \"content-type\": outputType,\n }),\n // Update fileName and related fields if fileName changed\n ...(outputFileName && {\n fileName: outputFileName,\n originalName: outputFileName,\n name: outputFileName,\n // Update extension based on new fileName\n extension:\n outputFileName.split(\".\").pop() || metadata.extension,\n }),\n }\n : result.metadata;\n\n return completeNodeExecution(\n updatedMetadata\n ? {\n ...result,\n metadata: updatedMetadata,\n }\n : result,\n );\n });\n },\n });\n });\n}\n","import { Context, type Effect, type Layer } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\n\n/**\n * Shape definition for the Credential Provider interface.\n * Defines the contract for retrieving credentials for various services.\n */\nexport interface CredentialProviderShape {\n /**\n * Retrieves credentials for a specific service and client.\n *\n * @param params - Parameters for credential retrieval\n * @param params.clientId - Unique identifier for the client, or null if not available\n * @param params.serviceType - Optional service type to get specific credentials for\n * @returns An Effect that resolves to a record of credential key-value pairs\n * @throws {UploadistaError} When credential retrieval fails\n */\n getCredential: (params: {\n clientId: string | null;\n serviceType?: string;\n }) => Effect.Effect<Record<string, unknown>, UploadistaError>;\n}\n\n/**\n * Context tag for the Credential Provider.\n *\n * This tag provides a type-safe way to access credential functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { CredentialProvider } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const credentialProvider = yield* CredentialProvider;\n * const credentials = yield* credentialProvider.getCredential({\n * clientId: \"user123\",\n * serviceType: \"replicate\"\n * });\n * return credentials;\n * });\n * ```\n */\nexport class CredentialProvider extends Context.Tag(\"CredentialProvider\")<\n CredentialProvider,\n CredentialProviderShape\n>() {}\n\nexport type CredentialProviderLayer = Layer.Layer<\n CredentialProvider,\n never,\n never\n>;\n","import { Context, type Effect, type Layer } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\n\n/**\n * Context information for 
AI document processing operations.\n * Contains client identification and credentials for tracking and billing purposes.\n */\nexport type DocumentAiContext = {\n /** Unique identifier for the client making the request, or null if not available */\n clientId: string | null;\n /** Credential ID for accessing the AI service (e.g., Replicate API key) */\n credentialId?: string;\n};\n\n/**\n * Task types supported by OCR operations.\n */\nexport type OcrTaskType =\n | \"convertToMarkdown\"\n | \"freeOcr\"\n | \"parseFigure\"\n | \"locateObject\";\n\n/**\n * Resolution options for OCR processing.\n * Higher resolutions provide better accuracy but slower processing.\n */\nexport type OcrResolution = \"tiny\" | \"small\" | \"base\" | \"gundam\" | \"large\";\n\n/**\n * Parameters for OCR operations.\n */\nexport type OcrParams = {\n /**\n * Type of OCR task to perform.\n * - \"convertToMarkdown\": Convert document to structured Markdown\n * - \"freeOcr\": Extract all visible text without structure\n * - \"parseFigure\": Analyze charts and diagrams\n * - \"locateObject\": Find specific content using reference text\n */\n taskType: OcrTaskType;\n /**\n * Resolution size for processing.\n * Affects speed/accuracy tradeoff.\n * Default: \"gundam\" (recommended)\n */\n resolution?: OcrResolution;\n /**\n * Reference text for object location tasks.\n * Only used when taskType is \"locateObject\".\n */\n referenceText?: string;\n};\n\n/**\n * Result of an OCR operation.\n */\nexport type OcrResult = {\n /**\n * The extracted text content.\n */\n extractedText: string;\n /**\n * Format of the extracted text.\n * - \"markdown\": Structured markdown format\n * - \"plain\": Unstructured plain text\n * - \"structured\": Structured analysis (for figures)\n */\n format: \"markdown\" | \"plain\" | \"structured\";\n /**\n * Confidence score (0-1) if provided by the service.\n */\n confidence?: number;\n};\n\n/**\n * Shape definition for the Document AI Plugin interface.\n * Defines the contract that all document AI implementations must follow.\n */\nexport type DocumentAiPluginShape = {\n /**\n * Performs OCR on a document image or scanned PDF using AI.\n *\n * @param inputUrl - The URL of the input document/image to process\n * @param params - OCR parameters including task type and resolution\n * @param context - Context information including client ID for tracking\n * @returns An Effect that resolves to OcrResult with extracted text\n * @throws {UploadistaError} When OCR operation fails\n */\n performOCR: (\n inputUrl: string,\n params: OcrParams,\n context: DocumentAiContext,\n ) => Effect.Effect<OcrResult, UploadistaError>;\n};\n\n/**\n * Context tag for the Document AI Plugin.\n *\n * This tag provides a type-safe way to access document AI functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { DocumentAiPlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const documentAi = yield* DocumentAiPlugin;\n * const result = yield* documentAi.performOCR(\n * documentUrl,\n * { taskType: \"convertToMarkdown\", resolution: \"gundam\" },\n * { clientId: \"user123\" }\n * );\n * return result.extractedText;\n * });\n * ```\n */\nexport class DocumentAiPlugin extends Context.Tag(\"DocumentAiPlugin\")<\n DocumentAiPlugin,\n DocumentAiPluginShape\n>() {}\n\nexport type DocumentAiPluginLayer = Layer.Layer<DocumentAiPlugin, never, never>;\n","import { Context, type 
Effect, type Layer } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\n\n/**\n * Parameters for splitting a PDF document.\n */\nexport type SplitPdfParams = {\n /**\n * Mode of split operation.\n * - \"range\": Extract a contiguous range of pages\n * - \"individual\": Split into individual single-page PDFs\n */\n mode: \"range\" | \"individual\";\n /**\n * Starting page number (1-indexed).\n * Only used in \"range\" mode.\n */\n startPage?: number;\n /**\n * Ending page number (1-indexed, inclusive).\n * Only used in \"range\" mode.\n */\n endPage?: number;\n};\n\n/**\n * Result of a split PDF operation.\n * In \"range\" mode, returns a single PDF.\n * In \"individual\" mode, returns an array of single-page PDFs.\n */\nexport type SplitPdfResult =\n | { mode: \"range\"; pdf: Uint8Array }\n | { mode: \"individual\"; pdfs: Uint8Array[] };\n\n/**\n * Parameters for merging multiple PDF documents.\n */\nexport type MergePdfParams = {\n /**\n * Array of PDF documents to merge (in order).\n */\n pdfs: Uint8Array[];\n};\n\n/**\n * Metadata extracted from a PDF document.\n */\nexport type DocumentMetadata = {\n /**\n * Total number of pages in the document.\n */\n pageCount: number;\n /**\n * Document format (e.g., \"pdf\").\n */\n format: string;\n /**\n * Author of the document (if available).\n */\n author: string | null;\n /**\n * Title of the document (if available).\n */\n title: string | null;\n /**\n * Subject of the document (if available).\n */\n subject: string | null;\n /**\n * Creator application (if available).\n */\n creator: string | null;\n /**\n * Creation date in ISO 8601 format (if available).\n */\n creationDate: string | null;\n /**\n * Last modification date in ISO 8601 format (if available).\n */\n modifiedDate: string | null;\n /**\n * File size in bytes.\n */\n fileSize: number;\n};\n\n/**\n * Shape definition for the Document Plugin interface.\n * Defines the contract that all document processing implementations must follow.\n */\nexport type DocumentPluginShape = {\n /**\n * Extracts plain text from a searchable PDF document.\n *\n * @param input - The input PDF as a Uint8Array\n * @returns An Effect that resolves to the extracted text as a string\n * @throws {UploadistaError} When text extraction fails (e.g., PDF_ENCRYPTED, PDF_CORRUPTED)\n */\n extractText: (input: Uint8Array) => Effect.Effect<string, UploadistaError>;\n\n /**\n * Splits a PDF document by page range or into individual pages.\n *\n * @param input - The input PDF as a Uint8Array\n * @param options - Split parameters including mode and page range\n * @returns An Effect that resolves to either a single PDF or array of PDFs\n * @throws {UploadistaError} When splitting fails (e.g., PAGE_RANGE_INVALID)\n */\n splitPdf: (\n input: Uint8Array,\n options: SplitPdfParams,\n ) => Effect.Effect<SplitPdfResult, UploadistaError>;\n\n /**\n * Merges multiple PDF documents into a single document.\n *\n * @param options - Merge parameters including array of PDFs to merge\n * @returns An Effect that resolves to the merged PDF as a Uint8Array\n * @throws {UploadistaError} When merging fails\n */\n mergePdfs: (\n options: MergePdfParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Extracts metadata from a PDF document.\n *\n * @param input - The input PDF as a Uint8Array\n * @returns An Effect that resolves to DocumentMetadata with comprehensive document information\n * @throws {UploadistaError} When metadata extraction fails\n */\n getMetadata: (\n input: Uint8Array,\n ) => 
Effect.Effect<DocumentMetadata, UploadistaError>;\n};\n\n/**\n * Context tag for the Document Plugin.\n *\n * This tag provides a type-safe way to access document processing functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { DocumentPlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const documentPlugin = yield* DocumentPlugin;\n * const text = yield* documentPlugin.extractText(pdfData);\n * const metadata = yield* documentPlugin.getMetadata(pdfData);\n * return { text, metadata };\n * });\n * ```\n */\nexport class DocumentPlugin extends Context.Tag(\"DocumentPlugin\")<\n DocumentPlugin,\n DocumentPluginShape\n>() {}\n\nexport type DocumentPluginLayer = Layer.Layer<DocumentPlugin, never, never>;\n","import { Context, type Effect, type Layer } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\n\n/**\n * Context information for AI image processing operations.\n * Contains client identification for tracking and billing purposes.\n */\nexport type ImageAiContext = {\n /** Unique identifier for the client making the request, or null if not available */\n clientId: string | null;\n};\n\n/**\n * Shape definition for the Image AI Plugin interface.\n * Defines the contract that all image AI implementations must follow.\n */\nexport type ImageAiPluginShape = {\n /**\n * Removes the background from an image using AI processing.\n *\n * @param inputUrl - The URL of the input image to process\n * @param context - Context information including client ID for tracking\n * @returns An Effect that resolves to an object containing the output image URL\n * @throws {UploadistaError} When the background removal fails\n */\n removeBackground: (\n inputUrl: string,\n context: ImageAiContext,\n ) => Effect.Effect<{ outputUrl: string }, UploadistaError>;\n\n /**\n * Generates a textual description of an image using AI analysis.\n *\n * @param inputUrl - The URL of the input image to analyze\n * @param context - Context information including client ID for tracking\n * @returns An Effect that resolves to an object containing the image description\n * @throws {UploadistaError} When the image analysis fails\n */\n describeImage: (\n inputUrl: string,\n context: ImageAiContext,\n ) => Effect.Effect<{ description: string }, UploadistaError>;\n};\n\n/**\n * Context tag for the Image AI Plugin.\n *\n * This tag provides a type-safe way to access image AI functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { ImageAiPlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const imageAi = yield* ImageAiPlugin;\n * const result = yield* imageAi.removeBackground(imageUrl, { clientId: \"user123\" });\n * return result.outputUrl;\n * });\n * ```\n */\nexport class ImageAiPlugin extends Context.Tag(\"ImageAiPlugin\")<\n ImageAiPlugin,\n ImageAiPluginShape\n>() {}\n\nexport type ImageAiPluginLayer = Layer.Layer<ImageAiPlugin, never, never>;\n","import { Context, type Effect, type Layer, type Stream } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\nimport type { OptimizeParams } from \"./types/optimize-node\";\nimport type { ResizeParams } from \"./types/resize-node\";\nimport type { Transformation } from \"./types/transform-image-node\";\n\n/**\n * Shape definition for 
the Image Plugin interface.\n * Defines the contract that all image processing implementations must follow.\n */\nexport type ImagePluginShape = {\n /**\n * Optimizes an image by adjusting quality and format.\n *\n * @param input - The input image as a Uint8Array\n * @param options - Optimization parameters including quality and format\n * @returns An Effect that resolves to the optimized image as a Uint8Array\n * @throws {UploadistaError} When image optimization fails\n */\n optimize: (\n input: Uint8Array,\n options: OptimizeParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Resizes an image to specified dimensions.\n *\n * @param input - The input image as a Uint8Array\n * @param options - Resize parameters including width, height, and fit mode\n * @returns An Effect that resolves to the resized image as a Uint8Array\n * @throws {UploadistaError} When image resizing fails\n */\n resize: (\n input: Uint8Array,\n options: ResizeParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Applies a single transformation to an image.\n *\n * This method is used by the transform image node to apply individual transformations\n * in a chain. Each transformation receives the output of the previous transformation.\n *\n * @param input - The input image as a Uint8Array\n * @param transformation - The transformation to apply (discriminated union)\n * @returns An Effect that resolves to the transformed image as a Uint8Array\n * @throws {UploadistaError} When transformation fails or is unsupported by the plugin\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const imagePlugin = yield* ImagePlugin;\n *\n * // Apply a single transformation\n * const blurred = yield* imagePlugin.transform(imageData, {\n * type: 'blur',\n * sigma: 5.0\n * });\n *\n * // Chain multiple transformations\n * const resized = yield* imagePlugin.transform(blurred, {\n * type: 'resize',\n * width: 800,\n * height: 600,\n * fit: 'cover'\n * });\n *\n * return resized;\n * });\n * ```\n */\n transform: (\n input: Uint8Array,\n transformation: Transformation,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Optimizes an image using streaming for memory-efficient processing of large files.\n *\n * This method processes image data as a stream, which is beneficial for large images\n * where loading the entire file into memory would be problematic.\n *\n * Note: Image processing inherently requires decoding the full image, so memory\n * savings are primarily from avoiding double-buffering. 
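Implementations typically\n * collect the incoming chunks, decode the image once, and re-emit the encoded\n * result as a stream. 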
The streaming interface\n * allows better pipeline integration with DataStore streaming reads.\n *\n * @param input - The input image as an Effect Stream of Uint8Array chunks\n * @param options - Optimization parameters including quality and format\n * @returns An Effect that resolves to a Stream of the optimized image bytes\n * @throws {UploadistaError} When image optimization fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const imagePlugin = yield* ImagePlugin;\n * const inputStream = yield* dataStore.readStream(fileId);\n * const outputStream = yield* imagePlugin.optimizeStream(inputStream, {\n * quality: 80,\n * format: \"webp\"\n * });\n * return outputStream;\n * });\n * ```\n */\n optimizeStream?: (\n input: Stream.Stream<Uint8Array, UploadistaError>,\n options: OptimizeParams,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n\n /**\n * Resizes an image using streaming for memory-efficient processing of large files.\n *\n * This method processes image data as a stream. Like other image operations,\n * the full image must be decoded before processing, but the streaming interface\n * avoids double-buffering when combined with streaming DataStore reads and writes.\n *\n * @param input - The input image as an Effect Stream of Uint8Array chunks\n * @param options - Resize parameters including width, height, and fit mode\n * @returns An Effect that resolves to a Stream of the resized image bytes\n * @throws {UploadistaError} When image resizing fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const imagePlugin = yield* ImagePlugin;\n * const inputStream = yield* dataStore.readStream(fileId);\n * const outputStream = yield* imagePlugin.resizeStream(inputStream, {\n * width: 800,\n * height: 600,\n * fit: \"cover\"\n * });\n * return outputStream;\n * });\n * ```\n */\n resizeStream?: (\n input: Stream.Stream<Uint8Array, UploadistaError>,\n options: ResizeParams,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n\n /**\n * Applies a single transformation using streaming for memory-efficient processing.\n *\n * This method processes image data as a stream. 
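As with resizeStream, the\n * full image is still decoded before the transformation is applied. 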
The streaming interface\n * allows better pipeline integration with DataStore streaming reads and writes,\n * reducing peak memory usage for large files.\n *\n * @param input - The input image as an Effect Stream of Uint8Array chunks\n * @param transformation - The transformation to apply\n * @returns An Effect that resolves to a Stream of the transformed image bytes\n * @throws {UploadistaError} When transformation fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const imagePlugin = yield* ImagePlugin;\n * const inputStream = yield* dataStore.readStream(fileId);\n * const outputStream = yield* imagePlugin.transformStream(inputStream, {\n * type: 'blur',\n * sigma: 5.0\n * });\n * return outputStream;\n * });\n * ```\n */\n transformStream?: (\n input: Stream.Stream<Uint8Array, UploadistaError>,\n transformation: Transformation,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n\n /**\n * Indicates whether this plugin supports streaming operations.\n * Returns true if streaming methods (optimizeStream, resizeStream, transformStream) are available.\n */\n supportsStreaming?: boolean;\n};\n\n/**\n * Context tag for the Image Plugin.\n *\n * This tag provides a type-safe way to access image processing functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { ImagePlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const imagePlugin = yield* ImagePlugin;\n * const optimized = yield* imagePlugin.optimize(imageData, { quality: 80, format: \"webp\" });\n * const resized = yield* imagePlugin.resize(optimized, { width: 800, height: 600, fit: \"cover\" });\n * return resized;\n * });\n * ```\n */\nexport class ImagePlugin extends Context.Tag(\"ImagePlugin\")<\n ImagePlugin,\n ImagePluginShape\n>() {}\n\nexport type ImagePluginLayer = Layer.Layer<ImagePlugin, never, never>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating describe image node parameters.\n * Defines the structure and validation rules for image description requests.\n */\nexport const describeImageParamsSchema = z.object({\n /** Optional service type to use for image description (currently supports \"replicate\") */\n serviceType: z.enum([\"replicate\"]).optional(),\n});\n\n/**\n * Parameters for the describe image node.\n * Controls which AI service to use for generating image descriptions.\n */\nexport type DescribeImageParams = z.infer<typeof describeImageParamsSchema>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for video metadata extracted by the describe operation.\n * Defines the structure and validation rules for video metadata.\n */\nexport const describeVideoMetadataSchema = z.object({\n /** Video duration in seconds */\n duration: z.number().nonnegative(),\n /** Video width in pixels */\n width: z.number().positive(),\n /** Video height in pixels */\n height: z.number().positive(),\n /** Video codec name */\n codec: z.string(),\n /** Container format name */\n format: z.string(),\n /** Video bitrate in bits per second */\n bitrate: z.number().nonnegative(),\n /** Frame rate (fps) */\n frameRate: z.number().positive(),\n /** Aspect ratio as string (e.g., \"16:9\") */\n aspectRatio: z.string(),\n /** Whether video has an audio track */\n hasAudio: z.boolean(),\n /** Audio codec name (if hasAudio is true) */\n audioCodec: z.string().optional(),\n /** Audio bitrate in bits 
per second (if hasAudio is true) */\n audioBitrate: z.number().nonnegative().optional(),\n /** File size in bytes */\n size: z.number().nonnegative(),\n});\n\n/**\n * Video metadata extracted by the describe operation.\n * Contains comprehensive information about video properties, codecs, and audio.\n */\nexport type DescribeVideoMetadata = z.infer<typeof describeVideoMetadataSchema>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating video frame extraction parameters.\n * Defines the structure and validation rules for extracting a single frame from video.\n */\nexport const extractFrameVideoParamsSchema = z.object({\n /** Timestamp in seconds where to extract the frame */\n timestamp: z.number().nonnegative(),\n /** Output image format */\n format: z.enum([\"png\", \"jpeg\"]).optional(),\n /** JPEG quality 1-100 (only for jpeg format) */\n quality: z.number().min(1).max(100).optional(),\n});\n\n/**\n * Parameters for the video frame extraction node.\n * Controls the timestamp and output format for extracting a single frame from video.\n */\nexport type ExtractFrameVideoParams = z.infer<\n typeof extractFrameVideoParamsSchema\n>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating image optimization parameters.\n * Defines the structure and validation rules for image optimization requests.\n */\nexport const optimizeParamsSchema = z.object({\n /** Image quality as a percentage (0-100) */\n quality: z.number().min(0).max(100),\n /** Output image format */\n format: z.enum([\"jpeg\", \"webp\", \"png\", \"avif\"] as const),\n});\n\n/**\n * Parameters for the image optimization node.\n * Controls quality and format settings for image optimization.\n */\nexport type OptimizeParams = z.infer<typeof optimizeParamsSchema>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating remove background node parameters.\n * Defines the structure and validation rules for background removal requests.\n */\nexport const removeBackgroundParamsSchema = z.object({\n /** Optional service type to use for background removal (currently supports \"replicate\") */\n serviceType: z.enum([\"replicate\"]).optional(),\n});\n\n/**\n * Parameters for the remove background node.\n * Controls which AI service to use for background removal processing.\n */\nexport type RemoveBackgroundParams = z.infer<\n typeof removeBackgroundParamsSchema\n>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating image resize parameters.\n * Defines the structure and validation rules for image resizing requests.\n * Requires at least one dimension (width or height) to be specified.\n */\nexport const resizeParamsSchema = z\n .object({\n /** Target width in pixels (optional) */\n width: z.number().positive().optional(),\n /** Target height in pixels (optional) */\n height: z.number().positive().optional(),\n /** How the image should fit within the specified dimensions */\n fit: z.enum([\"contain\", \"cover\", \"fill\"]),\n })\n .refine(\n (data) => data.width || data.height,\n \"Either width or height must be specified for resize\",\n );\n\n/**\n * Parameters for the image resize node.\n * Controls the target dimensions and fitting behavior for image resizing.\n */\nexport type ResizeParams = z.infer<typeof resizeParamsSchema>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating video resize parameters.\n * Defines the structure and validation rules for video resolution changes.\n * Requires at least one dimension (width or height) to be specified.\n */\nexport const 
resizeVideoParamsSchema = z\n .object({\n /** Target width in pixels */\n width: z.number().positive().optional(),\n /** Target height in pixels */\n height: z.number().positive().optional(),\n /** Aspect ratio handling mode */\n aspectRatio: z.enum([\"keep\", \"ignore\"]).optional(),\n /** Scaling algorithm quality */\n scaling: z.enum([\"bicubic\", \"bilinear\", \"lanczos\"]).optional(),\n })\n .refine(\n (data) => data.width || data.height,\n \"Either width or height must be specified for video resize\",\n );\n\n/**\n * Parameters for the video resize node.\n * Controls the target dimensions and aspect ratio handling for video resizing.\n */\nexport type ResizeVideoParams = z.infer<typeof resizeVideoParamsSchema>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating video transcode parameters.\n * Defines the structure and validation rules for video format and codec conversion.\n */\nexport const transcodeVideoParamsSchema = z.object({\n /** Output container format */\n format: z.enum([\"mp4\", \"webm\", \"mov\", \"avi\"]),\n /** Video codec (optional, defaults to format's default) */\n codec: z.enum([\"h264\", \"h265\", \"vp9\", \"av1\"]).optional(),\n /** Video bitrate (e.g., \"1000k\", \"2M\") */\n videoBitrate: z.string().optional(),\n /** Audio bitrate (e.g., \"128k\", \"192k\") */\n audioBitrate: z.string().optional(),\n /** Audio codec (optional, defaults to format's default) */\n audioCodec: z.enum([\"aac\", \"mp3\", \"opus\", \"vorbis\"]).optional(),\n});\n\n/**\n * Parameters for the video transcode node.\n * Controls output format, codecs, and quality settings for video transcoding.\n */\nexport type TranscodeVideoParams = z.infer<typeof transcodeVideoParamsSchema>;\n","import { z } from \"zod\";\n\n/**\n * Type of transformation to apply to an image.\n */\nexport type TransformationType =\n | \"resize\"\n | \"blur\"\n | \"rotate\"\n | \"flip\"\n | \"grayscale\"\n | \"sepia\"\n | \"brightness\"\n | \"contrast\"\n | \"sharpen\"\n | \"watermark\"\n | \"logo\"\n | \"text\";\n\n// ============================================================================\n// Basic Transformations\n// ============================================================================\n\n/**\n * Resize transformation parameters.\n * Resizes the image to the specified dimensions with the given fit mode.\n */\nexport const resizeTransformSchema = z.object({\n type: z.literal(\"resize\"),\n /** Target width in pixels (optional) */\n width: z.number().positive().optional(),\n /** Target height in pixels (optional) */\n height: z.number().positive().optional(),\n /** How the image should fit within the specified dimensions */\n fit: z.enum([\"contain\", \"cover\", \"fill\"]),\n});\n\nexport type ResizeTransform = z.infer<typeof resizeTransformSchema>;\n\n/**\n * Blur transformation parameters.\n * Applies Gaussian blur to the image.\n */\nexport const blurTransformSchema = z.object({\n type: z.literal(\"blur\"),\n /** Blur strength (sigma). Range: 0.3 to 1000 */\n sigma: z.number().min(0.3).max(1000),\n});\n\nexport type BlurTransform = z.infer<typeof blurTransformSchema>;\n\n/**\n * Rotate transformation parameters.\n * Rotates the image by the specified angle.\n */\nexport const rotateTransformSchema = z.object({\n type: z.literal(\"rotate\"),\n /** Rotation angle in degrees. Positive values rotate clockwise. 
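Negative values rotate counter-clockwise. 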
*/\n angle: z.number(),\n /** Background color for exposed areas (optional, defaults to transparent) */\n background: z.string().optional(),\n});\n\nexport type RotateTransform = z.infer<typeof rotateTransformSchema>;\n\n/**\n * Flip transformation parameters.\n * Flips the image horizontally or vertically.\n */\nexport const flipTransformSchema = z.object({\n type: z.literal(\"flip\"),\n /** Direction to flip the image */\n direction: z.enum([\"horizontal\", \"vertical\"]),\n});\n\nexport type FlipTransform = z.infer<typeof flipTransformSchema>;\n\n// ============================================================================\n// Filter Transformations\n// ============================================================================\n\n/**\n * Grayscale transformation parameters.\n * Converts the image to grayscale.\n */\nexport const grayscaleTransformSchema = z.object({\n type: z.literal(\"grayscale\"),\n});\n\nexport type GrayscaleTransform = z.infer<typeof grayscaleTransformSchema>;\n\n/**\n * Sepia transformation parameters.\n * Applies a sepia tone effect to the image.\n */\nexport const sepiaTransformSchema = z.object({\n type: z.literal(\"sepia\"),\n});\n\nexport type SepiaTransform = z.infer<typeof sepiaTransformSchema>;\n\n/**\n * Brightness transformation parameters.\n * Adjusts the brightness of the image.\n */\nexport const brightnessTransformSchema = z.object({\n type: z.literal(\"brightness\"),\n /** Brightness adjustment value. Range: -100 to +100. 0 = no change. */\n value: z.number().min(-100).max(100),\n});\n\nexport type BrightnessTransform = z.infer<typeof brightnessTransformSchema>;\n\n/**\n * Contrast transformation parameters.\n * Adjusts the contrast of the image.\n */\nexport const contrastTransformSchema = z.object({\n type: z.literal(\"contrast\"),\n /** Contrast adjustment value. Range: -100 to +100. 0 = no change. */\n value: z.number().min(-100).max(100),\n});\n\nexport type ContrastTransform = z.infer<typeof contrastTransformSchema>;\n\n// ============================================================================\n// Effect Transformations\n// ============================================================================\n\n/**\n * Sharpen transformation parameters.\n * Applies sharpening to the image.\n */\nexport const sharpenTransformSchema = z.object({\n type: z.literal(\"sharpen\"),\n /** Sharpening strength (sigma). Optional, uses default if not specified. */\n sigma: z.number().positive().optional(),\n});\n\nexport type SharpenTransform = z.infer<typeof sharpenTransformSchema>;\n\n// ============================================================================\n// Advanced Transformations\n// ============================================================================\n\n/**\n * Position for overlays (watermarks, logos, text).\n */\nexport type OverlayPosition =\n | \"top-left\"\n | \"top-right\"\n | \"bottom-left\"\n | \"bottom-right\"\n | \"center\";\n\n/**\n * Watermark transformation parameters.\n * Overlays a watermark image on the main image.\n */\nexport const watermarkTransformSchema = z.object({\n type: z.literal(\"watermark\"),\n /** URL to the watermark image file (e.g., https://example.com/watermark.png) */\n imagePath: z.string().min(1).url(),\n /** Position of the watermark on the image */\n position: z.enum([\n \"top-left\",\n \"top-right\",\n \"bottom-left\",\n \"bottom-right\",\n \"center\",\n ]),\n /** Opacity of the watermark. 
Range: 0 (transparent) to 1 (opaque) */\n opacity: z.number().min(0).max(1),\n /** Horizontal offset in pixels from the position anchor (optional) */\n offsetX: z.number().optional(),\n /** Vertical offset in pixels from the position anchor (optional) */\n offsetY: z.number().optional(),\n});\n\nexport type WatermarkTransform = z.infer<typeof watermarkTransformSchema>;\n\n/**\n * Logo transformation parameters.\n * Overlays a logo image on the main image with scaling.\n */\nexport const logoTransformSchema = z.object({\n type: z.literal(\"logo\"),\n /** URL to the logo image file (e.g., https://example.com/logo.png) */\n imagePath: z.string().min(1).url(),\n /** Position of the logo on the image */\n position: z.enum([\n \"top-left\",\n \"top-right\",\n \"bottom-left\",\n \"bottom-right\",\n \"center\",\n ]),\n /** Scale factor for the logo. Range: 0.1 to 2.0 */\n scale: z.number().min(0.1).max(2.0),\n /** Horizontal offset in pixels from the position anchor (optional) */\n offsetX: z.number().optional(),\n /** Vertical offset in pixels from the position anchor (optional) */\n offsetY: z.number().optional(),\n});\n\nexport type LogoTransform = z.infer<typeof logoTransformSchema>;\n\n/**\n * Text transformation parameters.\n * Overlays text on the image.\n */\nexport const textTransformSchema = z.object({\n type: z.literal(\"text\"),\n /** Text content to overlay */\n text: z.string().min(1),\n /** Position of the text on the image */\n position: z.enum([\n \"top-left\",\n \"top-right\",\n \"bottom-left\",\n \"bottom-right\",\n \"center\",\n ]),\n /** Font size in pixels */\n fontSize: z.number().positive(),\n /** Text color (hex code or named color) */\n color: z.string().min(1),\n /** Font family name (optional) */\n fontFamily: z.string().optional(),\n /** Horizontal offset in pixels from the position anchor (optional) */\n offsetX: z.number().optional(),\n /** Vertical offset in pixels from the position anchor (optional) */\n offsetY: z.number().optional(),\n});\n\nexport type TextTransform = z.infer<typeof textTransformSchema>;\n\n// ============================================================================\n// Discriminated Union\n// ============================================================================\n\n/**\n * Schema for validating any transformation type.\n * This is a discriminated union of all transformation schemas.\n */\nexport const transformationSchema = z.discriminatedUnion(\"type\", [\n resizeTransformSchema,\n blurTransformSchema,\n rotateTransformSchema,\n flipTransformSchema,\n grayscaleTransformSchema,\n sepiaTransformSchema,\n brightnessTransformSchema,\n contrastTransformSchema,\n sharpenTransformSchema,\n watermarkTransformSchema,\n logoTransformSchema,\n textTransformSchema,\n]);\n\n/**\n * A single image transformation operation.\n * This is a discriminated union type that can represent any transformation.\n */\nexport type Transformation = z.infer<typeof transformationSchema>;\n\n// ============================================================================\n// Transform Image Node Parameters\n// ============================================================================\n\n/**\n * Parameters for the transform image node.\n * Contains an ordered array of transformations to apply sequentially.\n */\nexport const transformImageParamsSchema = z.object({\n /** Ordered array of transformations to apply. Applied sequentially. 
*/\n transformations: z.array(transformationSchema).min(1),\n});\n\n/**\n * Parameters for the transform image node.\n */\nexport type TransformImageParams = z.infer<typeof transformImageParamsSchema>;\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating video trim parameters.\n * Defines the structure and validation rules for extracting video segments.\n */\nexport const trimVideoParamsSchema = z\n .object({\n /** Start time in seconds */\n startTime: z.number().nonnegative(),\n /** End time in seconds (optional, if omitted goes to end) */\n endTime: z.number().positive().optional(),\n /** Duration in seconds (alternative to endTime) */\n duration: z.number().positive().optional(),\n })\n .refine(\n (data) => !data.endTime || !data.duration,\n \"Cannot specify both endTime and duration\",\n )\n .refine(\n (data) => !data.endTime || data.endTime > data.startTime,\n \"endTime must be greater than startTime\",\n );\n\n/**\n * Parameters for the video trim node.\n * Controls the time range for extracting video segments.\n */\nexport type TrimVideoParams = z.infer<typeof trimVideoParamsSchema>;\n","import { Context, type Effect, type Layer, type Stream } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\nimport type { DescribeVideoMetadata } from \"./types/describe-video-node\";\nimport type { ExtractFrameVideoParams } from \"./types/extract-frame-video-node\";\nimport type { ResizeVideoParams } from \"./types/resize-video-node\";\nimport type { TranscodeVideoParams } from \"./types/transcode-video-node\";\nimport type { TrimVideoParams } from \"./types/trim-video-node\";\n\n/**\n * Input type for streaming video operations.\n * Accepts either buffered input (Uint8Array) or streaming input (Effect Stream).\n * Streaming input is only supported for specific formats like MPEG-TS.\n */\nexport type VideoStreamInput =\n | Uint8Array\n | Stream.Stream<Uint8Array, UploadistaError>;\n\n/**\n * Options for streaming video operations.\n */\nexport type VideoStreamOptions = {\n /**\n * Hint for input format to help determine if streaming input is possible.\n * MPEG-TS format supports true streaming input; other formats require buffering.\n */\n inputFormat?: string;\n};\n\n/**\n * Shape definition for the Video Plugin interface.\n * Defines the contract that all video processing implementations must follow.\n */\nexport type VideoPluginShape = {\n /**\n * Transcodes a video to a different format/codec.\n *\n * @param input - The input video as a Uint8Array\n * @param options - Transcode parameters including format, codec, and bitrates\n * @returns An Effect that resolves to the transcoded video as a Uint8Array\n * @throws {UploadistaError} When video transcoding fails\n */\n transcode: (\n input: Uint8Array,\n options: TranscodeVideoParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Resizes a video to specified dimensions.\n *\n * @param input - The input video as a Uint8Array\n * @param options - Resize parameters including width, height, and aspect ratio handling\n * @returns An Effect that resolves to the resized video as a Uint8Array\n * @throws {UploadistaError} When video resizing fails\n */\n resize: (\n input: Uint8Array,\n options: ResizeVideoParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Trims a video to extract a segment by time range.\n *\n * @param input - The input video as a Uint8Array\n * @param options - Trim parameters including start time and end time/duration\n * @returns An Effect that resolves to the 
trimmed video as a Uint8Array\n * @throws {UploadistaError} When video trimming fails\n */\n trim: (\n input: Uint8Array,\n options: TrimVideoParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Extracts a single frame from the video at a specific timestamp.\n *\n * @param input - The input video as a Uint8Array\n * @param options - Frame extraction parameters including timestamp and format\n * @returns An Effect that resolves to the extracted frame as a Uint8Array (image)\n * @throws {UploadistaError} When frame extraction fails\n */\n extractFrame: (\n input: Uint8Array,\n options: ExtractFrameVideoParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n\n /**\n * Extracts metadata from a video file.\n *\n * @param input - The input video as a Uint8Array\n * @returns An Effect that resolves to VideoMetadata with comprehensive video information\n * @throws {UploadistaError} When metadata extraction fails\n */\n describe: (\n input: Uint8Array,\n ) => Effect.Effect<DescribeVideoMetadata, UploadistaError>;\n\n /**\n * Transcodes a video using streaming for memory-efficient processing of large files.\n *\n * This method outputs the transcoded video as a stream, reducing peak memory usage.\n * For input, it accepts either a buffered Uint8Array or a Stream. Streaming input\n * is only supported for MPEG-TS format; other formats will be buffered internally.\n *\n * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)\n * @param options - Transcode parameters including format, codec, and bitrates\n * @param streamOptions - Optional streaming configuration including input format hint\n * @returns An Effect that resolves to a Stream of the transcoded video bytes\n * @throws {UploadistaError} When video transcoding fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const videoPlugin = yield* VideoPlugin;\n * const inputStream = yield* dataStore.readStream(fileId);\n * const outputStream = yield* videoPlugin.transcodeStream(inputStream, {\n * format: \"mp4\",\n * codec: \"h264\"\n * }, { inputFormat: \"video/mp2t\" });\n * return outputStream;\n * });\n * ```\n */\n transcodeStream?: (\n input: VideoStreamInput,\n options: TranscodeVideoParams,\n streamOptions?: VideoStreamOptions,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n\n /**\n * Resizes a video using streaming for memory-efficient processing of large files.\n *\n * This method outputs the resized video as a stream, reducing peak memory usage.\n * For input, it accepts either a buffered Uint8Array or a Stream. 
Streaming input\n * is only supported for MPEG-TS format; other formats will be buffered internally.\n *\n * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)\n * @param options - Resize parameters including width, height, and aspect ratio\n * @param streamOptions - Optional streaming configuration including input format hint\n * @returns An Effect that resolves to a Stream of the resized video bytes\n * @throws {UploadistaError} When video resizing fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const videoPlugin = yield* VideoPlugin;\n * const inputStream = yield* dataStore.readStream(fileId);\n * const outputStream = yield* videoPlugin.resizeStream(inputStream, {\n * width: 1280,\n * height: 720,\n * aspectRatio: \"keep\"\n * });\n * return outputStream;\n * });\n * ```\n */\n resizeStream?: (\n input: VideoStreamInput,\n options: ResizeVideoParams,\n streamOptions?: VideoStreamOptions,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n\n /**\n * Trims a video using streaming for memory-efficient processing of large files.\n *\n * This method outputs the trimmed video as a stream, reducing peak memory usage.\n * For input, it accepts either a buffered Uint8Array or a Stream. Streaming input\n * is only supported for MPEG-TS format; other formats will be buffered internally.\n *\n * @param input - The input video as Uint8Array or Stream (MPEG-TS only for streaming)\n * @param options - Trim parameters including start time and end time/duration\n * @param streamOptions - Optional streaming configuration including input format hint\n * @returns An Effect that resolves to a Stream of the trimmed video bytes\n * @throws {UploadistaError} When video trimming fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const videoPlugin = yield* VideoPlugin;\n * const inputStream = yield* dataStore.readStream(fileId);\n * const outputStream = yield* videoPlugin.trimStream(inputStream, {\n * startTime: 10,\n * endTime: 30\n * });\n * return outputStream;\n * });\n * ```\n */\n trimStream?: (\n input: VideoStreamInput,\n options: TrimVideoParams,\n streamOptions?: VideoStreamOptions,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n\n /**\n * Indicates whether this plugin supports streaming operations.\n * Returns true if streaming methods are available and functional.\n */\n supportsStreaming?: boolean;\n};\n\n/**\n * Context tag for the Video Plugin.\n *\n * This tag provides a type-safe way to access video processing functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { VideoPlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const videoPlugin = yield* VideoPlugin;\n * const transcoded = yield* videoPlugin.transcode(videoData, { format: \"webm\", codec: \"vp9\" });\n * const resized = yield* videoPlugin.resize(transcoded, { width: 1280, height: 720, aspectRatio: \"keep\" });\n * return resized;\n * });\n * ```\n */\nexport class VideoPlugin extends Context.Tag(\"VideoPlugin\")<\n VideoPlugin,\n VideoPluginShape\n>() {}\n\nexport type VideoPluginLayer = Layer.Layer<VideoPlugin, never, never>;\n","import { Context, type Effect, type Layer } from \"effect\";\nimport type { UploadistaError } from \"../../errors\";\n\n/**\n * Result of a virus scan operation.\n 
*/\nexport type ScanResult = {\n /**\n * Whether the file is clean (no viruses detected)\n */\n isClean: boolean;\n\n /**\n * Array of detected virus/malware names (empty if clean)\n */\n detectedViruses: string[];\n};\n\n/**\n * Comprehensive metadata about a virus scan operation.\n */\nexport type ScanMetadata = {\n /**\n * Whether the file was scanned\n */\n scanned: boolean;\n\n /**\n * Whether the file is clean (no viruses detected)\n */\n isClean: boolean;\n\n /**\n * Array of detected virus/malware names (empty if clean)\n */\n detectedViruses: string[];\n\n /**\n * ISO 8601 timestamp of when the scan was performed\n */\n scanDate: string;\n\n /**\n * Version of the antivirus engine used\n */\n engineVersion: string;\n\n /**\n * ISO 8601 timestamp of when virus definitions were last updated\n */\n definitionsDate: string;\n};\n\n/**\n * Shape definition for the Virus Scan Plugin interface.\n * Defines the contract that all virus scanning implementations must follow.\n */\nexport type VirusScanPluginShape = {\n /**\n * Scans a file for viruses and malware.\n *\n * @param input - The input file as a Uint8Array\n * @returns An Effect that resolves to ScanResult with detection information\n * @throws {UploadistaError} When virus scanning fails or ClamAV is unavailable\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const virusScanPlugin = yield* VirusScanPlugin;\n * const result = yield* virusScanPlugin.scan(fileData);\n * if (!result.isClean) {\n * console.log('Viruses detected:', result.detectedViruses);\n * }\n * });\n * ```\n */\n scan: (input: Uint8Array) => Effect.Effect<ScanResult, UploadistaError>;\n\n /**\n * Retrieves the version of the antivirus engine.\n *\n * @returns An Effect that resolves to the engine version string\n * @throws {UploadistaError} When version retrieval fails\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const virusScanPlugin = yield* VirusScanPlugin;\n * const version = yield* virusScanPlugin.getVersion();\n * console.log('ClamAV version:', version);\n * });\n * ```\n */\n getVersion: () => Effect.Effect<string, UploadistaError>;\n};\n\n/**\n * Context tag for the Virus Scan Plugin.\n *\n * This tag provides a type-safe way to access virus scanning functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { VirusScanPlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const virusScanPlugin = yield* VirusScanPlugin;\n * const result = yield* virusScanPlugin.scan(fileData);\n *\n * if (!result.isClean) {\n * // Handle infected file\n * return Effect.fail(new UploadistaError({\n * code: \"VIRUS_DETECTED\",\n * message: `Viruses detected: ${result.detectedViruses.join(', ')}`\n * }));\n * }\n *\n * return fileData;\n * });\n * ```\n */\nexport class VirusScanPlugin extends Context.Tag(\"VirusScanPlugin\")<\n VirusScanPlugin,\n VirusScanPluginShape\n>() {}\n\nexport type VirusScanPluginLayer = Layer.Layer<VirusScanPlugin, never, never>;\n","import { Context, type Effect, type Layer } from \"effect\";\nimport type { UploadFile } from \"@/types\";\nimport type { UploadistaError } from \"../../errors\";\n\n/**\n * Parameters for creating a ZIP archive.\n */\nexport type ZipParams = {\n /** Name of the ZIP file to create */\n zipName: string;\n /** Whether to include file metadata in the ZIP archive */\n includeMetadata: 
boolean;\n};\n\n/**\n * Input data structure for ZIP operations.\n * Represents a single file to be included in the ZIP archive.\n */\nexport type ZipInput = {\n /** Unique identifier for the file */\n id: string;\n /** Binary data of the file */\n data: Uint8Array;\n /** File metadata including name, size, type, etc. */\n metadata: UploadFile[\"metadata\"];\n};\n\n/**\n * Shape definition for the ZIP Plugin interface.\n * Defines the contract that all ZIP implementations must follow.\n */\nexport type ZipPluginShape = {\n /**\n * Creates a ZIP archive from multiple input files.\n *\n * @param inputs - Array of files to include in the ZIP archive\n * @param options - Configuration options for the ZIP creation\n * @returns An Effect that resolves to the ZIP file as a Uint8Array\n * @throws {UploadistaError} When ZIP creation fails\n */\n zip: (\n inputs: ZipInput[],\n options: ZipParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n // unzip: (input: ZipInput) => Effect.Effect<Uint8Array, UploadistaError>;\n};\n\n/**\n * Context tag for the ZIP Plugin.\n *\n * This tag provides a type-safe way to access ZIP functionality\n * throughout the application using Effect's dependency injection system.\n *\n * @example\n * ```typescript\n * import { ZipPlugin } from \"@uploadista/core/flow/plugins\";\n *\n * // In your flow node\n * const program = Effect.gen(function* () {\n * const zipPlugin = yield* ZipPlugin;\n * const zipData = yield* zipPlugin.zip(files, { zipName: \"archive.zip\", includeMetadata: true });\n * return zipData;\n * });\n * ```\n */\nexport class ZipPlugin extends Context.Tag(\"ZipPlugin\")<\n ZipPlugin,\n ZipPluginShape\n>() {}\n\nexport type ZipPluginLayer = Layer.Layer<ZipPlugin, never, never>;\n","/**\n * biome-ignore-all lint/suspicious/noExplicitAny: broadly-typed generics require runtime schema placeholders\n */\nimport { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport type { UploadistaError as CoreUploadistaError } from \"../errors\";\nimport { UploadistaError } from \"../errors\";\nimport type { UploadEngine } from \"../upload\";\nimport type { FlowEvent } from \"./event\";\nimport type { Flow, FlowExecutionResult } from \"./flow\";\nimport { createFlowWithSchema } from \"./flow\";\nimport { NodeType } from \"./node\";\nimport type { ExtractEffectRequirements, ResolveEffect } from \"./types\";\nimport type {\n FlowCircuitBreakerConfig,\n FlowEdge,\n FlowNode,\n TypeCompatibilityChecker,\n} from \"./types/flow-types\";\n\n/**\n * Defines a node that can be used in a typed flow.\n *\n * A node definition can be either:\n * - A plain FlowNode object\n * - An Effect that resolves to a FlowNode (for nodes requiring dependencies)\n *\n * @template TNodeError - The error types that the node can produce\n * @template TNodeRequirements - The services/dependencies the node requires\n */\nexport type NodeDefinition<TNodeError = never, TNodeRequirements = never> =\n | FlowNode<any, any, CoreUploadistaError>\n | Effect.Effect<\n FlowNode<any, any, CoreUploadistaError>,\n TNodeError,\n TNodeRequirements\n >;\n\n/**\n * A record mapping node IDs to their definitions.\n *\n * This is the primary type used for defining the nodes in a typed flow,\n * allowing TypeScript to infer input/output schemas and requirements.\n *\n * @example\n * ```typescript\n * const nodes = {\n * input: fileInputNode,\n * resize: Effect.succeed(imageResizeNode),\n * output: s3OutputNode\n * } satisfies NodeDefinitionsRecord;\n * ```\n */\nexport type NodeDefinitionsRecord = 
Record<string, NodeDefinition<any, any>>;\n\n/**\n * Extracts the error type from a NodeDefinition.\n *\n * If the node is an Effect, extracts its error type.\n * If the node is a plain FlowNode, returns never (no errors).\n */\ntype NodeDefinitionError<T> =\n T extends Effect.Effect<\n FlowNode<any, any, CoreUploadistaError>,\n infer TError,\n any\n >\n ? TError\n : never;\n\n/**\n * Extracts the requirements (dependencies) from a NodeDefinition.\n *\n * Uses the shared ExtractEffectRequirements utility for consistency.\n */\ntype NodeDefinitionRequirements<T> = ExtractEffectRequirements<T>;\n\n/**\n * Extracts all possible errors from all nodes in a flow as a union.\n *\n * This iterates through all nodes in the record and combines their\n * error types into a single union type.\n */\ntype NodesErrorUnion<TNodes extends NodeDefinitionsRecord> = {\n [K in keyof TNodes]: NodeDefinitionError<TNodes[K]>;\n}[keyof TNodes];\n\n/**\n * Extracts all service requirements from all nodes in a flow as a union.\n *\n * This iterates through all nodes in the record and combines their\n * requirement types into a single union type representing all services\n * needed by the flow.\n *\n * @template TNodes - The record of node definitions\n *\n * @example\n * ```typescript\n * const nodes = {\n * resize: imageResizeNode, // requires ImagePlugin\n * zip: zipNode, // requires ZipPlugin\n * };\n * type Requirements = NodesRequirementsUnion<typeof nodes>;\n * // Requirements = ImagePlugin | ZipPlugin\n * ```\n */\ntype NodesRequirementsUnion<TNodes extends NodeDefinitionsRecord> = {\n [K in keyof TNodes]: NodeDefinitionRequirements<TNodes[K]>;\n}[keyof TNodes];\n\n/**\n * Extracts all service requirements from a flow's nodes.\n *\n * This includes all services required by any node in the flow,\n * including UploadEngine (which is provided by the runtime).\n *\n * @template TNodes - The record of node definitions\n *\n * @example\n * ```typescript\n * const myFlow = createFlow({\n * nodes: {\n * input: fileInputNode,\n * process: imageProcessNode, // requires ImagePlugin\n * },\n * edges: [...]\n * });\n * type AllRequirements = FlowRequirements<typeof myFlow.nodes>;\n * // AllRequirements = ImagePlugin | UploadEngine\n * ```\n */\nexport type FlowRequirements<TNodes extends NodeDefinitionsRecord> =\n NodesRequirementsUnion<TNodes>;\n\n/**\n * Extracts plugin service requirements from a flow, excluding UploadEngine.\n *\n * This type is useful for determining which plugin layers need to be\n * provided when creating a server, as UploadEngine is automatically\n * provided by the runtime.\n *\n * @template TNodes - The record of node definitions\n *\n * @example\n * ```typescript\n * const myFlow = createFlow({\n * nodes: {\n * resize: imageResizeNode, // requires ImagePlugin\n * upload: s3OutputNode, // requires UploadEngine\n * },\n * edges: [...]\n * });\n * type PluginRequirements = FlowPluginRequirements<typeof myFlow.nodes>;\n * // PluginRequirements = ImagePlugin (UploadEngine excluded)\n * ```\n */\nexport type FlowPluginRequirements<TNodes extends NodeDefinitionsRecord> =\n Exclude<FlowRequirements<TNodes>, UploadEngine>;\n\n/**\n * Infers the concrete FlowNode type from a NodeDefinition.\n *\n * If the definition is already a FlowNode, returns it as-is.\n * If the definition is an Effect, extracts the FlowNode from the Effect's success type.\n *\n * Uses the shared ResolveEffect utility for consistency.\n */\ntype InferNode<T> =\n T extends FlowNode<any, any, CoreUploadistaError>\n ? 
T\n : ResolveEffect<T> extends FlowNode<any, any, CoreUploadistaError>\n ? ResolveEffect<T>\n : never;\n\ntype ResolvedNodesRecord<TNodes extends NodeDefinitionsRecord> = {\n [K in keyof TNodes]: InferNode<TNodes[K]>;\n};\n\ntype ExtractKeysByNodeType<\n TNodes extends NodeDefinitionsRecord,\n TType extends NodeType,\n> = {\n [K in keyof TNodes]: InferNode<TNodes[K]>[\"type\"] extends TType ? K : never;\n}[keyof TNodes];\n\ntype SchemaInfer<T> = T extends z.ZodTypeAny ? z.infer<T> : never;\n\nexport type FlowInputMap<TNodes extends NodeDefinitionsRecord> = {\n [K in Extract<\n ExtractKeysByNodeType<TNodes, NodeType.input>,\n string\n >]: SchemaInfer<InferNode<TNodes[K]>[\"inputSchema\"]>;\n};\n\n// Note: With sink-based outputs, any node can be an output if it has no outgoing edges.\n// Output map includes all potential outputs (sinks are determined at runtime by edges).\nexport type FlowOutputMap<TNodes extends NodeDefinitionsRecord> = {\n [K in Extract<keyof TNodes, string>]: SchemaInfer<\n InferNode<TNodes[K]>[\"outputSchema\"]\n >;\n};\n\ntype FlowInputUnion<TNodes extends NodeDefinitionsRecord> = {\n [K in Extract<\n ExtractKeysByNodeType<TNodes, NodeType.input>,\n string\n >]: SchemaInfer<InferNode<TNodes[K]>[\"inputSchema\"]>;\n}[Extract<ExtractKeysByNodeType<TNodes, NodeType.input>, string>];\n\n// With sink-based outputs, any node can be an output\ntype FlowOutputUnion<TNodes extends NodeDefinitionsRecord> = {\n [K in Extract<keyof TNodes, string>]: SchemaInfer<\n InferNode<TNodes[K]>[\"outputSchema\"]\n >;\n}[Extract<keyof TNodes, string>];\n\ntype NodeKey<TNodes extends NodeDefinitionsRecord> = Extract<\n keyof TNodes,\n string\n>;\n\nexport type TypedFlowEdge<TNodes extends NodeDefinitionsRecord> = {\n source: NodeKey<TNodes>;\n target: NodeKey<TNodes>;\n sourcePort?: string;\n targetPort?: string;\n};\n\nexport type TypedFlowConfig<TNodes extends NodeDefinitionsRecord> = {\n flowId: string;\n name: string;\n nodes: TNodes;\n edges: Array<TypedFlowEdge<TNodes>>;\n typeChecker?: TypeCompatibilityChecker;\n onEvent?: (\n event: FlowEvent,\n ) => Effect.Effect<{ eventId: string | null }, CoreUploadistaError>;\n parallelExecution?: {\n enabled?: boolean;\n maxConcurrency?: number;\n };\n inputSchema?: z.ZodTypeAny;\n outputSchema?: z.ZodTypeAny;\n hooks?: {\n /**\n * Called when a sink node (terminal node with no outgoing edges) produces an output.\n * This hook runs after auto-persistence for UploadFile outputs.\n *\n * Use this hook to perform additional post-processing such as:\n * - Saving output metadata to a database\n * - Tracking outputs in external systems\n * - Adding custom metadata to outputs\n * - Triggering downstream workflows\n *\n * **Important**: The hook must not have any service requirements (Effect requirements must be `never`).\n * All necessary services should be captured in the closure when defining the hook.\n *\n * @example\n * ```typescript\n * // Using Promise (simpler for most users)\n * hooks: {\n * onNodeOutput: async ({ output }) => {\n * await db.save(output);\n * return output;\n * }\n * }\n * ```\n */\n onNodeOutput?: <TOutput>(context: {\n output: TOutput;\n nodeId: string;\n flowId: string;\n jobId: string;\n storageId: string;\n clientId: string | null;\n }) => Effect.Effect<TOutput, CoreUploadistaError, never> | Promise<TOutput>;\n };\n /**\n * Circuit breaker configuration for resilience against external service failures.\n *\n * @example\n * ```typescript\n * circuitBreaker: {\n * defaults: { enabled: false },\n * nodeTypeOverrides: {\n * 
\"Describe Image\": {\n * enabled: true,\n * failureThreshold: 5,\n * resetTimeout: 60000,\n * fallback: { type: \"skip\", passThrough: true }\n * }\n * }\n * }\n * ```\n */\n circuitBreaker?: {\n /** Default circuit breaker config for all nodes */\n defaults?: FlowCircuitBreakerConfig;\n /** Override circuit breaker config per node type (node name) */\n nodeTypeOverrides?: Record<string, FlowCircuitBreakerConfig>;\n };\n};\n\ndeclare const typedFlowInputsSymbol: unique symbol;\ndeclare const typedFlowOutputsSymbol: unique symbol;\ndeclare const typedFlowPluginsSymbol: unique symbol;\n\n/**\n * A type-safe Flow that infers input/output types and requirements from its nodes.\n *\n * TypedFlow extends the base Flow type with additional type information that\n * allows TypeScript to verify inputs, outputs, and plugin requirements at compile time.\n *\n * The phantom type properties (using unique symbols) enable type-level metadata\n * without affecting runtime behavior, allowing other type utilities to extract\n * this information for validation purposes.\n *\n * @template TNodes - Record of node definitions used in the flow\n * @template TInputSchema - Zod schema for flow inputs (inferred from input nodes)\n * @template TOutputSchema - Zod schema for flow outputs (inferred from output nodes)\n *\n * @example\n * ```typescript\n * const myFlow = createFlow({\n * nodes: {\n * input: fileInputNode,\n * resize: imageResizeNode,\n * output: s3OutputNode\n * },\n * edges: [\n * { source: 'input', target: 'resize' },\n * { source: 'resize', target: 'output' }\n * ]\n * });\n *\n * // TypeScript infers:\n * // - Input types from fileInputNode.inputSchema\n * // - Output types from s3OutputNode.outputSchema\n * // - Requirements: ImagePlugin (from resize node)\n * ```\n */\nexport type TypedFlow<\n TNodes extends NodeDefinitionsRecord,\n TInputSchema extends z.ZodTypeAny,\n TOutputSchema extends z.ZodTypeAny,\n> = Flow<TInputSchema, TOutputSchema, FlowRequirements<TNodes>> & {\n run: (args: {\n inputs?: Partial<FlowInputMap<TNodes>>;\n storageId: string;\n jobId: string;\n }) => Effect.Effect<\n FlowExecutionResult<FlowOutputMap<TNodes>>,\n CoreUploadistaError,\n FlowRequirements<TNodes>\n >;\n resume: (args: {\n jobId: string;\n storageId: string;\n nodeResults: Record<string, unknown>;\n executionState: {\n executionOrder: string[];\n currentIndex: number;\n inputs: Partial<FlowInputMap<TNodes>>;\n };\n }) => Effect.Effect<\n FlowExecutionResult<FlowOutputMap<TNodes>>,\n CoreUploadistaError,\n FlowRequirements<TNodes>\n >;\n readonly [typedFlowInputsSymbol]?: FlowInputMap<TNodes>;\n readonly [typedFlowOutputsSymbol]?: FlowOutputMap<TNodes>;\n readonly [typedFlowPluginsSymbol]?: FlowPluginRequirements<TNodes>;\n};\n\nconst buildUnionSchema = (\n schemas: z.ZodTypeAny[],\n fallback: z.ZodTypeAny,\n): z.ZodTypeAny => {\n if (schemas.length === 0) {\n return fallback;\n }\n\n const [first, ...rest] = schemas as [z.ZodTypeAny, ...z.ZodTypeAny[]];\n return rest.reduce<z.ZodTypeAny>(\n (acc, schema) => z.union([acc, schema]),\n first,\n );\n};\n\nexport function createFlow<TNodes extends NodeDefinitionsRecord>(\n config: TypedFlowConfig<TNodes>,\n): Effect.Effect<\n TypedFlow<\n TNodes,\n z.ZodType<FlowInputUnion<TNodes>>,\n z.ZodType<FlowOutputUnion<TNodes>>\n >,\n NodesErrorUnion<TNodes> | UploadistaError,\n FlowRequirements<TNodes>\n> {\n return Effect.gen(function* () {\n const nodeEntries = Object.entries(config.nodes) as Array<\n [NodeKey<TNodes>, NodeDefinition]\n >;\n\n const resolveNode = (\n 
node: NodeDefinition,\n ): Effect.Effect<\n FlowNode<any, any, CoreUploadistaError>,\n NodesErrorUnion<TNodes>,\n FlowRequirements<TNodes>\n > =>\n Effect.isEffect(node)\n ? (node as Effect.Effect<\n FlowNode<any, any, CoreUploadistaError>,\n NodesErrorUnion<TNodes>,\n FlowRequirements<TNodes>\n >)\n : Effect.succeed(node as FlowNode<any, any, CoreUploadistaError>);\n\n const resolvedEntries = yield* Effect.forEach(nodeEntries, ([key, node]) =>\n Effect.flatMap(resolveNode(node), (resolvedNode) => {\n if (resolvedNode.id !== key) {\n return Effect.fail(\n UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(\n `Node key ${key} does not match node id ${resolvedNode.id}`,\n ),\n }),\n );\n }\n return Effect.succeed([key, resolvedNode] as const);\n }),\n );\n\n const resolvedRecord = Object.fromEntries(\n resolvedEntries,\n ) as ResolvedNodesRecord<TNodes>;\n const resolvedNodes = resolvedEntries.map(([, node]) => node);\n\n const inputSchemas = resolvedEntries\n .filter(([, node]) => node.type === NodeType.input)\n .map(([, node]) => node.inputSchema);\n\n // Build flow edges first (needed for sink detection)\n const flowEdges: FlowEdge[] = config.edges.map((edge) => ({\n source: resolvedRecord[edge.source]?.id ?? edge.source,\n target: resolvedRecord[edge.target]?.id ?? edge.target,\n sourcePort: edge.sourcePort,\n targetPort: edge.targetPort,\n }));\n\n // With sink-based outputs, determine sinks by checking edges\n const sinkNodeIds = new Set(\n resolvedEntries\n .map(([key]) => resolvedRecord[key]?.id)\n .filter(\n (nodeId) =>\n nodeId && !flowEdges.some((edge) => edge.source === nodeId),\n ),\n );\n\n const outputSchemas = resolvedEntries\n .filter(([, node]) => sinkNodeIds.has(node.id))\n .map(([, node]) => node.outputSchema);\n\n const inputSchema =\n config.inputSchema ?? buildUnionSchema(inputSchemas, z.unknown());\n\n const outputSchema =\n config.outputSchema ?? buildUnionSchema(outputSchemas, z.unknown());\n\n const flow = yield* createFlowWithSchema({\n flowId: config.flowId,\n name: config.name,\n nodes: resolvedNodes,\n edges: flowEdges,\n inputSchema,\n outputSchema,\n typeChecker: config.typeChecker,\n onEvent: config.onEvent,\n parallelExecution: config.parallelExecution,\n hooks: config.hooks,\n circuitBreaker: config.circuitBreaker,\n });\n\n return flow as unknown as TypedFlow<\n TNodes,\n z.ZodType<FlowInputUnion<TNodes>>,\n z.ZodType<FlowOutputUnion<TNodes>>\n >;\n });\n}\n","/**\n * Flow execution argument schemas and types.\n *\n * Defines and validates the arguments passed when running a flow,\n * ensuring inputs are properly structured before execution begins.\n *\n * @module flow/types/run-args\n */\n\nimport { z } from \"zod\";\n\n/**\n * Zod schema for validating flow run arguments.\n *\n * @property inputs - Record mapping input node IDs to their input data\n *\n * @example\n * ```typescript\n * const args = {\n * inputs: {\n * \"input-node-1\": { file: myFile, metadata: { ... 
} },\n * \"input-node-2\": { file: anotherFile }\n * }\n * };\n *\n * // Validate before running\n * const validated = runArgsSchema.parse(args);\n * ```\n */\nexport const runArgsSchema = z.object({\n inputs: z.record(z.string(), z.any()),\n});\n\n/**\n * Type representing validated flow run arguments.\n *\n * This type is inferred from the runArgsSchema and ensures type safety\n * when passing inputs to flow execution.\n */\nexport type RunArgs = z.infer<typeof runArgsSchema>;\n"],"mappings":"4XAyFA,MAAaA,EAE8B,CACzC,QAAS,GACT,iBAAkB,EAClB,aAAc,IACd,iBAAkB,EAClB,eAAgB,IAChB,SAAU,CAAE,KAAM,OAAQ,CAC3B,CCrEK,EAA6B,8BA+BnC,SAAgB,EACd,EACqB,CACrB,IAAM,EAAW,GACf,GAAG,IAA6B,IAE5B,EACJ,GAEA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAM,EAAQ,EAAS,CACvB,EAAM,MAAO,EAAU,IAAI,EAAI,CAErC,GAAI,IAAQ,KACV,OAAO,KAGT,GAAI,CAEF,OADc,EAAe,YAAqC,EAAI,MAEhE,CAGN,OADA,MAAO,EAAU,OAAO,EAAI,CACrB,OAET,CAEE,GACJ,EACA,IACyC,CACzC,IAAM,EAAM,EAAQ,EAAS,CACvB,EAAa,EAAe,UAAU,EAAM,CAClD,OAAO,EAAU,IAAI,EAAK,EAAW,EAGvC,MAAO,CACL,SAAU,EAEV,SAAU,EAEV,mBAAoB,EAAkB,IACpC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAM,KAAK,KAAK,CAClB,EAAQ,MAAO,EAAiB,EAAS,CA6B7C,OA3BI,IAAU,OAEZ,EAAQ,EAAiC,CACvC,iBAAkB,EAClB,aAAc,IACd,iBAAkB,EAClB,iBACD,CAAC,EAIJ,AASE,EATE,EAAM,EAAM,YAAc,EAEpB,CACN,GAAG,EACH,aAAc,EACd,YAAa,EACd,CAGO,CACN,GAAG,EACH,aAAc,EAAM,aAAe,EACpC,CAGH,MAAO,EAAiB,EAAU,EAAM,CACjC,EAAM,cACb,CAEJ,cAAgB,GACd,EAAO,IAAI,WAAa,CACtB,IAAM,EAAQ,MAAO,EAAiB,EAAS,CAC3C,IAAU,OACZ,MAAO,EAAiB,EAAU,CAChC,GAAG,EACH,aAAc,EACd,YAAa,KAAK,KAAK,CACxB,CAAC,GAEJ,CAEJ,2BAA6B,GAC3B,EAAO,IAAI,WAAa,CACtB,IAAM,EAAQ,MAAO,EAAiB,EAAS,CAC/C,GAAI,IAAU,KACZ,MAAO,GAGT,IAAM,EAAW,CACf,GAAG,EACH,kBAAmB,EAAM,kBAAoB,EAC9C,CAED,OADA,MAAO,EAAiB,EAAU,EAAS,CACpC,EAAS,mBAChB,CAEJ,gBACE,EAAO,IAAI,WAAa,CACtB,IAAM,EAAQ,IAAI,IAElB,GAAI,CAAC,EAAU,KAEb,OAAO,EAGT,IAAM,EAAO,MAAO,EAAU,KAAK,EAA2B,CACxD,EAAM,KAAK,KAAK,CAEtB,IAAK,IAAM,KAAO,EAAM,CACtB,IAAM,EAAW,EACX,EAAQ,MAAO,EAAiB,EAAS,CAE/C,GAAI,IAAU,KAAM,CAClB,IAAM,EAA2B,EAAM,EAAM,gBAE7C,EAAM,IAAI,EAAU,CAClB,WACA,MAAO,EAAM,MACb,aAAc,EAAM,aACpB,kBAAmB,EAAM,kBACzB,2BACA,kBACE,EAAM,QAAU,OACZ,KAAK,IACH,EACA,EAAM,OAAO,aAAe,EAC7B,CACD,IAAA,GACP,CAAC,EAIN,OAAO,GACP,CAEJ,OAAS,GAAqB,EAAU,OAAO,EAAQ,EAAS,CAAC,CAClE,CA0BH,SAAgB,GAAqD,CACnE,IAAM,EAAQ,IAAI,IAElB,MAAO,CACL,SAAW,GACT,EAAO,QAAQ,EAAM,IAAI,EAAS,EAAI,KAAK,CAE7C,UAAW,EAAkB,IAC3B,EAAO,SAAW,CAChB,EAAM,IAAI,EAAU,EAAM,EAC1B,CAEJ,mBAAoB,EAAkB,IACpC,EAAO,SAAW,CAChB,IAAM,EAAM,KAAK,KAAK,CAClB,EAAQ,EAAM,IAAI,EAAS,CA0B/B,OAxBI,IAAU,IAAA,KACZ,EAAQ,EAAiC,CACvC,iBAAkB,EAClB,aAAc,IACd,iBAAkB,EAClB,iBACD,CAAC,EAIJ,AAOE,EAPE,EAAM,EAAM,YAAc,EACpB,CACN,GAAG,EACH,aAAc,EACd,YAAa,EACd,CAEO,CACN,GAAG,EACH,aAAc,EAAM,aAAe,EACpC,CAGH,EAAM,IAAI,EAAU,EAAM,CACnB,EAAM,cACb,CAEJ,cAAgB,GACd,EAAO,SAAW,CAChB,IAAM,EAAQ,EAAM,IAAI,EAAS,CAC7B,IAAU,IAAA,IACZ,EAAM,IAAI,EAAU,CAClB,GAAG,EACH,aAAc,EACd,YAAa,KAAK,KAAK,CACxB,CAAC,EAEJ,CAEJ,2BAA6B,GAC3B,EAAO,SAAW,CAChB,IAAM,EAAQ,EAAM,IAAI,EAAS,CACjC,GAAI,IAAU,IAAA,GACZ,MAAO,GAGT,IAAM,EAAW,CACf,GAAG,EACH,kBAAmB,EAAM,kBAAoB,EAC9C,CAED,OADA,EAAM,IAAI,EAAU,EAAS,CACtB,EAAS,mBAChB,CAEJ,gBACE,EAAO,SAAW,CAChB,IAAM,EAAQ,IAAI,IACZ,EAAM,KAAK,KAAK,CAEtB,IAAK,GAAM,CAAC,EAAU,KAAU,EAAO,CACrC,IAAM,EAA2B,EAAM,EAAM,gBAE7C,EAAM,IAAI,EAAU,CAClB,WACA,MAAO,EAAM,MACb,aAAc,EAAM,aACpB,kBAAmB,EAAM,kBACzB,2BACA,kBACE,EAAM,QAAU,OACZ,KAAK,IACH,EACA,EAAM,OAAO,aAAe,EAC7B,CACD,IAAA,GACP,CAAC,CAGJ,OAAO,GACP,CAEJ,OAAS,GACP,EAAO,SAAW,CAChB,EAAM,OAAO,EAAS,EACtB,CACL,CAwBH,MAAa,EAA6B,EAAM,OAC9C,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,EADW,MAAO,EACkB,EAC3C,CACH,CAiBY,EAAiC,EAAM,QAClD,EACA,GAA+B,CAChC,CCvTD,IAAa,EAAb,KAAuC,CACrC,aAEA,SACA,OAGA,MAEA,YACE,EACA,
AAO,OAAO,EAAW,CAAC,WAAW,OAAO,EAAM,CAAC,CACrD,QACE,MAAO,OAET,CAEJ,OAAO,EAAO,QAAQ,EAAO,EAIzB,GACJ,EACA,IACG,CACH,GAAM,CAAE,gBAAiB,GAAY,CAC/B,EAAgB,EAAa,IAAW,EAAE,CAC1CC,EAAkC,EAAE,CAS1C,OAPA,EAAc,QAAS,GAAsB,CAC3C,IAAM,EAAS,EAAY,IAAI,EAAa,CACxC,IAAW,IAAA,KACb,EAAO,GAAgB,IAEzB,CAEK,GAIH,EACJ,GACG,CACH,IAAM,EAAa,EAAM,OAAQ,GAAc,EAAK,OAAS,QAAQ,CAC/DC,EAA0D,EAAE,CAYlE,OAVA,EAAW,QAAS,GAAc,CAE9B,GACA,OAAO,GAAe,UACtB,EAAK,MAAM,IAEX,EAAa,EAAK,IAAM,EAAY,MAAM,EAAW,EAAK,IAAI,GAEhE,CAEK,GAIH,EAAU,GACP,CAAC,EAAM,KAAM,GAAS,EAAK,SAAW,EAAO,CAKhD,EAA0B,GAA4B,CAC1D,IAAM,EAAO,EAAM,KAAM,GAAW,EAAE,KAAO,EAAO,CACpD,OAAO,EAAO,EAAO,EAAI,GAAM,aAAe,IAI1C,EACJ,GAC8C,CAC9C,IAAM,EAAc,EAAM,OAAQ,GAChC,EAAuB,EAAK,GAAG,CAChC,CACKC,EAAuC,EAAE,CAS/C,OAPA,EAAY,QAAS,GAAc,CACjC,IAAM,EAAS,EAAY,IAAI,EAAK,GAAG,CACnC,IAAW,IAAA,KACb,EAAY,EAAK,IAAM,IAEzB,CAEK,GAIH,GACJ,EACA,IACkB,CAClB,IAAM,EAAc,EAAM,OAAQ,GAChC,EAAuB,EAAK,GAAG,CAChC,CACKC,EAA8B,EAAE,CAkBtC,OAhBA,EAAY,QAAS,GAAc,CACjC,IAAM,EAAS,EAAY,IAAI,EAAK,GAAG,CACvC,GAAI,IAAW,IAAA,GAAW,CAExB,IAAM,EAAe,EAAa,IAAI,EAAK,GAAG,CAG9C,EAAa,KAAK,CAChB,OAAQ,EAAK,GACb,SAAU,EACV,KAAM,EACN,UAAW,IAAI,MAAM,CAAC,aAAa,CACpC,CAAC,GAEJ,CAEK,GAIH,GACJ,EACA,EACA,IAEO,EAAO,IAAI,WAAa,CAE7B,GAAI,EAAK,QAAQ,KAAO,EACtB,OAAO,EAIT,IAAM,EAAa,MAAO,EACpB,EAAkB,MAAO,EAAW,aACxC,EAAK,QAAQ,GACb,EACD,CACK,EAAkB,MAAO,EAAW,aACxC,EACA,EACD,CAGK,EAAW,MAAO,EAAgB,KAAK,EAAK,GAAG,CAG/C,EAAa,EAAO,KAAK,EAAS,CAGlCC,EAA8B,CAClC,GAAG,EACH,QAAS,CACP,GAAI,EACJ,KAAM,EAAK,QAAQ,KACpB,CACF,CAEK,EAAc,MAAO,EAAgB,OAAO,EAAgB,CAYlE,OATA,MAAO,EAAgB,MACrB,CACE,QAAS,EAAY,GACrB,OAAQ,EACR,OAAQ,EACT,CACD,EAAE,CACH,CAEM,GACP,CAIE,GACJ,EACA,EACA,EACA,EACA,EACA,EACA,EACA,IAYO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAO,EAAQ,IAAI,EAAO,CAChC,GAAI,CAAC,EACH,OAAO,MAAO,EAAgB,SAC5B,sBACD,CAAC,UAAU,CAId,GAAI,EAAgB,CAClB,IAAM,EAAS,MAAO,EAAe,EAAM,CAC3C,GAAI,IAAW,SAEb,OAAO,MAAO,EAAgB,SAAS,cAAe,CACpD,MAAO,QAAQ,EAAO,6BAA6B,IACpD,CAAC,CAAC,UAAU,CAEf,GAAI,IAAW,YAEb,OAAO,MAAO,EAAgB,SAAS,iBAAkB,CACvD,MAAO,QAAQ,EAAO,gCAAgC,IACvD,CAAC,CAAC,UAAU,CAKb,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,UACrB,SAAU,EAAK,KACf,SAAU,EAAK,KAChB,CAAC,EAIJ,IAAM,EAAa,EAAK,OAAO,YAAc,EACvC,EAAY,EAAK,OAAO,YAAc,IACtC,EAAwB,EAAK,OAAO,oBAAsB,GAG1D,EAAW,EAA+B,EAAK,CAC/C,EACJ,GAAU,SAAW,EAAK,YAAc,EACpC,EAAuB,YAAY,EAAK,WAAY,EAAS,CAC7D,KAGN,GAAI,EAAgB,CAClB,GAAM,CACJ,UACA,MAAO,EACP,aAAc,GACZ,MAAO,EAAe,cAAc,CAExC,GAAI,CAAC,EAAS,CACZ,IAAM,EAAW,EAAe,aAAa,CAmD7C,OAjDA,MAAO,EAAO,WACZ,uCAAuC,EAAK,WAAW,uBACxD,CAGG,EAAS,OAAS,QAEhB,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,QACrB,SAAU,EAAK,KAChB,CAAC,EAMG,CACL,SACA,OAHoB,EAAW,GAI/B,QAAS,GACT,QAAS,GACV,EAGC,EAAS,OAAS,WAEhB,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,QACrB,SAAU,EAAK,KACf,OAAQ,EAAS,MAClB,CAAC,EAEG,CACL,SACA,OAAQ,EAAS,MACjB,QAAS,GACT,QAAS,GACV,EAII,MAAO,EAAgB,SAAS,uBAAwB,CAC7D,KAAM,0CAA0C,EAAK,KAAK,GAC1D,QAAS,CACP,SAAU,EAAK,KACf,SACA,MAAO,EACP,aAAc,EACf,CACF,CAAC,CAAC,UAAU,EAIjB,IAAI,EAAa,EACbC,EAAoC,KAGxC,KAAO,GAAc,GACnB,GAAI,CAEF,IAAIC,EACAC,EAAkD,EAAE,CAExD,GAAI,EAAK,OAAS,QAGhB,IADA,EAAY,EAAW,GACnB,IAAc,IAAA,GAIhB,OAHA,MAAO,EAAO,SACZ,cAAc,EAAO,oBACtB,CACM,MAAO,EAAgB,SAAS,kBAAmB,CACxD,MAAW,MAAM,cAAc,EAAO,oBAAoB,CAC3D,CAAC,CAAC,UAAU,KAEV,CAIL,GAFA,EAAyB,EAAc,EAAQ,EAAY,CAEvD,OAAO,KAAK,EAAuB,CAAC,SAAW,EAEjD,OADA,MAAO,EAAO,SAAS,QAAQ,EAAO,oBAAoB,CACnD,MAAO,EAAgB,SAAS,kBAAmB,CACxD,MAAW,MAAM,QAAQ,EAAO,oBAAoB,CACrD,CAAC,CAAC,UAAU,CAIf,GAAK,EAAK,WAUR,EAAY,MAVQ,CACpB,IAAM,EAAgB,OAAO,KAAK,EAAuB,CAAC,GAC1D,GAAI,CAAC,EACH,OAAO,MAAO,EAAgB,SAAS,kBAAmB,CACxD,MAAW,MAAM,QAAQ,EAAO,oBAAoB,CACrD,CAAC,CAAC,UAAU,CAEf,EAAY,EAAuB,IAQvC,GAAI,EAAK,OAAS,eAEZ,EADoB,MAAO,EAAkB,EAAM,EA
AU,EAY/D,OATI,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,QACrB,SAAU,EAAK,KAChB,CAAC,EAEG,CACL,SACA,OAAQ,EACR,QAAS,GACT,QAAS,GACV,CAKL,IAAM,EAAkB,MAAO,EAAK,IAAI,CACtC,KAAM,EACN,OAAQ,EACR,QACA,SACA,YACA,WACD,CAAC,CAGF,GAAI,EAAgB,OAAS,UAAW,CAEtC,IAAMC,EAAS,EAAgB,YAc/B,OAXI,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,UACrB,SAAU,EAAK,KACf,YAAaA,EACd,CAAC,EAGG,CACL,SACA,OAAA,EACA,QAAS,GACT,QAAS,GACT,SAAU,EAAgB,SAC3B,CAIH,IAAI,EAAS,EAAgB,KAG7B,GAAI,EAAuB,EAAO,GAE5B,GAAa,EAAO,EAAI,EAAO,QAAQ,KAAO,IAChD,MAAO,EAAO,SACZ,+BAA+B,EAAO,eAAe,EAAO,QAAQ,GAAG,MAAM,IAC9E,CACD,EAAS,MAAO,EACd,EACA,EACA,EACD,EAIC,EAAO,OAAO,cAAc,CAC9B,MAAO,EAAO,SACZ,2CAA2C,IAC5C,CACD,IAAM,EAAa,EAAO,MAAM,aAAa,CAC3C,OAAQ,EACR,SACA,SACA,QACA,YACA,WACD,CAAC,CAGF,EAAS,MAAQ,EAAO,SAAS,EAAW,CACxC,EACA,EAAO,YAAc,EAA+B,CAqB5D,OAhBI,IACF,MAAO,EAAe,eAAe,EAInC,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,QACrB,SAAU,EAAK,KACf,SACD,CAAC,EAGG,CACL,SACA,SACA,QAAS,GACT,QAAS,GACT,SAAU,EAAgB,SAC3B,OACM,EAAO,CAad,GAXA,EACE,aAAiB,EACb,EACA,EAAgB,SAAS,kBAAmB,CAAE,MAAO,EAAO,CAAC,CAG/D,IACF,MAAO,EAAe,cAAc,EAAU,KAAK,EAIjD,EAAa,EAAY,CAC3B,IAGA,IAAM,EAAQ,EACV,EAAY,IAAM,EAAa,GAC/B,EAGJ,MAAO,EAAO,WACZ,QAAQ,EAAO,IAAI,EAAK,KAAK,sBAAsB,EAAW,GAAG,EAAW,UAAU,EAAM,IAC7F,CAGD,MAAO,EAAO,MAAM,EAAM,CAG1B,SAgBF,OAZI,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,UACrB,SAAU,EAAK,KACf,MAAO,EAAU,KACjB,aACD,CAAC,EAGG,MAAO,EAAU,UAAU,CAUtC,OALI,EACK,MAAO,EAAU,UAAU,CAI7B,MAAO,EAAgB,SAAS,kBAAmB,CACxD,MAAW,MAAM,iCAAiC,CACnD,CAAC,CAAC,UAAU,EACb,CAAC,UAIM,CACL,IAAM,EAAO,EAAQ,IAAI,EAAO,CAC1B,EAAW,GAAM,YAAc,GAAM,MAAQ,UACnD,OAAO,EAAO,SAAS,QAAQ,IAAY,CACzC,WAAY,CACV,UAAW,EACX,YAAa,GAAM,MAAQ,UAC3B,eAAgB,GAAM,YAAc,UACpC,YAAa,GAAM,MAAQ,UAC3B,UAAW,EACX,cAAe,EAChB,CACF,CAAC,IACA,CACL,CAIG,GAAe,CACnB,SACA,YACA,QACA,aACA,cA6BO,EAAO,IAAI,WAAa,CAE7B,IAAM,EAAsB,MAAO,EAAO,cACxC,EACD,CACK,EAAyB,EAAoB,OAAS,OACxD,IAAI,GAAkC,EAAoB,MAAM,CAChE,KAGA,CAAC,GAAc,IACjB,MAAO,EAAQ,CACb,QACA,UAAW,EAAU,UACrB,SACD,CAAC,EAIJ,IAAM,EAAa,EAAqB,GAAU,EAAE,CAAC,CAGjDC,EACAC,EACAC,EAEA,GAEF,EAAiB,EAAW,eAC5B,EAAc,EAAW,YACzB,EAAa,EAAW,eAGxB,EAAiB,GAAiB,CAClC,EAAc,IAAI,IAClB,EAAa,GAIf,IAAM,EAAY,IAAI,IAStB,GAAI,EAAe,SAAW,EAAM,OAClC,OAAO,MAAO,EAAgB,SAAS,mBAAmB,CAAC,UAAU,CAIvE,IAAM,EAAU,IAAI,IAAI,EAAM,IAAK,GAAS,CAAC,EAAK,GAAI,EAAK,CAAC,CAAC,CAK7D,GAF6B,EAAO,mBAAmB,SAAW,GAExC,CAExB,MAAO,EAAO,SACZ,QAAQ,EAAO,gDAAgD,EAAO,mBAAmB,gBAAkB,EAAE,GAC9G,CAED,IAAM,EAAY,IAAI,GAAkB,CACtC,eAAgB,EAAO,mBAAmB,gBAAkB,EAC7D,CAAC,CAGI,EAAkB,EAAU,2BAChC,EACA,EACD,CAED,MAAO,EAAO,SACZ,QAAQ,EAAO,uBAAuB,EAAgB,OAAO,mBAC9D,CAGD,IAAMd,EAAyC,EAAE,CACjD,EAAM,QAAS,GAAS,CACtB,EAAa,EAAK,IAAM,EAAE,EAC1B,CACF,EAAM,QAAS,GAAS,CACtB,EAAa,EAAK,SAAS,KAAK,EAAK,OAAO,EAC5C,CAGF,IAAK,IAAM,KAAS,EAAiB,CACnC,MAAO,EAAO,SACZ,QAAQ,EAAO,oBAAoB,EAAM,MAAM,eAAe,EAAM,MAAM,KAAK,KAAK,GACrF,CAGD,IAAM,EAAgB,EAAM,MAAM,IAC/B,OACC,EAAO,IAAI,WAAa,CAEtB,GACE,GACA,IAAW,EAAW,eAAe,IACrC,EACA,CACA,IAAM,EAAO,EAAQ,IAAI,EAAO,CAC5B,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,WACrB,SAAU,EAAK,KACf,SAAU,EAAK,KAChB,CAAC,EAeN,MAAO,CAAE,SAAQ,WAXE,MAAO,EACxB,EACA,EACA,EACA,EACA,EACA,EACA,EACA,EACD,CAE4B,EAC7B,CACL,CAGK,EACJ,MAAO,EAAU,uBAAuB,EAAc,CAGxD,IAAK,GAAM,CAAE,SAAQ,gBAAgB,EAAc,CACjD,GAAI,EAAW,QASb,OAPI,EAAW,SAAW,IAAA,KACxB,EAAY,IAAI,EAAQ,EAAW,OAAO,CACtC,EAAW,UACb,EAAU,IAAI,EAAQ,EAAW,SAAS,EAIvC,CACL,KAAM,SACN,SACA,eAAgB,CACd,iBACA,aAAc,EAAe,QAAQ,EAAO,CAC5C,OAAQ,EACT,CACF,CAGC,EAAW,UACb,EAAY,IAAI,EAAQ,EAAW,OAAO,CACtC,EAAW,UACb,EAAU,IAAI,EAAQ,EAAW,SAAS,QAK7C,CAEL,MAAO,EAAO,SACZ,QAAQ,EAAO,gCAChB,CAED,IAAK,IAAI,EAAI,EAAY,EAAI,EAAe,OAAQ,IAAK,CACvD,IAAM,EAAS,EAAe,GAC9B,GAAI,CAAC,EACH,OAAO,MAAO,EAAgB,SAC5B,s
BACD,CAAC,UAAU,CAId,GAAI,GAAc,IAAM,GAAc,EAAS,CAC7C,IAAM,EAAO,EAAQ,IAAI,EAAO,CAC5B,IACF,MAAO,EAAQ,CACb,QACA,SACA,SACA,UAAW,EAAU,WACrB,SAAU,EAAK,KACf,SAAU,EAAK,KAChB,CAAC,EAIN,IAAM,EAAa,MAAO,EACxB,EACA,EACA,EACA,EACA,EACA,EACA,EACA,EACD,CAED,GAAI,EAAW,QASb,OAPI,EAAW,SAAW,IAAA,KACxB,EAAY,IAAI,EAAW,OAAQ,EAAW,OAAO,CACjD,EAAW,UACb,EAAU,IAAI,EAAW,OAAQ,EAAW,SAAS,EAIlD,CACL,KAAM,SACN,OAAQ,EAAW,OACnB,eAAgB,CACd,iBACA,aAAc,EACd,OAAQ,EACT,CACF,CAGC,EAAW,UACb,EAAY,IAAI,EAAW,OAAQ,EAAW,OAAO,CACjD,EAAW,UACb,EAAU,IAAI,EAAW,OAAQ,EAAW,SAAS,GAO7D,IAAM,EAAc,EAAmB,EAAY,CAC7C,EAAe,EAAoB,EAAa,EAAU,CAK1D,EAHoB,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAa,CAGtB,UAAU,EAAY,CAC5D,GAAI,CAAC,EAAY,QAAS,CACxB,IAAM,EAAkB,kCAAkC,EAAY,MAAM,QAAQ,sBAAsB,KAAK,UAAU,OAAO,KAAK,EAAmB,EAAY,CAAC,CAAC,CAAC,uCAAuC,EAC3M,OAAQ,GAAW,EAAuB,EAAE,GAAG,CAAC,CAChD,IAAK,GAAW,EAAE,GAAG,CACrB,KAAK,KAAK,GAWb,OARI,IACF,MAAO,EAAQ,CACb,QACA,UAAW,EAAU,UACrB,SACA,MAAO,EACR,CAAC,EAEG,MAAO,EAAgB,SAC5B,+BACA,CACE,KAAM,EACN,MAAO,EAAY,MACpB,CACF,CAAC,UAAU,CAEd,IAAM,EAAkB,EAAY,KAapC,OAVI,IACF,MAAO,EAAQ,CACb,QACA,UAAW,EAAU,QACrB,SACA,QAAS,EACT,OAAQ,EACT,CAAC,EAGG,CACL,KAAM,YACN,OAAQ,EACR,QAAS,EACV,EACD,CA+FJ,MAAO,CACL,GAAI,EACJ,OACA,QACA,QACA,cACA,eACA,UACA,iBACA,MAAO,EAAO,MACd,KAtGW,CACX,SACA,YACA,QACA,cAwBO,EAAY,CAAE,SAAQ,YAAW,QAAO,WAAU,CAAC,CA2E1D,QAxEc,CACd,QACA,YACA,cACA,iBACA,cA6BO,EAAY,CACjB,OAAQ,EAAe,OACvB,YACA,QACA,WAAY,CACV,eAAgB,EAAe,eAC/B,YAAa,IAAI,IAAI,OAAO,QAAQ,EAAY,CAAC,CACjD,aAAc,EAAe,aAC9B,CACD,WACD,CAAC,CA6BF,kBA1B0B,CAE1B,IAAM,EAAkB,EACxB,OAAO,EAAc,aAAa,EAAiB,EAAM,EAwBzD,eArBsB,GACf,EAAc,aAAa,EAAQ,EAAY,CAqBtD,gBAlBuB,GAChB,EAAc,aAAa,EAAS,EAAa,CAkBzD,EACD,CCx2CJ,IAAa,EAAb,MAAa,UAAsB,EAAQ,IAAI,gBAAgB,EAG5D,AAAC,CACF,OAAO,SAAW,EAAO,cAAc,EAAc,EAmE1C,GAAb,cAAkC,EAAQ,IAAI,eAAe,EAG1D,AAAC,GA4KS,GAAb,cAAgC,EAAQ,IAAI,aAAa,EAGtD,AAAC,GAsBJ,MAAM,GAAsB,GACnB,OAAO,GAAW,YAAY,GAAmB,OAAQ,EAI5D,GAAqB,GAEvB,OAAO,GAAW,UAClB,GACA,WAAY,GACZ,SAAU,GACV,cAAe,EAGP,EAAuB,KAE1B,EAIT,SAAS,GAKP,EACA,EACA,EAC0D,CAE1D,IAAM,EAAyB,GAA2B,CAExD,IAAM,EAAoB,GACxB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC1C,IACF,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,GAAG,EACH,UAAW,IAAI,KAChB,CAAC,GAEJ,CAGJ,MAAQ,IACN,EAAO,IAAI,WAAa,CAmBtB,OAhBI,EAAK,UACP,MAAO,EAAO,SAAS,EAAK,QAAQ,EAAM,CAAG,IAE3C,EAAO,SAAS,0BAA2B,EAAM,CAC1C,EAAO,QAAQ,CAAE,QAAS,KAAM,CAAC,EACxC,EAIJ,MAAO,EAAa,KAAK,EAAgB,EAAM,CAE/C,EAAO,QACL,gBAAgB,EAAe,cAAc,EAAM,YACpD,CAGO,EAAM,UAAd,CACE,KAAK,EAAU,UACb,MAAO,EAAiB,CAAE,OAAQ,UAAW,CAAC,CAC9C,MAEF,KAAK,EAAU,QAEb,MAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC1C,GAAO,EAAM,UACf,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,OAAQ,EAAM,QACd,UAAW,IAAI,KAChB,CAAC,GAEJ,CACF,MAEF,KAAK,EAAU,UACb,MAAO,EAAiB,CACtB,OAAQ,SACR,MAAO,EAAM,MACd,CAAC,CACF,MAEF,KAAK,EAAU,UACb,MAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC9C,GAAI,EAAK,CAIP,IAAM,EAHe,EAAI,MAAM,KAC5B,GAAM,EAAE,SAAW,EAAM,OAC3B,CAEG,EAAI,MAAM,IAAK,GACb,EAAE,SAAW,EAAM,OACf,CACE,GAAG,EACH,OAAQ,UACR,UAAW,IAAI,KAChB,CACD,EACL,CACD,CACE,GAAG,EAAI,MACP,CACE,OAAQ,EAAM,OACd,OAAQ,UACR,UAAW,IAAI,KACf,UAAW,IAAI,KAChB,CACF,CAEL,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,MAAO,EACP,UAAW,IAAI,KAChB,CAAC,GAEJ,CACF,MAEF,KAAK,EAAU,UACb,MAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC9C,GAAI,EAAK,CAIP,IAAM,EAHe,EAAI,MAAM,KAC5B,GAAM,EAAE,SAAW,EAAM,OAC3B,CAEG,EAAI,MAAM,IAAK,GACb,EAAE,SAAW,EAAM,OACf,CACE,GAAG,EACH,OAAQ,SACR,OAAQ,EAAM,YACd,UAAW,IAAI,KAChB,CACD,EACL,CACD,CACE,GAAG,EAAI,MACP,CACE,OAAQ,EAAM,OACd,OAAQ,SACR,OAAQ,EAAM,YACd,UAAW,IAAI,KACf,UAAW,IAAI,KAChB,CACF,CAEL,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,MAAO,EACP,UAAW,IAAI,KAChB,CAAC,GAEJ,CACF
,MAEF,KAAK,EAAU,WACb,MAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC9C,GAAI,EAAK,CACP,IAAM,EAAe,EAAI,MAAM,IAAK,GAClC,EAAE,SAAW,EAAM,OACf,CACE,GAAG,EACH,OAAQ,UACR,UAAW,IAAI,KAChB,CACD,EACL,CAED,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,MAAO,EACP,UAAW,IAAI,KAChB,CAAC,GAEJ,CACF,MAEF,KAAK,EAAU,QACb,MAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC9C,GAAI,EAAK,CACP,IAAM,EAAe,EAAI,MAAM,IAAK,GAClC,EAAE,SAAW,EAAM,OACf,CACE,GAAG,EACH,OAAQ,YACR,OAAQ,EAAM,OACd,UAAW,IAAI,KAChB,CACD,EACL,CAKK,EAAa,CAAC,EAAK,MAAM,KAC5B,GAAS,EAAK,SAAW,EAAM,OACjC,CAGK,EADO,EAAK,MAAM,KAAM,GAAW,EAAE,KAAO,EAAM,OAAO,EACnC,aAAe,GAErC,EAAS,EAAM,OAEf,EAAa,GAAkB,EAAO,CAExC,EAAoB,EAAI,mBAAqB,EAAE,CAG7C,EAAuB,GAAc,EAGzC,GACA,GAAmB,EAAW,EAC9B,EAAW,IAIX,EAAoB,EAAkB,OACnC,GAAW,IAAW,EAAW,GACnC,CAGG,GAAiB,CAAC,GACpB,EAAO,QACL,+BAA+B,EAAM,OAAO,yBAC7C,EAGH,CAAC,GACD,GAAmB,EAAW,EAC9B,EAAW,KAGN,EAAkB,SAAS,EAAW,GAAG,EAC5C,EAAkB,KAAK,EAAW,GAAG,EAIzC,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,MAAO,EACP,oBACA,UAAW,IAAI,KAChB,CAAC,GAEJ,CACF,MAEF,KAAK,EAAU,UACb,MAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAe,CAC9C,GAAI,EAAK,CACP,IAAM,EAAe,EAAI,MAAM,IAAK,GAClC,EAAE,SAAW,EAAM,OACf,CACE,GAAG,EACH,OAAQ,SACR,MAAO,EAAM,MACb,WAAY,EAAM,WAClB,UAAW,IAAI,KAChB,CACD,EACL,CAED,MAAO,EAAQ,IAAI,EAAgB,CACjC,GAAG,EACH,MAAO,EACP,MAAO,EAAM,MACb,UAAW,IAAI,KAChB,CAAC,GAEJ,CACF,MAGJ,MAAO,CAAE,QAAS,EAAgB,EAClC,EAIA,EAAgC,GAC5B,GACN,EAAO,IAAI,WAAa,CACtB,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAM,CAWrC,OAVK,EAQD,EAAI,SAAW,SAAiB,SAChC,EAAI,SAAW,YAAoB,YAChC,UATE,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,YACrB,CAAC,CACH,EAMH,CAGN,MAAO,CACL,GAAG,EACH,IAAM,GAMG,EAAO,IAAI,WAAa,CAE7B,IAAM,EAAiB,EAAK,OAAS,OAAO,YAAY,CAElD,EAAkB,EAAsB,EAAe,CACvD,EACJ,EAA6B,EAAe,CAsB9C,OAPe,OAZQ,MAAO,EAAqB,CACjD,OAAQ,EAAK,GACb,KAAM,EAAK,KACX,MAAO,EAAK,MACZ,MAAO,EAAK,MACZ,YAAa,EAAK,YAClB,aAAc,EAAK,aACnB,QAAS,EACT,eAAgB,EACjB,CAAC,EAGmC,IAAI,CACvC,GAAG,EACH,MAAO,EACP,SAAU,EAAK,SAChB,CAAC,EAIF,CAEJ,OAAS,GAWA,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAiB,EAAK,MAEtB,EAAkB,EAAsB,EAAe,CACvD,EACJ,EAA6B,EAAe,CAkB9C,OAHe,OAZQ,MAAO,EAAqB,CACjD,OAAQ,EAAK,GACb,KAAM,EAAK,KACX,MAAO,EAAK,MACZ,MAAO,EAAK,MACZ,YAAa,EAAK,YAClB,aAAc,EAAK,aACnB,QAAS,EACT,eAAgB,EACjB,CAAC,EAGmC,OAAO,EAAK,EAIjD,CAEL,CAIH,SAAgB,IAAmB,CACjC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,GACtB,EAAe,MAAO,EACtB,EAAU,MAAO,EACjB,EAAe,MAAO,EACtB,EAAY,MAAO,EAAuB,SAE1C,GAAa,EAAe,IAChC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAM,CAQrC,OAPK,EAOE,MAAO,EAAQ,IAAI,EAAO,CAAE,GAAG,EAAK,GAAG,EAAS,CAAC,CAN/C,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,YACrB,CAAC,CACH,EAGH,CAGE,GAA4B,EAAe,IAC/C,EAAO,IAAI,WAAa,CACtB,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAM,CAEnC,CAAC,GACD,CAAC,EAAI,mBACL,EAAI,kBAAkB,SAAW,IAKnC,MAAO,EAAO,QACZ,eAAe,EAAI,kBAAkB,OAAO,8BAA8B,IAC3E,CAGD,MAAO,EAAO,IACZ,EAAI,kBAAkB,IAAK,GACzB,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,OAAO,EAAQ,EAAS,CAC5C,MAAO,EAAO,SAAS,6BAA6B,IAAS,EAC7D,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAItB,OAHA,MAAO,EAAO,WACZ,sCAAsC,EAAO,IAAI,IAClD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,CACF,CACD,CAAE,YAAa,EAAG,CACnB,CAGD,MAAO,EAAU,EAAO,CACtB,kBAAmB,EAAE,CACtB,CAAC,GACF,CAGE,GAAwB,EAAe,IAC3C,EAAO,IAAI,WAAa,CACtB,GAAI,EAAO,OAAO,EAAU,CAAE,CAE5B,MAAO,EAAO,SACZ,sDAAsD,IACvD,CACD,OAGF,IAAM,EAAM,EAAU,MAGhB,EAAM,MAAO,EAAO,SAAS,EAAQ,IAAI,EAAM,KACnD,EAAO,QAAQ,KAAuB,CACvC,CAED,GAAI,CAAC,EAAK,CACR,MAAO,EAAO,WACZ,oBAAoB,EAAM,+BAC3B,CACD,OAIF,MAAO,EAAO,SAAS,EAAI,IAAI,EAAK,EAAM,CAAG,GAC3C,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,SACZ,kCAAkC,EAAM,SACxC,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CAED,MAAO,EAAO,QACZ,0BAA0B,EAAM,uBACjC,EACD,CAMEe,EAEF,EAA
O,IAAI,WAAa,CAC1B,IAAM,EAAa,MAAO,EAAO,YAAY,KAAK,EAAO,OAAO,CAChE,OAAO,EAAO,MAAM,EAAY,CAC9B,WAAc,IAAA,GACd,OAAS,IAAU,CACjB,QAAS,EAAK,QACd,OAAQ,EAAK,OACb,WAAY,EAAK,QAAU,EAAI,EAChC,EACF,CAAC,EACF,CAGI,GAA2B,CAC/B,QACA,OACA,YACA,WACA,YAQA,EAAO,IAAI,WAAa,CAsEtB,OArEA,QAAQ,IACN,yDAAyD,IAC1D,CAQD,MAAO,EAAU,EAAO,CACtB,OAAQ,UACR,aALmB,MAAO,EAM3B,CAAC,CAGa,MAAO,EAAO,IAAI,WAAa,CAC5C,QAAQ,IAAI,iDAAiD,IAAQ,CACrE,IAAM,EAAiB,GAAe,EAAM,EAAc,EAAQ,CAElE,QAAQ,IAAI,sCAAsC,IAAQ,CAE1D,IAAM,EAAa,MAAO,EAAe,IAAI,CAC3C,SACA,YACA,QACA,WACD,CAAC,CA4BF,OA1BA,QAAQ,IACN,wCAAwC,EAAM,iBAAiB,EAAW,OAC3E,CAGG,EAAW,OAAS,SAEtB,MAAO,EAAU,EAAO,CACtB,OAAQ,SACR,SAAU,EAAW,OACrB,eAAgB,EAAW,eAC3B,UAAW,IAAI,KAChB,CAAC,EAIF,MAAO,EAAU,EAAO,CACtB,OAAQ,YACR,UAAW,IAAI,KACf,QAAS,IAAI,KACd,CAAC,CAGF,MAAO,EAAyB,EAAO,EAAS,EAG3C,GACP,CAAC,KAED,EAAO,SAAS,iBAAkB,CAChC,WAAY,CACV,UAAW,EAAK,GAChB,YAAa,EAAK,KAClB,cAAe,EACf,kBAAmB,EACnB,kBAAmB,EAAK,MAAM,OAC/B,CACF,CAAC,CACH,EAGD,CAAC,KAGD,EAAO,SAAS,OAAQ,CACtB,WAAY,CACV,UAAW,EAAK,GAChB,YAAa,EAAK,KAClB,cAAe,EACf,kBAAmB,EACnB,kBAAmB,EAAK,MAAM,OAC/B,CACF,CAAC,CACF,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,SAAS,wBAAyB,EAAM,CAGtD,IAAM,EACJ,aAAiB,EAAkB,EAAM,KAAO,OAAO,EAAM,CAE/D,MAAO,EAAO,QACZ,gBAAgB,EAAM,gCAAgC,IACvD,CAGD,MAAO,EAAU,EAAO,CACtB,OAAQ,SACR,MAAO,EACP,UAAW,IAAI,KAChB,CAAC,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,SACZ,wBAAwB,IACxB,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,CAGD,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAM,CAqErC,MApEI,IACF,MAAO,EACJ,KAAK,EAAO,CACX,QACA,UAAW,EAAU,UACrB,OAAQ,EAAI,OACZ,MAAO,EACR,CAAC,CACD,KACC,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,SACZ,0CAA0C,IAC1C,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,EAID,EAAK,UACP,MAAO,EACJ,QAAQ,CACP,QACA,UAAW,EAAU,UACrB,OAAQ,EAAK,GACb,MAAO,EACR,CAAC,CACD,KACC,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,SACZ,2DAA2D,IAC3D,EACD,CACM,EAAO,QAAQ,CAAE,QAAS,KAAM,CAAC,EACxC,CACH,CACF,EAIL,MAAO,EAAyB,EAAO,EAAS,CAAC,KAC/C,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,WACZ,gDAAgD,IAChD,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,CAYD,MAAO,EAAqB,EAR1B,aAAiB,EACb,EACA,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,OAAO,EAAM,CACnB,MAAO,EACR,CAAC,CAC2C,CAE7C,GACN,CACH,CACF,CAEH,MAAO,CACL,SAAU,EAAQ,IAChB,EAAO,IAAI,WAAa,CAEtB,OADa,MAAO,EAAa,QAAQ,EAAQ,EAAS,EAE1D,CAEJ,aAAc,EAAQ,IACpB,EAAO,IAAI,WAAa,CAEtB,OAAO,GADM,MAAO,EAAa,QAAQ,EAAQ,EAAS,CAClC,EACxB,CAEJ,SAAU,CACR,SACA,YACA,WACA,YAOA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,MAAO,EAAc,SAEjC,EAAe,MAAO,EAAO,IAAI,CACrC,QAAW,GAAc,MAAM,CAAE,SAAQ,CAAC,CAC1C,MAAQ,GACN,EAAgB,SAAS,8BAA+B,CACtD,MAAO,EACR,CAAC,CACL,CAAC,CAGI,EAAQ,OAAO,YAAY,CAC3B,EAAY,IAAI,KAGhBC,EAAe,CACnB,GAAI,EACJ,SACA,YACA,WACA,OAAQ,UACR,YACA,UAAW,EACX,MAAO,EAAE,CACV,CAED,MAAO,EAAQ,IAAI,EAAO,EAAI,CAG9B,IAAM,EAAO,MAAO,EAAa,QAAQ,EAAQ,EAAS,CAE1D,QAAQ,IACN,sDAAsD,IACvD,CAKD,IAAM,EAAa,EAAwB,CACzC,QACA,OACA,YACA,WACA,OAAQ,EAAa,OACtB,CAAC,CAAC,KACD,EAAO,cAAe,GACpB,EAAO,SAAS,wBAAyB,EAAM,CAChD,CACF,CAMD,GAAI,EAAO,OAAO,EAAU,CAAE,CAE5B,QAAQ,IAAI,yCAAyC,IAAQ,CAE7D,IAAM,EAAU,MAAO,EAAO,SAAS,CAEjC,EADW,EAAQ,WAAW,EAAQ,CACnB,EAAW,CACpC,EAAU,MAAM,EAAQ,MAGxB,QAAQ,IACN,iDAAiD,IAClD,CACD,MAAO,EAAO,WAAW,EAAW,CAMtC,OAHA,QAAQ,IAAI,gDAAgD,IAAQ,CAG7D,GACP,CAEJ,aAAe,GACb,EAAO,IAAI,WAAa,CAUtB,OATY,MAAO,EAAQ,IAAI,EAAM,IAE5B,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,YACrB,CAAC,CACH,GAIH,CAEJ,YAAa,CACX,QACA,SACA,UACA,cAOA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,MAAO,EAAc,SAGjC,EAAM,MAAO,EAAQ,IAAI,EAAM,CACrC,GAAI,CAAC,EAEH,OADA,QAAQ,MAAM,gBAAgB,CACvB,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,YACrB,CAAC,CACH,CAIH,GAAI,EAAI,SAAW,SAEjB,OADA,QAAQ
,MAAM,oBAAoB,CAC3B,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAkB,CACzC,MAAO,OAAO,EAAM,0BAA0B,EAAI,OAAO,GAC1D,CAAC,CACH,CAIH,GAAI,EAAI,WAAa,EAEnB,OADA,QAAQ,MAAM,yCAAyC,CAChD,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAkB,CACzC,MAAO,OAAO,EAAM,qBAAqB,EAAI,SAAS,QAAQ,IAC/D,CAAC,CACH,CAIH,GAAI,CAAC,EAAI,eAEP,OADA,QAAQ,MAAM,6BAA6B,CACpC,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAkB,CACzC,MAAO,OAAO,EAAM,yBACrB,CAAC,CACH,CAeH,IAAM,EAAqB,CACzB,GAZkB,EAAI,MAAM,QAC3B,EAAK,KACA,EAAK,SAAW,IAAA,KAClB,EAAI,EAAK,QAAU,EAAK,QAEnB,GAET,EAAE,CACH,EAKE,GAAS,EACX,CAEK,EAAgB,CACpB,GAAG,EAAI,eAAe,QACrB,GAAS,EACX,CAID,MAAO,EAAU,EAAO,CACtB,OAAQ,UACT,CAAC,CAGF,IAAM,EAAO,MAAO,EAAa,QAAQ,EAAI,OAAQ,EAAI,SAAS,CAI5D,EAAa,EAAI,aACnB,EAAO,aAAa,CAClB,QAAS,EAAI,aAAa,QAC1B,OAAQ,EAAI,aAAa,OACzB,QAAS,EAAI,aAAa,aAAe,EAC1C,CAAC,CACF,IAAA,GA0JE,EAvJyB,EAAO,IAAI,WAAa,CACrD,IAAM,EAAiB,GAAe,EAAM,EAAc,EAAQ,CAElE,GAAI,CAAC,EAAI,eACP,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAkB,CACzC,MAAO,OAAO,EAAM,yBACrB,CAAC,CACH,CAIH,IAAM,EAAS,MAAO,EAAe,OAAO,CAC1C,QACA,UAAW,EAAI,UACf,YAAa,EACb,eAAgB,CACd,GAAG,EAAI,eACP,OAAQ,EACT,CACD,SAAU,EAAI,SACf,CAAC,CA0BF,OAvBI,EAAO,OAAS,SAElB,MAAO,EAAU,EAAO,CACtB,OAAQ,SACR,SAAU,EAAO,OACjB,eAAgB,EAAO,eACvB,UAAW,IAAI,KAChB,CAAC,EAIF,MAAO,EAAU,EAAO,CACtB,OAAQ,YACR,SAAU,IAAA,GACV,eAAgB,IAAA,GAChB,UAAW,IAAI,KACf,QAAS,IAAI,KACd,CAAC,CAGF,MAAO,EAAyB,EAAO,EAAS,EAG3C,GACP,CAAC,KAGD,EAAO,SAAS,wBAAyB,CACvC,WAAY,CACV,UAAW,EAAK,GAChB,YAAa,EAAK,KAClB,cAAe,EACf,kBAAmB,EAAI,UACvB,yBAA0B,EAC3B,CACD,OAAQ,EACT,CAAC,CACH,CAGwB,KACrB,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,SAAS,qBAAsB,EAAM,CAGnD,IAAM,EACJ,aAAiB,EACb,EAAM,KACN,OAAO,EAAM,CAEnB,MAAO,EAAO,QACZ,gBAAgB,EAAM,gCAAgC,IACvD,CAGD,MAAO,EAAU,EAAO,CACtB,OAAQ,SACR,MAAO,EACP,UAAW,IAAI,KAChB,CAAC,CAAC,KACD,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,SACZ,wBAAwB,IACxB,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,CAGD,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAM,CA+C5C,MA9CI,IACF,MAAO,EACJ,KAAK,EAAO,CACX,QACA,UAAW,EAAU,UACrB,OAAQ,EAAW,OACnB,MAAO,EACR,CAAC,CACD,KACC,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,SACZ,0CAA0C,IAC1C,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,EAIL,MAAO,EAAyB,EAAO,EAAS,CAAC,KAC/C,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAKtB,OAJA,MAAO,EAAO,WACZ,gDAAgD,IAChD,EACD,CACM,EAAO,QAAQ,IAAA,GAAU,EAChC,CACH,CACF,CAYD,MAAO,EAAqB,EAR1B,aAAiB,EACb,EACA,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,OAAO,EAAM,CACnB,MAAO,EACR,CAAC,CAC2C,CAE7C,GACN,CACH,CACF,CAI0D,KAC3D,EAAO,cAAe,GACpB,EAAO,SAAS,qBAAsB,EAAM,CAC7C,CACF,CAMD,GAAI,EAAO,OAAO,EAAU,CAAE,CAE5B,QAAQ,IACN,gDAAgD,IACjD,CACD,IAAM,EAAU,MAAO,EAAO,SAAS,CAEjC,EADW,EAAQ,WAAW,EAAQ,CACnB,EAAa,CACtC,EAAU,MAAM,EAAQ,MAGxB,QAAQ,IACN,wDAAwD,IACzD,CACD,MAAO,EAAO,WAAW,EAAa,CAYxC,OARmB,MAAO,EAAQ,IAAI,EAAM,IAEnC,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,yBACrB,CAAC,CACH,GAGH,CAEJ,WAAY,EAAe,IACzB,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAM,CACrC,GAAI,CAAC,EACH,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,YACrB,CAAC,CACH,CAIH,GAAI,IAAa,MAAQ,EAAI,WAAa,EACxC,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,sBAAuB,CAC9C,MAAO,UAAU,EAAS,kCAAkC,IAC7D,CAAC,CACH,CAIH,GAAI,EAAI,SAAW,UACjB,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAkB,CACzC,MAAO,OAAO,EAAM,qCAAqC,EAAI,OAAO,GACrE,CAAC,CACH,CAKH,IAAM,EADc,EAAI,MAAM,KAAM,GAAM,EAAE,SAAW,UAAU,EAC/B,OA0BlC,OAvBA,MAAO,EAAU,EAAO,CACtB,OAAQ,SACR,SAAU,EACV,UAAW,IAAI,KAChB,CAAC,CAGF,MAAO,EAAa,KAAK,EAAO,CAC9B,QACA,OAAQ,EAAI,OACZ,UAAW,EAAU,UACrB,SAAU,EACX,CAAC,EAGiB,MAAO,EAAQ,IAAI,EAAM,IAEnC,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,wBACrB,CAAC,CACH,GAGH,CAEJ,YAAa,EAAe,IAC1B,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAM,MAAO,EAAQ,IAAI,EAAM,CAyDrC,OAxDK
,EASD,IAAa,MAAQ,EAAI,WAAa,EACjC,MAAO,EAAO,KACnB,EAAgB,SAAS,sBAAuB,CAC9C,MAAO,UAAU,EAAS,mCAAmC,IAC9D,CAAC,CACH,CAKD,EAAI,SAAW,WACf,EAAI,SAAW,UACf,EAAI,SAAW,UAER,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAkB,CACzC,MAAO,OAAO,EAAM,wCAAwC,EAAI,OAAO,GACxE,CAAC,CACH,EAIH,MAAO,EAAU,EAAO,CACtB,OAAQ,YACR,UAAW,IAAI,KACf,QAAS,IAAI,KACd,CAAC,CAGF,MAAO,EAAa,KAAK,EAAO,CAC9B,QACA,OAAQ,EAAI,OACZ,UAAW,EAAU,WACtB,CAAC,CAGF,MAAO,EAAyB,EAAO,EAAS,EAG7B,MAAO,EAAQ,IAAI,EAAM,IAEnC,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,+BACrB,CAAC,CACH,GArDM,MAAO,EAAO,KACnB,EAAgB,SAAS,qBAAsB,CAC7C,MAAO,OAAO,EAAM,YACrB,CAAC,CACH,EAoDH,CAEJ,uBAAwB,EAAe,IACrC,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,UAAU,EAAO,EAAW,EAChD,CAEJ,0BAA4B,GAC1B,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,YAAY,EAAM,EACtC,CACL,EACD,CAIJ,MAAa,GAAa,EAAM,OAAO,GAAY,IAAkB,CAAC,CCrgDtE,SAAgB,GAAY,EAA0B,CACpD,IAAM,EAAe,EAAS,YAAY,IAAI,CAI9C,OAHI,IAAiB,IAAM,IAAiB,EACnC,EAEF,EAAS,UAAU,EAAG,EAAa,CAgB5C,SAAgB,GAAa,EAA0B,CACrD,IAAM,EAAe,EAAS,YAAY,IAAI,CAI9C,OAHI,IAAiB,IAAM,IAAiB,EACnC,GAEF,EAAS,UAAU,EAAe,EAAE,CAwB7C,SAAgB,EACd,EACA,EAMA,EACe,CAEf,IAAM,EAAW,EAAK,UAAY,EAAE,CAC9B,EACH,EAAS,UACT,EAAS,cACT,EAAS,MACV,UAKF,MAAO,CACL,SAJe,GAAY,EAAS,CAKpC,UAJgB,GAAa,EAAS,CAKtC,WACA,SAAU,EAAY,SACtB,OAAQ,EAAY,OACpB,OAAQ,EAAY,OACpB,MAAO,EAAY,MACnB,UAAW,IAAI,MAAM,CAAC,aAAa,CACnC,GAAG,EACJ,CAsBH,SAAgB,GACd,EACA,EACQ,CACR,GAAI,CAEF,IAAMC,EAAwC,EAAE,CAChD,IAAK,GAAM,CAAC,EAAK,KAAU,OAAO,QAAQ,EAAQ,CAC5C,IAAU,IAAA,KACZ,EAAc,GAAO,OAAO,EAAM,EAGtC,OAAO,EAAO,EAAS,EAAc,MAC/B,CAEN,OAAO,GA2CX,SAAgB,EACd,EACA,EACA,EACQ,CACR,IAAM,EAAmB,EAAQ,SAGjC,GAAI,CAAC,EACH,OAAO,EAGT,GAAI,CACF,GAAI,EAAO,OAAS,OAAQ,CAE1B,GAAI,EAAO,WAAY,CACrB,IAAM,EAAS,EAAO,WAAW,EAAQ,CACzC,GAAI,EAAQ,CACV,GAAM,CAAE,WAAU,aAAc,EAChC,OAAO,EACH,GAAG,EAAS,GAAG,EAAO,GAAG,IACzB,GAAG,EAAS,GAAG,KAIvB,OAAO,EAGT,GAAI,EAAO,OAAS,SAAU,CAE5B,GAAI,EAAO,OAET,OADe,EAAO,OAAO,EAAM,EAAQ,EAC1B,EAEnB,GAAI,EAAO,QAET,OADe,GAAoB,EAAO,QAAS,EAAQ,EAC1C,EAKrB,OAAO,OACD,CAEN,OAAO,GAwBX,SAAgB,GAAgB,EAG9B,CACA,GAAI,CAAC,GAAW,EAAQ,MAAM,GAAK,GACjC,MAAO,CAAE,QAAS,GAAO,MAAO,0BAA2B,CAI7D,IAAM,GAAa,EAAQ,MAAM,QAAQ,EAAI,EAAE,EAAE,OAC3C,GAAc,EAAQ,MAAM,QAAQ,EAAI,EAAE,EAAE,OAElD,GAAI,IAAc,EAChB,MAAO,CACL,QAAS,GACT,MAAO,sBAAsB,EAAU,YAAY,EAAW,UAC/D,CAIH,IAAM,EAAc,EAAQ,MAAM,oCAAoC,CAQtE,OAPI,EACK,CACL,QAAS,GACT,MAAO,4BAA4B,EAAY,KAChD,CAGI,CAAE,QAAS,GAAM,CAM1B,MAAa,GAA+B,CAC1C,CAAE,KAAM,WAAY,YAAa,6BAA8B,QAAS,QAAS,CACjF,CAAE,KAAM,YAAa,YAAa,6BAA8B,QAAS,MAAO,CAChF,CAAE,KAAM,WAAY,YAAa,yBAA0B,QAAS,YAAa,CACjF,CAAE,KAAM,WAAY,YAAa,0BAA2B,QAAS,SAAU,CAC/E,CAAE,KAAM,SAAU,YAAa,4BAA6B,QAAS,WAAY,CACjF,CAAE,KAAM,SAAU,YAAa,kBAAmB,QAAS,WAAY,CACvE,CAAE,KAAM,QAAS,YAAa,mBAAoB,QAAS,UAAW,CACtE,CAAE,KAAM,YAAa,YAAa,2BAA4B,QAAS,uBAAwB,CAC/F,CAAE,KAAM,QAAS,YAAa,6BAA8B,QAAS,MAAO,CAC5E,CAAE,KAAM,SAAU,YAAa,8BAA+B,QAAS,MAAO,CAC9E,CAAE,KAAM,SAAU,YAAa,gBAAiB,QAAS,OAAQ,CACjE,CAAE,KAAM,UAAW,YAAa,kBAAmB,QAAS,KAAM,CAClE,CAAE,KAAM,aAAc,YAAa,0BAA2B,QAAS,IAAK,CAC7E,CCpLD,SAAS,EACP,EACsD,CAEtD,MAAO,EAAE,WAAY,GA2DvB,SAAgB,GAAoB,CAClC,KACA,OACA,cACA,eACA,aACA,SACA,SAAU,EAAiB,YAC3B,aACA,aACA,iBACA,OAAO,OACP,kBACA,YACA,sBACsB,CAEtB,GAAI,IAAS,aAAe,CAAC,EAC3B,MAAU,MACR,mBAAmB,EAAG,oEACvB,CAEH,GAAI,IAAS,YAAc,CAAC,EAC1B,MAAU,MACR,mBAAmB,EAAG,0DACvB,CAEH,GAAI,IAAS,QAAU,CAAC,GAAa,CAAC,EACpC,MAAU,MACR,mBAAmB,EAAG,yEACvB,CAIH,IAAM,EAA2B,CAC/B,GAAG,EACH,GAAG,EACJ,CAED,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAE5B,OAAO,MAAO,EAAuC,CACnD,KACA,OACA,cACA,KAAM,EAAS,QACf,eACA,aACA,aACA,iBACA,YAAa,EACb,aAAc,EACd,KAAM,CAAE,KAAM,EAAM,YAAW,SAAQ,QAAO,cACrC,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAO,CACX,SACA,OAAQ,EACR,QACD,CAGK,EAAqB,MAAO,EAAO,IAAI,WAAa,CACxD,GAAI,IAAS,WAAY,
MAAO,GAChC,GAAI,IAAS,YAAa,MAAO,GAGjC,IAAM,EAAW,EAAK,MAAQ,EACxB,EAAY,EAAyB,kBAiC3C,OA9BI,EAAW,GAAK,EAAW,GAC7B,MAAO,EAAO,SACZ,QAAQ,EAAK,GAAG,IAAI,EAAS,2BAA2B,EAAU,wBACnE,CACM,IAIJ,GAQgB,MAAO,EAAa,gBACvC,EACA,EACD,EACiB,uBAOlB,MAAO,EAAO,SACZ,QAAQ,EAAK,GAAG,+BACjB,CACM,KATL,MAAO,EAAO,SACZ,gEACD,CACM,KAfP,MAAO,EAAO,SACZ,sDACD,CACM,KAmBT,CAEI,CAAE,OAAM,WAAU,WAAU,gBAChC,EAAsB,EAAK,SAAS,CAEtC,GAAI,GAAsB,EAAoB,CAE5C,MAAO,EAAO,SAAS,iCAAiC,EAAK,KAAK,CAUlE,IAAMC,EAAkB,MAAO,EAPX,MAAO,EAAa,WACtC,EAAK,GACL,EACA,EACD,CAKC,EACD,CAGK,EAAe,EAAeA,EAAgB,CAChDA,EACAA,EAAgB,OACdC,EAAa,EAAeD,EAAgB,CAC9C,IAAA,GACAA,EAAgB,KACd,EAAgB,EAAeA,EAAgB,CACjD,IAAA,GACAA,EAAgB,cAGhBE,EAAiB,EAAeF,EAAgB,CAChD,IAAA,GACAA,EAAgB,SAEhB,CAACE,GAAkB,IAMrB,EAAiB,EAAgB,EALX,EACpB,EACA,CAAE,SAAQ,QAAO,OAAQ,EAAI,SAAU,EAAgB,CACvD,EACD,CACqD,EAAO,EAI/D,IAAM,EAAe,MAAO,EAAa,gBACvC,EACA,EACD,CAEGC,EAEJ,GAAI,EAAa,uBAEf,MAAO,EAAO,SACZ,6BAA6B,EAAK,GAAG,8BACtC,CAED,EAAS,MAAO,EAAa,aAC3B,CACE,YACA,qBAAsB,GACtB,SAAU,EACV,KAAMF,GAAc,EACpB,SAAUC,GAAkB,EAC5B,aAAc,EACd,SAAU,EACV,OACD,CACD,EACA,EACD,KACI,CAGL,MAAO,EAAO,SACZ,uCAAuC,EAAK,GAAG,kCAChD,CAED,IAAME,EAA6B,EAAE,CACrC,MAAO,EAAO,WAAW,EAAe,GACtC,EAAO,SAAW,CAChB,EAAa,KAAK,EAAM,EACxB,CACH,CAGD,IAAM,EAAc,EAAa,QAC9B,EAAK,IAAU,EAAM,EAAM,WAC5B,EACD,CACKC,EAAc,IAAI,WAAW,EAAY,CAC3C,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAY,IAAI,EAAO,EAAO,CAC9B,GAAU,EAAM,WAIlB,IAAM,EAAuB,IAAI,eAAe,CAC9C,MAAM,EAAY,CAChB,EAAW,QAAQA,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAEF,EAAS,MAAO,EAAa,OAC3B,CACE,YACA,KAAMA,EAAY,WAClB,KAAMJ,GAAc,EACpB,SAAUC,GAAkB,EAC5B,aAAc,EACd,SAAU,EACV,OACD,CACD,EACA,EACD,CAIH,IAAMI,EAAkB,EACpB,CACE,GAAG,EACH,GAAIL,GAAc,CAChB,SAAUA,EACV,KAAMA,EACN,eAAgBA,EACjB,CACD,GAAIC,GAAkB,CACpB,SAAUA,EACV,aAAcA,EACd,KAAMA,EACN,UACEA,EAAe,MAAM,IAAI,CAAC,KAAK,EAAI,EAAS,UAC/C,CACF,CACDK,EAAO,SAEX,OAAO,EACLD,EACI,CAAE,GAAGC,EAAQ,SAAUD,EAAiB,CACxCC,EACL,CAIH,GAAI,CAAC,EACH,MAAU,MACR,mBAAmB,EAAG,8DACvB,CAOH,IAAM,EAAkB,MAAO,EAHZ,MAAO,EAAa,KAAK,EAAK,GAAI,EAAS,CAGT,EAAK,CAGpD,EACJ,aAA2B,WACvB,EACA,EAAgB,MAEhB,EACJ,aAA2B,WACvB,IAAA,GACA,EAAgB,KAGlB,EACF,aAA2B,WACvB,IAAA,GACA,EAAgB,SAGlB,CAAC,GAAkB,IAWrB,EAAiB,EAAgB,EAVX,EACpB,EACA,CACE,SACA,QACA,OAAQ,EACR,SAAU,EACX,CACD,EACD,CACqD,EAAO,EAI/D,IAAM,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAY,CAC/B,EAAW,OAAO,EAErB,CAAC,CAII,EAAS,MAAO,EAAa,OACjC,CACE,YACA,KAAM,EAAY,WAClB,KAAM,GAAc,EACpB,SAAU,GAAkB,EAC5B,aAAc,EACd,SAAU,EACV,OACD,CACD,EACA,EACD,CAGK,EAAkB,EACpB,CACE,GAAG,EAEH,GAAI,GAAc,CAChB,SAAU,EACV,KAAM,EACN,eAAgB,EACjB,CAED,GAAI,GAAkB,CACpB,SAAU,EACV,aAAc,EACd,KAAM,EAEN,UACE,EAAe,MAAM,IAAI,CAAC,KAAK,EAAI,EAAS,UAC/C,CACF,CACD,EAAO,SAEX,OAAO,EACL,EACI,CACE,GAAG,EACH,SAAU,EACX,CACD,EACL,EACD,CAEL,CAAC,EACF,CCpfJ,IAAa,GAAb,cAAwC,EAAQ,IAAI,qBAAqB,EAGtE,AAAC,GCuES,GAAb,cAAsC,EAAQ,IAAI,mBAAmB,EAGlE,AAAC,GCkCS,GAAb,cAAoC,EAAQ,IAAI,iBAAiB,EAG9D,AAAC,GChGS,GAAb,cAAmC,EAAQ,IAAI,gBAAgB,EAG5D,AAAC,GCmIS,GAAb,cAAiC,EAAQ,IAAI,cAAc,EAGxD,AAAC,GCjMJ,MAAa,GAA4B,EAAE,OAAO,CAEhD,YAAa,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC,UAAU,CAC9C,CAAC,CCHW,GAA8B,EAAE,OAAO,CAElD,SAAU,EAAE,QAAQ,CAAC,aAAa,CAElC,MAAO,EAAE,QAAQ,CAAC,UAAU,CAE5B,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAE7B,MAAO,EAAE,QAAQ,CAEjB,OAAQ,EAAE,QAAQ,CAElB,QAAS,EAAE,QAAQ,CAAC,aAAa,CAEjC,UAAW,EAAE,QAAQ,CAAC,UAAU,CAEhC,YAAa,EAAE,QAAQ,CAEvB,SAAU,EAAE,SAAS,CAErB,WAAY,EAAE,QAAQ,CAAC,UAAU,CAEjC,aAAc,EAAE,QAAQ,CAAC,aAAa,CAAC,UAAU,CAEjD,KAAM,EAAE,QAAQ,CAAC,aAAa,CAC/B,CAAC,CCzBW,GAAgC,EAAE,OAAO,CAEpD,UAAW,EAAE,QAAQ,CAAC,aAAa,CAEnC,OAAQ,EAAE,KAAK,CAAC,MAAO,OAAO,CAAC,CAAC,UAAU,CAE1C,QAAS,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAAC,UAAU,CAC/C,CAAC,CCPW,GAAuB,EAAE,OAAO,CAE3C,QAAS,EA
AE,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CAEnC,OAAQ,EAAE,KAAK,CAAC,OAAQ,OAAQ,MAAO,OAAO,CAAU,CACzD,CAAC,CCLW,GAA+B,EAAE,OAAO,CAEnD,YAAa,EAAE,KAAK,CAAC,YAAY,CAAC,CAAC,UAAU,CAC9C,CAAC,CCFW,GAAqB,EAC/B,OAAO,CAEN,MAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAEvC,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAExC,IAAK,EAAE,KAAK,CAAC,UAAW,QAAS,OAAO,CAAC,CAC1C,CAAC,CACD,OACE,GAAS,EAAK,OAAS,EAAK,OAC7B,sDACD,CCZU,GAA0B,EACpC,OAAO,CAEN,MAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAEvC,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAExC,YAAa,EAAE,KAAK,CAAC,OAAQ,SAAS,CAAC,CAAC,UAAU,CAElD,QAAS,EAAE,KAAK,CAAC,UAAW,WAAY,UAAU,CAAC,CAAC,UAAU,CAC/D,CAAC,CACD,OACE,GAAS,EAAK,OAAS,EAAK,OAC7B,4DACD,CCfU,GAA6B,EAAE,OAAO,CAEjD,OAAQ,EAAE,KAAK,CAAC,MAAO,OAAQ,MAAO,MAAM,CAAC,CAE7C,MAAO,EAAE,KAAK,CAAC,OAAQ,OAAQ,MAAO,MAAM,CAAC,CAAC,UAAU,CAExD,aAAc,EAAE,QAAQ,CAAC,UAAU,CAEnC,aAAc,EAAE,QAAQ,CAAC,UAAU,CAEnC,WAAY,EAAE,KAAK,CAAC,MAAO,MAAO,OAAQ,SAAS,CAAC,CAAC,UAAU,CAChE,CAAC,CCUW,GAAwB,EAAE,OAAO,CAC5C,KAAM,EAAE,QAAQ,SAAS,CAEzB,MAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAEvC,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAExC,IAAK,EAAE,KAAK,CAAC,UAAW,QAAS,OAAO,CAAC,CAC1C,CAAC,CAQW,GAAsB,EAAE,OAAO,CAC1C,KAAM,EAAE,QAAQ,OAAO,CAEvB,MAAO,EAAE,QAAQ,CAAC,IAAI,GAAI,CAAC,IAAI,IAAK,CACrC,CAAC,CAQW,GAAwB,EAAE,OAAO,CAC5C,KAAM,EAAE,QAAQ,SAAS,CAEzB,MAAO,EAAE,QAAQ,CAEjB,WAAY,EAAE,QAAQ,CAAC,UAAU,CAClC,CAAC,CAQW,GAAsB,EAAE,OAAO,CAC1C,KAAM,EAAE,QAAQ,OAAO,CAEvB,UAAW,EAAE,KAAK,CAAC,aAAc,WAAW,CAAC,CAC9C,CAAC,CAYW,GAA2B,EAAE,OAAO,CAC/C,KAAM,EAAE,QAAQ,YAAY,CAC7B,CAAC,CAQW,GAAuB,EAAE,OAAO,CAC3C,KAAM,EAAE,QAAQ,QAAQ,CACzB,CAAC,CAQW,GAA4B,EAAE,OAAO,CAChD,KAAM,EAAE,QAAQ,aAAa,CAE7B,MAAO,EAAE,QAAQ,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CACrC,CAAC,CAQW,GAA0B,EAAE,OAAO,CAC9C,KAAM,EAAE,QAAQ,WAAW,CAE3B,MAAO,EAAE,QAAQ,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CACrC,CAAC,CAYW,GAAyB,EAAE,OAAO,CAC7C,KAAM,EAAE,QAAQ,UAAU,CAE1B,MAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CACxC,CAAC,CAsBW,GAA2B,EAAE,OAAO,CAC/C,KAAM,EAAE,QAAQ,YAAY,CAE5B,UAAW,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAElC,SAAU,EAAE,KAAK,CACf,WACA,YACA,cACA,eACA,SACD,CAAC,CAEF,QAAS,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAEjC,QAAS,EAAE,QAAQ,CAAC,UAAU,CAE9B,QAAS,EAAE,QAAQ,CAAC,UAAU,CAC/B,CAAC,CAQW,GAAsB,EAAE,OAAO,CAC1C,KAAM,EAAE,QAAQ,OAAO,CAEvB,UAAW,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAElC,SAAU,EAAE,KAAK,CACf,WACA,YACA,cACA,eACA,SACD,CAAC,CAEF,MAAO,EAAE,QAAQ,CAAC,IAAI,GAAI,CAAC,IAAI,EAAI,CAEnC,QAAS,EAAE,QAAQ,CAAC,UAAU,CAE9B,QAAS,EAAE,QAAQ,CAAC,UAAU,CAC/B,CAAC,CAQW,GAAsB,EAAE,OAAO,CAC1C,KAAM,EAAE,QAAQ,OAAO,CAEvB,KAAM,EAAE,QAAQ,CAAC,IAAI,EAAE,CAEvB,SAAU,EAAE,KAAK,CACf,WACA,YACA,cACA,eACA,SACD,CAAC,CAEF,SAAU,EAAE,QAAQ,CAAC,UAAU,CAE/B,MAAO,EAAE,QAAQ,CAAC,IAAI,EAAE,CAExB,WAAY,EAAE,QAAQ,CAAC,UAAU,CAEjC,QAAS,EAAE,QAAQ,CAAC,UAAU,CAE9B,QAAS,EAAE,QAAQ,CAAC,UAAU,CAC/B,CAAC,CAYW,GAAuB,EAAE,mBAAmB,OAAQ,CAC/D,GACA,GACA,GACA,GACA,GACA,GACA,GACA,GACA,GACA,GACA,GACA,GACD,CAAC,CAgBW,GAA6B,EAAE,OAAO,CAEjD,gBAAiB,EAAE,MAAM,GAAqB,CAAC,IAAI,EAAE,CACtD,CAAC,CC/QW,GAAwB,EAClC,OAAO,CAEN,UAAW,EAAE,QAAQ,CAAC,aAAa,CAEnC,QAAS,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAEzC,SAAU,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CAC3C,CAAC,CACD,OACE,GAAS,CAAC,EAAK,SAAW,CAAC,EAAK,SACjC,2CACD,CACA,OACE,GAAS,CAAC,EAAK,SAAW,EAAK,QAAU,EAAK,UAC/C,yCACD,CCqMH,IAAa,GAAb,cAAiC,EAAQ,IAAI,cAAc,EAGxD,AAAC,GCnGS,GAAb,cAAqC,EAAQ,IAAI,kBAAkB,EAGhE,AAAC,GC7DS,GAAb,cAA+B,EAAQ,IAAI,YAAY,EAGpD,AAAC,GCkTJ,MAAM,IACJ,EACA,IACiB,CACjB,GAAI,EAAQ,SAAW,EACrB,OAAO,EAGT,GAAM,CAAC,EAAO,GAAG,GAAQ,EACzB,OAAO,EAAK,QACT,EAAK,IAAW,EAAE,MAAM,CAAC,EAAK,EAAO,CAAC,CACvC,EACD,EAGH,SAAgB,GACd,EASA,CACA,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAc,OAAO,QAAQ,EAAO,MAAM,CAI1C,EACJ,GAMA,EAAO,SAAS,EAAK,CAChB,EAKD,EA
AO,QAAQ,EAAgD,CAE/D,EAAkB,MAAO,EAAO,QAAQ,GAAc,CAAC,EAAK,KAChE,EAAO,QAAQ,EAAY,EAAK,CAAG,GAC7B,EAAa,KAAO,EASjB,EAAO,QAAQ,CAAC,EAAK,EAAa,CAAU,CAR1C,EAAO,KACZ,EAAgB,SAAS,kBAAmB,CAC1C,MAAW,MACT,YAAY,EAAI,0BAA0B,EAAa,KACxD,CACF,CAAC,CACH,CAGH,CACH,CAEK,EAAiB,OAAO,YAC5B,EACD,CACK,EAAgB,EAAgB,KAAK,EAAG,KAAU,EAAK,CAEvD,EAAe,EAClB,QAAQ,EAAG,KAAU,EAAK,OAAS,EAAS,MAAM,CAClD,KAAK,EAAG,KAAU,EAAK,YAAY,CAGhCC,EAAwB,EAAO,MAAM,IAAK,IAAU,CACxD,OAAQ,EAAe,EAAK,SAAS,IAAM,EAAK,OAChD,OAAQ,EAAe,EAAK,SAAS,IAAM,EAAK,OAChD,WAAY,EAAK,WACjB,WAAY,EAAK,WAClB,EAAE,CAGG,EAAc,IAAI,IACtB,EACG,KAAK,CAAC,KAAS,EAAe,IAAM,GAAG,CACvC,OACE,GACC,GAAU,CAAC,EAAU,KAAM,GAAS,EAAK,SAAW,EAAO,CAC9D,CACJ,CAEK,EAAgB,EACnB,QAAQ,EAAG,KAAU,EAAY,IAAI,EAAK,GAAG,CAAC,CAC9C,KAAK,EAAG,KAAU,EAAK,aAAa,CAEjC,EACJ,EAAO,aAAe,GAAiB,EAAc,EAAE,SAAS,CAAC,CAE7D,EACJ,EAAO,cAAgB,GAAiB,EAAe,EAAE,SAAS,CAAC,CAgBrE,OAda,MAAO,EAAqB,CACvC,OAAQ,EAAO,OACf,KAAM,EAAO,KACb,MAAO,EACP,MAAO,EACP,cACA,eACA,YAAa,EAAO,YACpB,QAAS,EAAO,QAChB,kBAAmB,EAAO,kBAC1B,MAAO,EAAO,MACd,eAAgB,EAAO,eACxB,CAAC,EAOF,CC9cJ,MAAa,GAAgB,EAAE,OAAO,CACpC,OAAQ,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAE,KAAK,CAAC,CACtC,CAAC"}