@uploadista/client-core 0.0.20-beta.6 → 0.0.20-beta.7

@@ -1 +1 @@
- {"version":3,"file":"index.mjs","names":["result: BufferedChunk","httpClient: HttpClient","authManager: AuthManager","type: \"direct\" | \"uploadista-cloud\" | \"no-auth\"","config: DirectAuthConfig","platformService: PlatformService","logger: Logger","config: UploadistaCloudAuthConfig","httpClient: HttpClient","cachedToken: CachedToken","noopLog: LogFunction","capabilities: DataStoreCapabilities","defaultClientCapabilities: DataStoreCapabilities","s3LikeCapabilities: DataStoreCapabilities","gcsLikeCapabilities: DataStoreCapabilities","filesystemLikeCapabilities: DataStoreCapabilities","DEFAULT_STRATEGIES: Record<string, ChunkingStrategy>","S3_OPTIMIZED_STRATEGIES: Record<string, ChunkingStrategy>","fallbackStrategy: ChunkingStrategy","baseStrategy: ChunkingStrategy","segments: {\n startByte: number;\n endByte: number;\n segmentIndex: number;\n }[]","error","storedUpload: PreviousUpload","checksum: string | undefined","createUploadData: InputFile","parallelState: ParallelUploadState","segmentSource: FileSource","aggregatedResult: UploadFile","terminate","chunkMetrics: ChunkMetrics","sessionMetrics: UploadSessionMetrics","groups: Record<string, ChunkMetrics[]>","sizeGroup: string","recommendations: string[]","negotiationOptions: UploadStrategyOptions","errors: string[]","warnings: string[]","body: RequestBody","uploadistaApi: UploadistaApi","logger: Logger","onEvent?: UploadistaWebSocketEventHandler","defaultConnectionPoolingConfig: ConnectionPoolConfig","authManager: AuthManager","cachedCapabilities: DataStoreCapabilities | null","smartChunker: SmartChunker","uploadId: string | null","uploadIdStorageKey: string | null","timeoutId: Timeout | null","abortControllers: Map<string, ReturnType<typeof abortControllerFactory.create>>","uploadIds: Map<string, string>","timeoutIds: Timeout[]","metrics","errors: string[]","warnings: string[]","eventSource: EventSource<T>","wrappedHandler: SubscriptionEventHandler<T>","subscription: SubscriptionInfo<T>","initialState: FlowUploadState","flowUploadFn: FlowUploadFunction<TInput>","callbacks: FlowManagerCallbacks","options: FlowUploadOptions","multiInputUploadFn?: MultiInputFlowUploadFunction","initialState","totalBytes: number | null","internalOptions: InternalFlowUploadOptions","totalBytes","initialState: UploadState","uploadFn: UploadFunction<TInput, TOptions>","callbacks: UploadManagerCallbacks","options?: TOptions","totalBytes: number | null"],"sources":["../src/chunk-buffer.ts","../src/auth/auth-http-client.ts","../src/auth/types.ts","../src/auth/direct-auth.ts","../src/auth/no-auth.ts","../src/auth/uploadista-cloud-auth.ts","../src/logger.ts","../src/mock-data-store.ts","../src/network-monitor.ts","../src/smart-chunker.ts","../src/utils/input-detection.ts","../src/error.ts","../src/upload/upload-utils.ts","../src/upload/chunk-upload.ts","../src/upload/flow-upload.ts","../src/upload/flow-upload-orchestrator.ts","../src/upload/upload-storage.ts","../src/upload/single-upload.ts","../src/upload/parallel-upload.ts","../src/services/platform-service.ts","../src/upload/upload-manager.ts","../src/upload/upload-metrics.ts","../src/upload/upload-strategy.ts","../src/client/uploadista-api.ts","../src/client/uploadista-websocket-manager.ts","../src/client/create-uploadista-client.ts","../src/types/previous-upload.ts","../src/storage/client-storage.ts","../src/storage/in-memory-storage-service.ts","../src/managers/event-subscription-manager.ts","../src/managers/flow-manager.ts","../src/managers/upload-manager.ts"],"sourcesContent":["import type { 
BufferedChunk } from \"./types/buffered-chunk\";\n\n/**\n * Configuration options for ChunkBuffer.\n *\n * Controls how the buffer accumulates chunks before flushing them to the datastore.\n * This is essential for datastores with minimum chunk size requirements (e.g., AWS S3's 5MB minimum).\n */\nexport interface ChunkBufferConfig {\n /**\n * Minimum chunk size required by the datastore before flushing (in bytes).\n * For example, AWS S3 requires a minimum of 5MB per multipart upload part.\n */\n minThreshold: number;\n\n /**\n * Maximum buffer size before forcing a flush (in bytes).\n * Defaults to 2x minThreshold. Prevents memory issues with very slow uploads.\n */\n maxBufferSize?: number;\n\n /**\n * Maximum time to wait before flushing pending data (in milliseconds).\n * Defaults to 30000ms (30 seconds). Ensures timely uploads even with slow data arrival.\n */\n timeoutMs?: number;\n}\n\n/**\n * ChunkBuffer accumulates small chunks until they meet the minimum threshold\n * required by the datastore (e.g., S3's 5MB minimum part size).\n *\n * This prevents inefficient upload/download cycles of incomplete parts by buffering\n * small chunks in memory until they reach the datastore's minimum size requirement.\n * The buffer automatically flushes when the threshold is met, the maximum buffer\n * size is exceeded, or a timeout occurs.\n *\n * @example Basic usage with S3's 5MB minimum\n * ```typescript\n * const buffer = new ChunkBuffer({\n * minThreshold: 5 * 1024 * 1024, // 5MB\n * maxBufferSize: 10 * 1024 * 1024, // 10MB\n * timeoutMs: 30000, // 30 seconds\n * });\n *\n * // Add chunks as they arrive\n * const chunk1 = new Uint8Array(2 * 1024 * 1024); // 2MB\n * buffer.add(chunk1); // Returns null (below threshold)\n *\n * const chunk2 = new Uint8Array(3 * 1024 * 1024); // 3MB\n * const buffered = buffer.add(chunk2); // Returns combined 5MB chunk\n * ```\n *\n * @example Handling incomplete uploads\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 5 * 1024 * 1024 });\n *\n * // After adding several small chunks\n * buffer.add(smallChunk1);\n * buffer.add(smallChunk2);\n *\n * // Force flush remaining data at end of upload\n * if (buffer.hasPendingData()) {\n * const finalChunk = buffer.flush();\n * await uploadFinalChunk(finalChunk);\n * }\n * ```\n */\nexport class ChunkBuffer {\n private buffer: Uint8Array[] = [];\n private currentSize = 0;\n private config: Required<ChunkBufferConfig>;\n private lastAddTime = 0;\n\n /**\n * Creates a new ChunkBuffer instance.\n *\n * @param config - Buffer configuration including thresholds and timeout\n */\n constructor(config: ChunkBufferConfig) {\n this.config = {\n minThreshold: config.minThreshold,\n maxBufferSize: config.maxBufferSize ?? config.minThreshold * 2,\n timeoutMs: config.timeoutMs ?? 
30000, // 30 seconds\n };\n }\n\n /**\n * Adds a chunk to the buffer and returns the accumulated chunk if the flush threshold is met.\n *\n * The buffer will automatically flush (return the combined chunk) when:\n * - The total buffered size meets or exceeds minThreshold\n * - The total buffered size exceeds maxBufferSize\n * - The time since the last chunk exceeds timeoutMs\n *\n * @param chunk - The chunk data to add to the buffer\n * @returns The combined buffered chunk if flush conditions are met, null otherwise\n *\n * @example Progressive buffering\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 1024 * 1024 }); // 1MB\n *\n * // First chunk doesn't meet threshold\n * const result1 = buffer.add(new Uint8Array(512 * 1024)); // 512KB\n * console.log(result1); // null\n *\n * // Second chunk triggers flush\n * const result2 = buffer.add(new Uint8Array(512 * 1024)); // 512KB\n * console.log(result2?.size); // 1048576 (1MB total)\n * ```\n */\n add(chunk: Uint8Array): BufferedChunk | null {\n this.buffer.push(chunk);\n this.currentSize += chunk.length;\n this.lastAddTime = Date.now();\n\n if (this.shouldFlush()) {\n return this.flush();\n }\n\n return null;\n }\n\n /**\n * Forces the buffer to flush immediately, returning all accumulated data.\n *\n * This is typically called at the end of an upload to ensure any remaining\n * buffered data is sent, even if it hasn't reached the minimum threshold.\n *\n * @returns The combined buffered chunk, or null if the buffer is empty\n *\n * @example Flushing at upload completion\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 5 * 1024 * 1024 });\n *\n * // Upload file in chunks\n * for (const chunk of fileChunks) {\n * const buffered = buffer.add(chunk);\n * if (buffered) await uploadChunk(buffered);\n * }\n *\n * // Upload any remaining data\n * const final = buffer.flush();\n * if (final) await uploadChunk(final);\n * ```\n */\n flush(): BufferedChunk | null {\n if (this.buffer.length === 0) {\n return null;\n }\n\n const combined = new Uint8Array(this.currentSize);\n let offset = 0;\n\n for (const chunk of this.buffer) {\n combined.set(chunk, offset);\n offset += chunk.length;\n }\n\n const result: BufferedChunk = {\n data: combined,\n size: this.currentSize,\n timestamp: this.lastAddTime,\n };\n\n this.reset();\n return result;\n }\n\n /**\n * Checks if the buffer should be flushed based on size, max buffer, or timeout conditions.\n *\n * Returns true if any of these conditions are met:\n * - Current size >= minThreshold\n * - Current size >= maxBufferSize\n * - Time since last add > timeoutMs\n *\n * @returns True if the buffer should be flushed\n *\n * @example Manual flush control\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 1024 * 1024 });\n *\n * buffer.add(smallChunk);\n *\n * if (buffer.shouldFlush()) {\n * const data = buffer.flush();\n * await upload(data);\n * }\n * ```\n */\n shouldFlush(): boolean {\n if (this.currentSize >= this.config.minThreshold) {\n return true;\n }\n\n if (this.currentSize >= this.config.maxBufferSize) {\n return true;\n }\n\n const timeSinceLastAdd = Date.now() - this.lastAddTime;\n if (this.buffer.length > 0 && timeSinceLastAdd > this.config.timeoutMs) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Returns the current buffer state without flushing.\n *\n * Useful for monitoring buffer status and making informed decisions\n * about when to manually flush or adjust upload strategies.\n *\n * @returns Object containing buffer metrics\n *\n 
* @example Monitoring buffer state\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 1024 * 1024 });\n * buffer.add(chunk);\n *\n * const info = buffer.getBufferInfo();\n * console.log(`Buffered: ${info.size} bytes in ${info.chunkCount} chunks`);\n * console.log(`Ready to flush: ${info.isReadyToFlush}`);\n * console.log(`Time since last add: ${info.timeSinceLastAdd}ms`);\n * ```\n */\n getBufferInfo(): {\n size: number;\n chunkCount: number;\n isReadyToFlush: boolean;\n timeSinceLastAdd: number;\n } {\n return {\n size: this.currentSize,\n chunkCount: this.buffer.length,\n isReadyToFlush: this.shouldFlush(),\n timeSinceLastAdd: Date.now() - this.lastAddTime,\n };\n }\n\n /**\n * Checks if the buffer has any pending data that hasn't been flushed.\n *\n * Useful for determining if a final flush is needed at upload completion.\n *\n * @returns True if there are chunks waiting in the buffer\n *\n * @example Ensuring complete upload\n * ```typescript\n * // Upload all chunks\n * for (const chunk of chunks) {\n * const buffered = buffer.add(chunk);\n * if (buffered) await upload(buffered);\n * }\n *\n * // Don't forget the last partial chunk!\n * if (buffer.hasPendingData()) {\n * await upload(buffer.flush());\n * }\n * ```\n */\n hasPendingData(): boolean {\n return this.buffer.length > 0;\n }\n\n /**\n * Clears the buffer without returning data.\n *\n * This discards all buffered chunks and resets the buffer state.\n * Use with caution as this will lose any pending data.\n */\n reset(): void {\n this.buffer = [];\n this.currentSize = 0;\n this.lastAddTime = 0;\n }\n\n /**\n * Returns the minimum threshold this buffer is configured for.\n *\n * @returns Minimum chunk size in bytes before flushing\n */\n getMinThreshold(): number {\n return this.config.minThreshold;\n }\n}\n","import type {\n HttpClient,\n HttpRequestOptions,\n HttpResponse,\n} from \"../services/http-client\";\nimport type { DirectAuthManager } from \"./direct-auth\";\nimport type { NoAuthManager } from \"./no-auth\";\nimport type { UploadistaCloudAuthManager } from \"./uploadista-cloud-auth\";\n\n/**\n * Union type of all auth managers\n */\nexport type AuthManager =\n | DirectAuthManager\n | UploadistaCloudAuthManager\n | NoAuthManager;\n\n/**\n * Auth-aware HTTP client wrapper.\n *\n * Wraps a standard HttpClient and automatically attaches authentication\n * credentials/tokens to all HTTP requests based on the configured auth manager.\n *\n * The wrapper delegates all non-auth concerns (connection pooling, metrics, etc.)\n * to the underlying HttpClient and only adds the auth layer on top.\n */\nexport class AuthHttpClient implements HttpClient {\n constructor(\n private httpClient: HttpClient,\n private authManager: AuthManager,\n ) {}\n\n /**\n * Make an HTTP request with authentication credentials attached.\n * Calls the auth manager to attach credentials before delegating to the underlying client.\n */\n async request(\n url: string,\n options: HttpRequestOptions = {},\n ): Promise<HttpResponse> {\n try {\n // Attach auth credentials to request headers\n const authenticatedHeaders = await this.attachAuthCredentials(\n options.headers || {},\n url,\n );\n\n // Delegate to underlying HTTP client with authenticated headers\n return await this.httpClient.request(url, {\n ...options,\n headers: authenticatedHeaders,\n // include credentials for cors if needed\n credentials:\n this.authManager.getType() === \"no-auth\" ||\n this.authManager.getType() === \"uploadista-cloud\"\n ? 
\"omit\"\n : (options.credentials ?? \"include\"),\n });\n } catch (error) {\n // If auth fails, wrap error with context\n if (error instanceof Error && error.message.includes(\"auth\")) {\n throw error; // Re-throw auth errors as-is\n }\n\n // For other errors, let them propagate\n throw error;\n }\n }\n\n /**\n * Attach authentication credentials to request headers.\n * Delegates to the appropriate auth manager method.\n */\n private async attachAuthCredentials(\n headers: Record<string, string>,\n url: string,\n ): Promise<Record<string, string>> {\n // Check if this is a DirectAuthManager or UploadistaCloudAuthManager\n if (\"attachCredentials\" in this.authManager) {\n // DirectAuthManager or NoAuthManager\n return await this.authManager.attachCredentials(headers);\n }\n\n if (\"attachToken\" in this.authManager) {\n // UploadistaCloudAuthManager - extract job ID from URL if present\n const jobId = this.extractJobIdFromUrl(url);\n return await this.authManager.attachToken(headers, jobId);\n }\n\n // Fallback - return headers unchanged\n return headers;\n }\n\n /**\n * Extract job ID from URL for SaaS mode token caching.\n * Looks for patterns like /upload/{id} or /jobs/{id} in the URL.\n */\n private extractJobIdFromUrl(url: string): string | undefined {\n // Match patterns like:\n // - /api/upload/{uploadId}\n // - /api/flow/{flowId}/{storageId}\n // - /api/jobs/{jobId}/status\n // - /api/jobs/{jobId}/resume/{nodeId}\n\n const uploadMatch = url.match(/\\/api\\/upload\\/([^/?]+)/);\n if (uploadMatch) {\n return uploadMatch[1];\n }\n\n const flowMatch = url.match(/\\/api\\/flow\\/([^/?]+)/);\n if (flowMatch) {\n return flowMatch[1];\n }\n\n const jobMatch = url.match(/\\/api\\/jobs\\/([^/?]+)/);\n if (jobMatch) {\n return jobMatch[1];\n }\n\n // No job ID found - UploadistaCloud mode will use global token\n return undefined;\n }\n\n /**\n * Delegate metrics methods to underlying HTTP client\n */\n getMetrics() {\n return this.httpClient.getMetrics();\n }\n\n getDetailedMetrics() {\n return this.httpClient.getDetailedMetrics();\n }\n\n reset() {\n this.httpClient.reset();\n }\n\n async close() {\n await this.httpClient.close();\n }\n\n async warmupConnections(urls: string[]) {\n await this.httpClient.warmupConnections(urls);\n }\n\n /**\n * Get the underlying auth manager for advanced use cases\n */\n getAuthManager(): AuthManager {\n return this.authManager;\n }\n}\n","export class BaseAuthManager {\n constructor(private type: \"direct\" | \"uploadista-cloud\" | \"no-auth\") {}\n\n getType() {\n return this.type;\n }\n}\n/**\n * Credentials that can be attached to HTTP requests.\n * Supports headers and cookies for maximum flexibility.\n */\nexport type RequestCredentials = {\n /** HTTP headers to attach (e.g., Authorization, X-API-Key) */\n headers?: Record<string, string>;\n /** Cookies to attach (primarily for browser environments) */\n cookies?: Record<string, string>;\n};\n\n/**\n * Direct auth mode configuration.\n * Users provide a function that returns credentials to attach to every request.\n * This mode supports any authentication protocol (OAuth, JWT, sessions, API keys, etc.)\n *\n * @example Bearer token\n * ```typescript\n * {\n * mode: 'direct',\n * getCredentials: async () => ({\n * headers: {\n * 'Authorization': `Bearer ${await getAccessToken()}`\n * }\n * })\n * }\n * ```\n *\n * @example API key\n * ```typescript\n * {\n * mode: 'direct',\n * getCredentials: () => ({\n * headers: {\n * 'X-API-Key': process.env.API_KEY\n * }\n * })\n * }\n * ```\n */\nexport type 
DirectAuthConfig = {\n mode: \"direct\";\n /**\n * Function called before each HTTP request to obtain credentials.\n * Can be async to support token refresh or other async operations.\n * Should not throw - return empty object if credentials unavailable.\n */\n getCredentials?: () => RequestCredentials | Promise<RequestCredentials>;\n};\n\n/**\n * UploadistaCloud auth mode configuration.\n * Client requests JWT tokens from a user-controlled auth server,\n * which validates credentials and issues tokens using a secure API key.\n *\n * Token exchange flow:\n * 1. Client calls getCredentials() to get user credentials\n * 2. Client sends credentials to authServerUrl\n * 3. Auth server validates and returns JWT token\n * 4. Client attaches token to uploadista engine requests\n *\n * @example\n * ```typescript\n * {\n * mode: 'uploadista-cloud',\n * authServerUrl: 'https://auth.myapp.com/token',\n * clientId: 'my-client-id'\n * }\n * ```\n */\nexport type UploadistaCloudAuthConfig = {\n mode: \"uploadista-cloud\";\n /**\n * URL of the user's auth server that issues JWT tokens.\n * Should be a GET endpoint that accepts client id and returns { token, expiresIn }.\n */\n authServerUrl: string;\n /**\n * Client ID to use for authentication. It will be used to compare the API Key with the client id on the auth server.\n */\n clientId: string;\n};\n\n/**\n * Authentication configuration for the uploadista client.\n * Supports two modes:\n * - Direct: Bring your own auth (any protocol)\n * - UploadistaCloud: Standard JWT token exchange with auth server\n *\n * Use a discriminated union to ensure type safety - TypeScript will\n * enforce that the correct fields are present for each mode.\n */\nexport type AuthConfig = DirectAuthConfig | UploadistaCloudAuthConfig;\n","import type { Logger } from \"../logger\";\nimport type { PlatformService } from \"../services/platform-service\";\nimport type { DirectAuthConfig } from \"./types\";\nimport { BaseAuthManager } from \"./types\";\n\n/**\n * Direct auth manager - handles credential attachment for \"bring your own auth\" mode.\n *\n * This manager calls the user-provided getCredentials() function before each request\n * and attaches the returned credentials (headers, cookies) to the HTTP request.\n *\n * Supports any authentication protocol: OAuth, JWT, API keys, session cookies, etc.\n */\nexport class DirectAuthManager extends BaseAuthManager {\n constructor(\n private config: DirectAuthConfig,\n private platformService: PlatformService,\n private logger: Logger,\n ) {\n super(\"direct\");\n }\n\n /**\n * Attach credentials to an HTTP request by calling getCredentials() and\n * merging the returned headers/cookies with the request.\n *\n * @param headers - Existing request headers\n * @returns Updated headers with credentials attached\n * @throws Error if getCredentials() throws or returns invalid data\n */\n async attachCredentials(\n headers: Record<string, string> = {},\n ): Promise<Record<string, string>> {\n try {\n if (!this.config.getCredentials) {\n return headers;\n }\n\n // Call user's credential provider (may be async)\n const credentials = await Promise.resolve(this.config.getCredentials());\n\n // Validate credentials\n if (!credentials || typeof credentials !== \"object\") {\n throw new Error(\n \"getCredentials() must return an object with headers and/or cookies\",\n );\n }\n\n // Merge credential headers with existing headers\n const updatedHeaders = { ...headers };\n\n if (credentials.headers) {\n 
this.validateHeaders(credentials.headers);\n Object.assign(updatedHeaders, credentials.headers);\n }\n\n // Note: Cookie handling would be browser-specific\n // For now, we only support headers as cookies are automatically\n // handled by the browser when using fetch()\n if (credentials.cookies) {\n this.attachCookies(updatedHeaders, credentials.cookies);\n }\n\n return updatedHeaders;\n } catch (error) {\n // Wrap errors with context\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to attach auth credentials: ${message}`);\n }\n }\n\n /**\n * Validate that headers is a valid object with string keys and values\n */\n private validateHeaders(headers: Record<string, string>): void {\n if (typeof headers !== \"object\" || headers === null) {\n throw new Error(\"headers must be an object\");\n }\n\n for (const [key, value] of Object.entries(headers)) {\n if (typeof key !== \"string\" || typeof value !== \"string\") {\n throw new Error(\n `Invalid header: key and value must be strings (got ${key}: ${typeof value})`,\n );\n }\n }\n }\n\n /**\n * Attach cookies to request headers.\n * In browser environments, cookies are automatically handled by fetch().\n * In Node.js, we need to manually add them to the Cookie header.\n */\n private attachCookies(\n headers: Record<string, string>,\n cookies: Record<string, string>,\n ): void {\n // Check if we're in a browser environment\n const isBrowser = this.platformService.isBrowser();\n\n if (isBrowser) {\n // In browsers, fetch() automatically sends cookies for same-origin requests\n // For cross-origin, the server needs to set CORS headers and credentials: 'include'\n // We can't manually set cookies in headers for security reasons\n // So we just warn if cookies are provided in direct mode\n this.logger.warn(\n \"DirectAuth: Cookies are automatically handled by the browser. 
\" +\n \"Ensure your server has proper CORS configuration with credentials support.\",\n );\n } else {\n // In Node.js, we can manually build the Cookie header\n const cookieString = Object.entries(cookies)\n .map(([key, value]) => `${key}=${value}`)\n .join(\"; \");\n\n if (cookieString) {\n headers.Cookie = cookieString;\n }\n }\n }\n}\n","import { BaseAuthManager } from \"./types\";\n\n/**\n * No-auth manager - pass-through implementation for backward compatibility.\n *\n * When no auth configuration is provided, this manager is used to maintain\n * a consistent interface without adding any authentication to requests.\n */\nexport class NoAuthManager extends BaseAuthManager {\n constructor() {\n super(\"no-auth\");\n }\n\n /**\n * Pass through headers without modification.\n *\n * @param headers - Existing request headers\n * @returns Same headers unchanged\n */\n async attachCredentials(\n headers: Record<string, string> = {},\n ): Promise<Record<string, string>> {\n return headers;\n }\n\n /**\n * No-op for clearing tokens (NoAuthManager doesn't cache anything)\n */\n clearToken(_jobId: string): void {\n // No-op\n }\n\n /**\n * No-op for clearing all tokens\n */\n clearAllTokens(): void {\n // No-op\n }\n}\n","import type { HttpClient } from \"../services/http-client\";\nimport { BaseAuthManager, type UploadistaCloudAuthConfig } from \"./types\";\n\n/**\n * Token response from the auth server\n */\nexport type TokenResponse = {\n /** JWT token to use for authentication */\n token: string;\n /** Token expiration time in seconds (optional) */\n expiresIn?: number;\n};\n\n/**\n * Cached token information\n */\ntype CachedToken = {\n token: string;\n expiresAt?: number; // Unix timestamp in milliseconds\n};\n\n/**\n * UploadistaCloud auth manager - handles JWT token exchange with an auth server.\n *\n * Token exchange flow:\n * 1. Client calls getCredentials() to get user credentials\n * 2. Manager sends credentials to authServerUrl\n * 3. Auth server validates credentials and returns JWT token\n * 4. Manager caches token and attaches it to uploadista requests\n * 5. 
Token is cached per job to minimize auth overhead\n *\n * Security: API keys are kept server-side in the auth server, never exposed to clients.\n */\nexport class UploadistaCloudAuthManager extends BaseAuthManager {\n /** Token cache: maps job ID to cached token */\n private tokenCache = new Map<string, CachedToken>();\n\n /** Global token for requests without a specific job ID */\n private globalToken: CachedToken | null = null;\n\n constructor(\n private config: UploadistaCloudAuthConfig,\n private httpClient: HttpClient,\n ) {\n super(\"uploadista-cloud\");\n }\n\n /**\n * Fetch a JWT token from the auth server for the configured client ID.\n *\n * @returns Token response with JWT and optional expiry\n * @throws Error if auth server is unreachable or returns an error\n */\n async fetchToken(): Promise<TokenResponse> {\n try {\n // Make GET request to the auth server\n const response = await this.httpClient.request(\n `${this.config.authServerUrl}/${this.config.clientId}`,\n {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n },\n );\n\n // Handle error responses\n if (!response.ok) {\n const errorText = await response.text();\n let errorMessage = `Auth server returned ${response.status}`;\n\n try {\n const errorJson = JSON.parse(errorText);\n errorMessage = errorJson.error || errorJson.message || errorMessage;\n } catch {\n // If response is not JSON, use status text\n errorMessage = errorText || response.statusText || errorMessage;\n }\n\n throw new Error(errorMessage);\n }\n\n // Parse token response\n const data = (await response.json()) as TokenResponse;\n\n if (!data.token || typeof data.token !== \"string\") {\n throw new Error(\n \"Auth server response missing 'token' field or token is not a string\",\n );\n }\n\n return data;\n } catch (error) {\n // Wrap errors with context\n if (error instanceof Error) {\n throw new Error(`Failed to fetch auth token: ${error.message}`);\n }\n throw new Error(`Failed to fetch auth token: ${String(error)}`);\n }\n }\n\n /**\n * Get a cached token for a specific job, or fetch a new one if not cached.\n *\n * @param jobId - Optional job ID to cache token for specific job\n * @returns Cached or newly fetched token\n */\n private async getOrFetchToken(jobId?: string): Promise<string> {\n // Check if we have a cached token for this job\n if (jobId) {\n const cached = this.tokenCache.get(jobId);\n if (cached && !this.isTokenExpired(cached)) {\n return cached.token;\n }\n }\n\n // Check global token cache\n if (!jobId && this.globalToken && !this.isTokenExpired(this.globalToken)) {\n return this.globalToken.token;\n }\n\n // No valid cached token - fetch a new one\n const tokenResponse = await this.fetchToken();\n\n // Calculate expiration time if provided\n const expiresAt = tokenResponse.expiresIn\n ? 
Date.now() + tokenResponse.expiresIn * 1000\n : undefined;\n\n const cachedToken: CachedToken = {\n token: tokenResponse.token,\n expiresAt,\n };\n\n // Cache the token\n if (jobId) {\n this.tokenCache.set(jobId, cachedToken);\n } else {\n this.globalToken = cachedToken;\n }\n\n return tokenResponse.token;\n }\n\n /**\n * Check if a cached token is expired.\n * Adds a 60-second buffer to avoid using tokens that are about to expire.\n */\n private isTokenExpired(cached: CachedToken): boolean {\n if (!cached.expiresAt) {\n // No expiry set - assume token is valid\n return false;\n }\n\n // Add 60-second buffer before actual expiry\n const bufferMs = 60 * 1000;\n return Date.now() > cached.expiresAt - bufferMs;\n }\n\n /**\n * Attach JWT token to an HTTP request as Authorization Bearer header.\n *\n * @param headers - Existing request headers\n * @param jobId - Optional job ID to use cached token for specific job\n * @returns Updated headers with Authorization header\n * @throws Error if token fetch fails\n */\n async attachToken(\n headers: Record<string, string> = {},\n jobId?: string,\n ): Promise<Record<string, string>> {\n try {\n // Get token (from cache or fetch new)\n const token = await this.getOrFetchToken(jobId);\n\n // Attach as Bearer token\n return {\n ...headers,\n Authorization: `Bearer ${token}`,\n };\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to attach auth token: ${message}`);\n }\n }\n\n /**\n * Clear cached token for a specific job.\n * Should be called when a job completes to free memory.\n *\n * @param jobId - Job ID to clear token for\n */\n clearToken(jobId: string): void {\n this.tokenCache.delete(jobId);\n }\n\n /**\n * Clear all cached tokens.\n * Useful for logout or when switching users.\n */\n clearAllTokens(): void {\n this.tokenCache.clear();\n this.globalToken = null;\n }\n\n /**\n * Get cache statistics for debugging and monitoring.\n */\n getCacheStats(): {\n cachedJobCount: number;\n hasGlobalToken: boolean;\n } {\n return {\n cachedJobCount: this.tokenCache.size,\n hasGlobalToken: this.globalToken !== null,\n };\n }\n}\n","/**\n * Logger interface for Uploadista client operations.\n *\n * Provides structured logging capabilities for debugging upload progress,\n * flow execution, and client operations. 
Platform implementations should\n * provide their own logging functions (e.g., console.log, custom loggers).\n *\n * @example Using with console\n * ```typescript\n * const logger = createLogger(true, console.log);\n * logger.log('Upload started');\n * logger.warn('Retrying failed chunk');\n * logger.error('Upload failed');\n * ```\n */\nexport type Logger = {\n /**\n * Log informational messages (e.g., upload progress, state changes)\n */\n log: (message: string) => void;\n\n /**\n * Log warning messages (e.g., retry attempts, degraded performance)\n */\n warn: (message: string) => void;\n\n /**\n * Log error messages (e.g., upload failures, network errors)\n */\n error: (message: string) => void;\n};\n\n/**\n * Platform-specific logging function type.\n *\n * Accepts a message string and outputs it to the appropriate logging destination.\n * This abstraction allows the client to work across different platforms\n * (browser, Node.js, React Native) with their own logging mechanisms.\n */\nexport type LogFunction = (message: string) => void;\n\n/**\n * Default no-op logger function.\n *\n * Used when no custom logging function is provided.\n * Platform implementations should provide their own (e.g., console.log).\n */\nconst noopLog: LogFunction = () => {\n // No-op by default - platforms will override\n};\n\n/**\n * Creates a Logger instance with configurable output.\n *\n * This factory function creates a logger that can be enabled/disabled\n * and customized with a platform-specific logging function.\n *\n * @param enabled - Whether logging is enabled. When false, all log calls are no-ops\n * @param logFn - Optional custom logging function. Defaults to no-op. Pass console.log for browser/Node.js\n * @returns A Logger instance with log, warn, and error methods\n *\n * @example Basic usage with console\n * ```typescript\n * const logger = createLogger(true, console.log);\n * logger.log('Upload started');\n * ```\n *\n * @example Disabled logger (no output)\n * ```typescript\n * const logger = createLogger(false);\n * logger.log('This will not be logged');\n * ```\n *\n * @example Custom logging function\n * ```typescript\n * const customLog = (msg: string) => {\n * // Send to analytics service\n * analytics.track('upload_log', { message: msg });\n * };\n * const logger = createLogger(true, customLog);\n * ```\n */\nexport function createLogger(\n enabled: boolean,\n logFn: LogFunction = noopLog,\n): Logger {\n return {\n log: (message: string) => {\n if (enabled) {\n logFn(message);\n }\n },\n warn: (message: string) => {\n if (enabled) {\n logFn(message);\n }\n },\n error: (message: string) => {\n if (enabled) {\n logFn(message);\n }\n },\n };\n}\n","import type {\n DataStoreCapabilities,\n UploadStrategy,\n} from \"@uploadista/core/types\";\n\n/**\n * Mock data store implementation for client-side capability negotiation.\n * This doesn't perform actual data store operations but provides capability information\n * for upload strategy decisions.\n */\nexport class MockClientDataStore {\n constructor(private capabilities: DataStoreCapabilities) {}\n\n getCapabilities(): DataStoreCapabilities {\n return this.capabilities;\n }\n\n validateUploadStrategy(strategy: UploadStrategy): boolean {\n switch (strategy) {\n case \"parallel\":\n return this.capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n }\n}\n\n/**\n * Default capabilities that assume basic parallel upload support\n * (conservative defaults that work with most backends)\n 
*/\nexport const defaultClientCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: true,\n supportsConcatenation: true,\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 6, // Browser-safe default\n minChunkSize: 64 * 1024, // 64KB\n maxChunkSize: 100 * 1024 * 1024, // 100MB\n maxParts: 10000,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB\n requiresOrderedChunks: false,\n};\n\n/**\n * Capabilities for S3-compatible backends\n */\nexport const s3LikeCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: true,\n supportsConcatenation: true,\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: true,\n maxConcurrentUploads: 60,\n minChunkSize: 5 * 1024 * 1024, // 5MiB S3 minimum\n maxChunkSize: 5 * 1024 * 1024 * 1024, // 5GiB S3 maximum\n maxParts: 10000,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB\n requiresOrderedChunks: false,\n};\n\n/**\n * Capabilities for GCS-compatible backends\n */\nexport const gcsLikeCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: false, // GCS doesn't have native multipart\n supportsConcatenation: true, // Can combine files\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1,\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB\n requiresOrderedChunks: true,\n};\n\n/**\n * Capabilities for filesystem-based backends\n */\nexport const filesystemLikeCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: false, // Sequential operations\n supportsConcatenation: false,\n supportsDeferredLength: false,\n supportsResumableUploads: true,\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1,\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 1024 * 1024, // 1MB\n requiresOrderedChunks: true,\n};\n","/**\n * Assessment of current network conditions based on upload performance.\n *\n * Used by smart chunking algorithms to adapt chunk sizes based on network quality.\n */\nexport interface NetworkCondition {\n /**\n * Classification of network speed and stability:\n * - \"slow\": Average speed below slowThreshold (default 50 KB/s)\n * - \"fast\": Average speed above fastThreshold (default 5 MB/s)\n * - \"unstable\": High variance in upload speeds\n * - \"unknown\": Insufficient data to determine condition\n */\n type: \"slow\" | \"fast\" | \"unstable\" | \"unknown\";\n\n /**\n * Confidence level in the assessment (0-1).\n * Higher values indicate more samples and more reliable assessment.\n */\n confidence: number;\n}\n\n/**\n * Aggregated network performance metrics.\n *\n * Provides a comprehensive view of upload performance over time,\n * useful for debugging connectivity issues and optimizing upload strategies.\n */\nexport interface NetworkMetrics {\n /** Average upload speed in bytes per second */\n averageSpeed: number;\n\n /** Average network latency in milliseconds */\n latency: number;\n\n /** Ratio of successful uploads (0-1) */\n successRate: number;\n\n /** Ratio of failed uploads (0-1) */\n errorRate: number;\n\n /** Total number of upload requests made */\n totalRequests: number;\n\n /** Total bytes uploaded successfully */\n totalBytes: number;\n\n /** Total time spent uploading in milliseconds */\n totalTime: number;\n}\n\n/**\n * Individual upload sample for network analysis.\n *\n * 
Each successful or failed upload is recorded as a sample,\n * which is used to calculate network metrics and conditions.\n */\nexport interface UploadSample {\n /** Size of the uploaded chunk in bytes */\n size: number;\n\n /** Time taken to upload in milliseconds */\n duration: number;\n\n /** Whether the upload succeeded */\n success: boolean;\n\n /** Unix timestamp when the upload occurred */\n timestamp: number;\n\n /** Optional network latency measurement in milliseconds */\n latency?: number;\n}\n\n/**\n * Configuration options for NetworkMonitor.\n *\n * Controls how network conditions are assessed and how upload samples\n * are analyzed to determine optimal chunking strategies.\n */\nexport interface NetworkMonitorConfig {\n /** Maximum number of samples to keep in memory. Defaults to 100. */\n maxSamples?: number;\n\n /** Smoothing factor for exponential moving average (0-1). Defaults to 0.1. */\n smoothingFactor?: number;\n\n /** Minimum samples required before assessing network condition. Defaults to 5. */\n minSamplesForCondition?: number;\n\n /** Upload speed threshold for \"slow\" classification in bytes/second. Defaults to 50 KB/s. */\n slowThreshold?: number;\n\n /** Upload speed threshold for \"fast\" classification in bytes/second. Defaults to 5 MB/s. */\n fastThreshold?: number;\n\n /** Coefficient of variation threshold for \"unstable\" classification. Defaults to 0.5. */\n unstableThreshold?: number;\n}\n\n/**\n * Monitors network performance during uploads to enable adaptive chunking.\n *\n * Tracks upload samples over time and analyzes them to determine network conditions\n * (slow, fast, unstable). This information is used by smart chunking algorithms to\n * dynamically adjust chunk sizes for optimal upload performance.\n *\n * The monitor maintains a rolling window of recent samples and calculates various\n * metrics including average speed, latency, success rate, and throughput stability.\n *\n * @example Basic usage with smart chunking\n * ```typescript\n * const monitor = new NetworkMonitor({\n * maxSamples: 100,\n * slowThreshold: 50 * 1024, // 50 KB/s\n * fastThreshold: 5 * 1024 * 1024, // 5 MB/s\n * });\n *\n * // Record each upload\n * monitor.recordUpload(\n * chunkSize, // bytes\n * duration, // milliseconds\n * true, // success\n * latency // optional latency\n * );\n *\n * // Get current network condition\n * const condition = monitor.getNetworkCondition();\n * if (condition.type === 'slow') {\n * // Use smaller chunks\n * chunkSize = 256 * 1024;\n * } else if (condition.type === 'fast') {\n * // Use larger chunks\n * chunkSize = 5 * 1024 * 1024;\n * }\n * ```\n *\n * @example Monitoring network metrics\n * ```typescript\n * const monitor = new NetworkMonitor();\n *\n * // After some uploads\n * const metrics = monitor.getCurrentMetrics();\n * console.log(`Average speed: ${metrics.averageSpeed / 1024} KB/s`);\n * console.log(`Success rate: ${metrics.successRate * 100}%`);\n * console.log(`Average latency: ${metrics.latency}ms`);\n * ```\n */\nexport class NetworkMonitor {\n private samples: UploadSample[] = [];\n private config: Required<NetworkMonitorConfig>;\n private _currentMetrics: NetworkMetrics;\n\n /**\n * Creates a new NetworkMonitor instance.\n *\n * @param config - Optional configuration for thresholds and sample management\n */\n constructor(config: NetworkMonitorConfig = {}) {\n this.config = {\n maxSamples: config.maxSamples ?? 100,\n smoothingFactor: config.smoothingFactor ?? 0.1,\n minSamplesForCondition: config.minSamplesForCondition ?? 
5,\n slowThreshold: config.slowThreshold ?? 50 * 1024, // 50 KB/s\n fastThreshold: config.fastThreshold ?? 5 * 1024 * 1024, // 5 MB/s\n unstableThreshold: config.unstableThreshold ?? 0.5, // 50% coefficient of variation\n };\n\n this._currentMetrics = this.createEmptyMetrics();\n }\n\n /**\n * Adds a raw upload sample to the monitor.\n *\n * This is called internally by recordUpload but can also be used\n * to add pre-constructed samples for testing or custom tracking.\n *\n * @param sample - The upload sample to add\n */\n addSample(sample: UploadSample): void {\n this.samples.push(sample);\n\n // Keep only the most recent samples\n if (this.samples.length > this.config.maxSamples) {\n this.samples = this.samples.slice(-this.config.maxSamples);\n }\n\n this.updateMetrics();\n }\n\n /**\n * Records an upload operation for network analysis.\n *\n * This is the primary method for tracking upload performance. Each chunk upload\n * should be recorded to build an accurate picture of network conditions.\n *\n * @param size - Size of the uploaded chunk in bytes\n * @param duration - Time taken to upload in milliseconds\n * @param success - Whether the upload succeeded\n * @param latency - Optional network latency measurement in milliseconds\n *\n * @example Recording successful upload\n * ```typescript\n * const startTime = Date.now();\n * await uploadChunk(data);\n * const duration = Date.now() - startTime;\n * monitor.recordUpload(data.length, duration, true);\n * ```\n *\n * @example Recording failed upload\n * ```typescript\n * try {\n * const startTime = Date.now();\n * await uploadChunk(data);\n * monitor.recordUpload(data.length, Date.now() - startTime, true);\n * } catch (error) {\n * monitor.recordUpload(data.length, Date.now() - startTime, false);\n * }\n * ```\n */\n recordUpload(\n size: number,\n duration: number,\n success: boolean,\n latency?: number,\n ): void {\n this.addSample({\n size,\n duration,\n success,\n timestamp: Date.now(),\n latency,\n });\n }\n\n /**\n * Returns the current network metrics.\n *\n * Provides aggregated statistics about all recorded uploads including\n * average speed, latency, success rate, and totals.\n *\n * @returns A snapshot of current network performance metrics\n *\n * @example\n * ```typescript\n * const metrics = monitor.getCurrentMetrics();\n * console.log(`Speed: ${(metrics.averageSpeed / 1024).toFixed(2)} KB/s`);\n * console.log(`Success: ${(metrics.successRate * 100).toFixed(1)}%`);\n * console.log(`Latency: ${metrics.latency.toFixed(0)}ms`);\n * ```\n */\n getCurrentMetrics(): NetworkMetrics {\n return { ...this._currentMetrics };\n }\n\n /**\n * Analyzes recent upload samples to determine current network condition.\n *\n * Uses statistical analysis (coefficient of variation, average speed) to classify\n * the network as slow, fast, unstable, or unknown. 
The confidence level indicates\n * how reliable the assessment is based on the number of samples collected.\n *\n * @returns Current network condition with confidence level\n *\n * @example Adaptive chunking based on network condition\n * ```typescript\n * const condition = monitor.getNetworkCondition();\n *\n * if (condition.confidence > 0.7) {\n * switch (condition.type) {\n * case 'fast':\n * chunkSize = 10 * 1024 * 1024; // 10MB\n * break;\n * case 'slow':\n * chunkSize = 256 * 1024; // 256KB\n * break;\n * case 'unstable':\n * chunkSize = 1 * 1024 * 1024; // 1MB, conservative\n * break;\n * }\n * }\n * ```\n */\n getNetworkCondition(): NetworkCondition {\n if (this.samples.length < this.config.minSamplesForCondition) {\n return { type: \"unknown\", confidence: 0 };\n }\n\n const recentSamples = this.getRecentSuccessfulSamples();\n if (recentSamples.length < this.config.minSamplesForCondition) {\n return { type: \"unknown\", confidence: 0.3 };\n }\n\n const speeds = recentSamples.map(\n (sample) => sample.size / (sample.duration / 1000),\n );\n const avgSpeed =\n speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;\n\n // Calculate coefficient of variation for stability assessment\n const variance =\n speeds.reduce((sum, speed) => sum + (speed - avgSpeed) ** 2, 0) /\n speeds.length;\n const stdDev = Math.sqrt(variance);\n const coefficientOfVariation = stdDev / avgSpeed;\n\n // Determine network condition\n const confidence = Math.min(\n 1,\n this.samples.length / (this.config.minSamplesForCondition * 2),\n );\n\n if (coefficientOfVariation > this.config.unstableThreshold) {\n return { type: \"unstable\", confidence };\n }\n\n if (avgSpeed < this.config.slowThreshold) {\n return { type: \"slow\", confidence };\n }\n\n if (avgSpeed > this.config.fastThreshold) {\n return { type: \"fast\", confidence };\n }\n\n // Default to slow for conservative chunking\n return { type: \"slow\", confidence: confidence * 0.7 };\n }\n\n /**\n * Calculates the optimal upload throughput based on recent successful uploads.\n *\n * Uses a weighted average that gives more weight to recent samples,\n * providing a responsive measure of current network capacity.\n *\n * @returns Optimal throughput in bytes per second, or 0 if no successful samples\n *\n * @example Using for chunk size calculation\n * ```typescript\n * const throughput = monitor.getOptimalThroughput();\n * // Target 1 second per chunk\n * const optimalChunkSize = Math.min(throughput, MAX_CHUNK_SIZE);\n * ```\n */\n getOptimalThroughput(): number {\n const recentSamples = this.getRecentSuccessfulSamples(10);\n if (recentSamples.length === 0) return 0;\n\n // Calculate weighted average with recent samples having higher weight\n let totalWeight = 0;\n let weightedSum = 0;\n\n recentSamples.forEach((sample, index) => {\n const weight = index + 1; // More recent samples get higher weight\n const throughput = sample.size / (sample.duration / 1000);\n weightedSum += throughput * weight;\n totalWeight += weight;\n });\n\n return totalWeight > 0 ? 
weightedSum / totalWeight : 0;\n }\n\n /**\n * Resets all samples and metrics to initial state.\n *\n * Useful when network conditions change significantly or when\n * starting a new upload session.\n *\n * @example Resetting between uploads\n * ```typescript\n * // Complete first upload\n * await uploadFile1();\n *\n * // Reset metrics before starting a new upload\n * monitor.reset();\n * await uploadFile2();\n * ```\n */\n reset(): void {\n this.samples = [];\n this._currentMetrics = this.createEmptyMetrics();\n }\n\n private getRecentSuccessfulSamples(count?: number): UploadSample[] {\n const successful = this.samples.filter((sample) => sample.success);\n return count ? successful.slice(-count) : successful;\n }\n\n private updateMetrics(): void {\n const successfulSamples = this.samples.filter((sample) => sample.success);\n const totalRequests = this.samples.length;\n const totalSuccessful = successfulSamples.length;\n\n if (totalRequests === 0) {\n this._currentMetrics = this.createEmptyMetrics();\n return;\n }\n\n const totalBytes = successfulSamples.reduce(\n (sum, sample) => sum + sample.size,\n 0,\n );\n const totalTime = successfulSamples.reduce(\n (sum, sample) => sum + sample.duration,\n 0,\n );\n\n const averageSpeed = totalTime > 0 ? totalBytes / (totalTime / 1000) : 0;\n const successRate = totalSuccessful / totalRequests;\n const errorRate = 1 - successRate;\n\n // Calculate average latency from samples that have latency data\n const samplesWithLatency = this.samples.filter(\n (sample) => sample.latency !== undefined,\n );\n const averageLatency =\n samplesWithLatency.length > 0\n ? samplesWithLatency.reduce(\n (sum, sample) => sum + (sample.latency || 0),\n 0,\n ) / samplesWithLatency.length\n : 0;\n\n this._currentMetrics = {\n averageSpeed,\n latency: averageLatency,\n successRate,\n errorRate,\n totalRequests,\n totalBytes,\n totalTime,\n };\n }\n\n private createEmptyMetrics(): NetworkMetrics {\n return {\n averageSpeed: 0,\n latency: 0,\n successRate: 0,\n errorRate: 0,\n totalRequests: 0,\n totalBytes: 0,\n totalTime: 0,\n };\n }\n}\n","import type { NetworkCondition, NetworkMonitor } from \"./network-monitor\";\nimport type { ConnectionMetrics } from \"./services/http-client\";\n\nexport interface ChunkingStrategy {\n name: string;\n minChunkSize: number;\n maxChunkSize: number;\n initialChunkSize: number;\n adaptationRate: number; // how quickly to adapt (0-1)\n}\n\nexport interface DatastoreConstraints {\n minChunkSize: number;\n maxChunkSize: number;\n optimalChunkSize: number;\n requiresOrderedChunks?: boolean;\n}\n\nexport interface SmartChunkerConfig {\n enabled?: boolean;\n fallbackChunkSize?: number;\n minChunkSize?: number;\n maxChunkSize?: number;\n initialChunkSize?: number;\n targetUtilization?: number; // target bandwidth utilization (0-1)\n adaptationRate?: number;\n conservativeMode?: boolean;\n connectionPoolingAware?: boolean; // enable connection pooling optimizations\n datastoreConstraints?: DatastoreConstraints;\n}\n\nexport interface ChunkSizeDecision {\n size: number;\n strategy: string;\n reason: string;\n networkCondition: NetworkCondition;\n}\n\nconst DEFAULT_STRATEGIES: Record<string, ChunkingStrategy> = {\n conservative: {\n name: \"conservative\",\n minChunkSize: 64 * 1024, // 64 KB\n maxChunkSize: 2 * 1024 * 1024, // 2 MB\n initialChunkSize: 256 * 1024, // 256 KB\n adaptationRate: 0.1,\n },\n balanced: {\n name: \"balanced\",\n minChunkSize: 128 * 1024, // 128 KB\n maxChunkSize: 8 * 1024 * 1024, // 8 MB\n initialChunkSize: 512 * 1024, // 
512 KB\n adaptationRate: 0.2,\n },\n aggressive: {\n name: \"aggressive\",\n minChunkSize: 256 * 1024, // 256 KB\n maxChunkSize: 32 * 1024 * 1024, // 32 MB\n initialChunkSize: 1024 * 1024, // 1 MB\n adaptationRate: 0.3,\n },\n};\n\nconst S3_OPTIMIZED_STRATEGIES: Record<string, ChunkingStrategy> = {\n conservative: {\n name: \"s3-conservative\",\n minChunkSize: 5 * 1024 * 1024, // 5MB - S3 minimum\n maxChunkSize: 64 * 1024 * 1024, // 64MB\n initialChunkSize: 8 * 1024 * 1024, // 8MB\n adaptationRate: 0.1,\n },\n balanced: {\n name: \"s3-balanced\",\n minChunkSize: 5 * 1024 * 1024, // 5MB - S3 minimum\n maxChunkSize: 128 * 1024 * 1024, // 128MB\n initialChunkSize: 16 * 1024 * 1024, // 16MB\n adaptationRate: 0.2,\n },\n aggressive: {\n name: \"s3-aggressive\",\n minChunkSize: 5 * 1024 * 1024, // 5MB - S3 minimum\n maxChunkSize: 256 * 1024 * 1024, // 256MB\n initialChunkSize: 32 * 1024 * 1024, // 32MB\n adaptationRate: 0.3,\n },\n};\n\nexport class SmartChunker {\n private config: Required<Omit<SmartChunkerConfig, \"datastoreConstraints\">> & {\n datastoreConstraints?: DatastoreConstraints;\n };\n private networkMonitor: NetworkMonitor;\n private currentChunkSize: number;\n private lastDecision: ChunkSizeDecision | null = null;\n private consecutiveFailures = 0;\n private consecutiveSuccesses = 0;\n private connectionMetrics: ConnectionMetrics | null = null;\n\n constructor(networkMonitor: NetworkMonitor, config: SmartChunkerConfig = {}) {\n this.networkMonitor = networkMonitor;\n this.config = {\n enabled: config.enabled ?? true,\n fallbackChunkSize: config.fallbackChunkSize ?? 1024 * 1024, // 1 MB\n minChunkSize: config.minChunkSize ?? 64 * 1024, // 64 KB\n maxChunkSize: config.maxChunkSize ?? 32 * 1024 * 1024, // 32 MB\n initialChunkSize: config.initialChunkSize ?? 512 * 1024, // 512 KB\n targetUtilization: config.targetUtilization ?? 0.85, // 85%\n adaptationRate: config.adaptationRate ?? 0.2,\n conservativeMode: config.conservativeMode ?? false,\n connectionPoolingAware: config.connectionPoolingAware ?? 
true, // Enable by default\n datastoreConstraints: config.datastoreConstraints,\n };\n\n this.currentChunkSize = this.getEffectiveInitialChunkSize();\n }\n\n private getEffectiveInitialChunkSize(): number {\n if (this.config.datastoreConstraints) {\n return Math.max(\n this.config.initialChunkSize,\n this.config.datastoreConstraints.optimalChunkSize,\n );\n }\n return this.config.initialChunkSize;\n }\n\n private applyDatastoreConstraints(size: number): number {\n if (this.config.datastoreConstraints) {\n return Math.max(\n this.config.datastoreConstraints.minChunkSize,\n Math.min(this.config.datastoreConstraints.maxChunkSize, size),\n );\n }\n return size;\n }\n\n getNextChunkSize(remainingBytes?: number): ChunkSizeDecision {\n if (!this.config.enabled) {\n return {\n size: this.config.fallbackChunkSize,\n strategy: \"fixed\",\n reason: \"Smart chunking disabled\",\n networkCondition: { type: \"unknown\", confidence: 0 },\n };\n }\n\n const networkCondition = this.networkMonitor.getNetworkCondition();\n\n let newSize = this.currentChunkSize;\n let strategy = \"adaptive\";\n let reason = \"\";\n\n // If we don't have enough data, use initial strategy\n if (networkCondition.type === \"unknown\") {\n newSize = this.config.initialChunkSize;\n strategy = \"initial\";\n reason = \"Insufficient network data\";\n } else {\n const chunkingStrategy = this.selectStrategy(networkCondition);\n newSize = this.calculateOptimalChunkSize(\n networkCondition,\n chunkingStrategy,\n );\n strategy = chunkingStrategy.name;\n reason = `Network condition: ${networkCondition.type} (confidence: ${Math.round(networkCondition.confidence * 100)}%)`;\n }\n\n // Apply remaining bytes limit\n if (remainingBytes && remainingBytes < newSize) {\n newSize = remainingBytes;\n reason += `, limited by remaining bytes (${remainingBytes})`;\n }\n\n // Apply datastore constraints first\n newSize = this.applyDatastoreConstraints(newSize);\n\n // Ensure bounds\n newSize = Math.max(\n this.config.minChunkSize,\n Math.min(this.config.maxChunkSize, newSize),\n );\n\n this.currentChunkSize = newSize;\n this.lastDecision = {\n size: newSize,\n strategy,\n reason,\n networkCondition,\n };\n\n return this.lastDecision;\n }\n\n recordChunkResult(size: number, duration: number, success: boolean): void {\n // Record the result in network monitor\n this.networkMonitor.recordUpload(size, duration, success);\n\n // Update our internal state\n if (success) {\n this.consecutiveSuccesses++;\n this.consecutiveFailures = 0;\n } else {\n this.consecutiveFailures++;\n this.consecutiveSuccesses = 0;\n }\n\n // Adjust chunk size based on recent performance\n this.adaptChunkSize(success, duration, size);\n }\n\n getCurrentChunkSize(): number {\n return this.currentChunkSize;\n }\n\n getLastDecision(): ChunkSizeDecision | null {\n return this.lastDecision;\n }\n\n reset(): void {\n this.currentChunkSize = this.config.initialChunkSize;\n this.consecutiveFailures = 0;\n this.consecutiveSuccesses = 0;\n this.lastDecision = null;\n this.connectionMetrics = null;\n }\n\n /**\n * Update connection metrics for connection pooling aware optimizations\n */\n updateConnectionMetrics(metrics: ConnectionMetrics): void {\n this.connectionMetrics = metrics;\n }\n\n /**\n * Get insights about connection pooling impact on chunking\n */\n getConnectionPoolingInsights(): {\n isOptimized: boolean;\n reuseRate: number;\n recommendedMinChunkSize: number;\n connectionOverhead: number;\n } {\n if (!this.connectionMetrics || !this.config.connectionPoolingAware) {\n return {\n 
isOptimized: false,\n reuseRate: 0,\n recommendedMinChunkSize: this.config.minChunkSize,\n connectionOverhead: 0,\n };\n }\n\n const reuseRate = this.connectionMetrics.reuseRate;\n const avgConnectionTime = this.connectionMetrics.averageConnectionTime;\n\n // With good connection reuse, we can afford smaller chunks\n const connectionOverhead = (1 - reuseRate) * avgConnectionTime;\n const recommendedMinChunkSize = Math.max(\n this.config.minChunkSize,\n Math.floor(connectionOverhead * 10000), // 10KB per ms of overhead\n );\n\n return {\n isOptimized: reuseRate > 0.7,\n reuseRate,\n recommendedMinChunkSize,\n connectionOverhead,\n };\n }\n\n private selectStrategy(networkCondition: NetworkCondition): ChunkingStrategy {\n const fallbackStrategy: ChunkingStrategy = {\n name: \"fallback\",\n minChunkSize: 128 * 1024,\n maxChunkSize: 4 * 1024 * 1024,\n initialChunkSize: 512 * 1024,\n adaptationRate: 0.2,\n };\n\n // Use S3-optimized strategies if datastore constraints indicate S3 (5MB minimum)\n const isS3Like =\n this.config.datastoreConstraints?.minChunkSize === 5 * 1024 * 1024;\n const strategiesSource = isS3Like\n ? S3_OPTIMIZED_STRATEGIES\n : DEFAULT_STRATEGIES;\n\n if (this.config.conservativeMode) {\n return strategiesSource.conservative ?? fallbackStrategy;\n }\n\n // Enhanced strategy selection with connection pooling awareness\n let baseStrategy: ChunkingStrategy;\n\n switch (networkCondition.type) {\n case \"fast\":\n baseStrategy =\n networkCondition.confidence > 0.7\n ? (strategiesSource.aggressive ?? fallbackStrategy)\n : (strategiesSource.balanced ?? fallbackStrategy);\n break;\n case \"slow\":\n baseStrategy = strategiesSource.conservative ?? fallbackStrategy;\n break;\n case \"unstable\":\n baseStrategy = strategiesSource.conservative ?? fallbackStrategy;\n break;\n default:\n baseStrategy = strategiesSource.balanced ?? 
fallbackStrategy;\n }\n\n // Apply connection pooling optimizations\n if (this.config.connectionPoolingAware && this.connectionMetrics) {\n return this.optimizeStrategyForConnectionPooling(baseStrategy);\n }\n\n return baseStrategy;\n }\n\n /**\n * Optimize chunking strategy based on connection pooling performance\n */\n private optimizeStrategyForConnectionPooling(\n strategy: ChunkingStrategy,\n ): ChunkingStrategy {\n if (!this.connectionMetrics) return strategy;\n\n const insights = this.getConnectionPoolingInsights();\n const reuseRate = insights.reuseRate;\n\n // High connection reuse allows for more aggressive chunking\n if (reuseRate > 0.8) {\n return {\n ...strategy,\n name: `${strategy.name}-pooled-aggressive`,\n minChunkSize: Math.max(strategy.minChunkSize * 0.5, 32 * 1024), // Smaller min chunks\n adaptationRate: Math.min(strategy.adaptationRate * 1.3, 0.5), // Faster adaptation\n };\n }\n\n // Good connection reuse allows moderate optimization\n if (reuseRate > 0.5) {\n return {\n ...strategy,\n name: `${strategy.name}-pooled-moderate`,\n minChunkSize: Math.max(strategy.minChunkSize * 0.75, 64 * 1024),\n adaptationRate: Math.min(strategy.adaptationRate * 1.1, 0.4),\n };\n }\n\n // Poor connection reuse requires conservative approach\n return {\n ...strategy,\n name: `${strategy.name}-pooled-conservative`,\n minChunkSize: Math.max(\n strategy.minChunkSize * 1.5,\n insights.recommendedMinChunkSize,\n ),\n adaptationRate: strategy.adaptationRate * 0.8,\n };\n }\n\n private calculateOptimalChunkSize(\n networkCondition: NetworkCondition,\n strategy: ChunkingStrategy,\n ): number {\n let targetSize = this.currentChunkSize;\n\n // Base calculation on current throughput\n const optimalThroughput = this.networkMonitor.getOptimalThroughput();\n\n if (optimalThroughput > 0) {\n // Calculate target chunk duration (aim for 2-5 seconds per chunk)\n const targetDuration = this.getTargetChunkDuration(networkCondition);\n const theoreticalSize =\n optimalThroughput * targetDuration * this.config.targetUtilization;\n\n // Blend current size with theoretical optimal size\n const blendFactor = strategy.adaptationRate;\n targetSize =\n this.currentChunkSize * (1 - blendFactor) +\n theoreticalSize * blendFactor;\n }\n\n // Apply strategy constraints\n targetSize = Math.max(\n strategy.minChunkSize,\n Math.min(strategy.maxChunkSize, targetSize),\n );\n\n // Apply failure-based adjustments\n if (this.consecutiveFailures > 0) {\n // Reduce size on failures\n const reductionFactor = Math.min(0.5, this.consecutiveFailures * 0.2);\n targetSize *= 1 - reductionFactor;\n } else if (this.consecutiveSuccesses > 2) {\n // Gradually increase size on consistent success\n const increaseFactor = Math.min(0.3, this.consecutiveSuccesses * 0.05);\n targetSize *= 1 + increaseFactor;\n }\n\n return Math.round(targetSize);\n }\n\n private getTargetChunkDuration(networkCondition: NetworkCondition): number {\n switch (networkCondition.type) {\n case \"fast\":\n return 3; // 3 seconds for fast connections\n case \"slow\":\n return 5; // 5 seconds for slow connections to reduce overhead\n case \"unstable\":\n return 2; // 2 seconds for unstable connections for quick recovery\n default:\n return 3; // Default to 3 seconds\n }\n }\n\n private adaptChunkSize(\n success: boolean,\n duration: number,\n size: number,\n ): void {\n if (!success) {\n // On failure, be more conservative\n this.currentChunkSize = Math.max(\n this.config.minChunkSize,\n this.currentChunkSize * 0.8,\n );\n return;\n }\n\n // On success, check if we 
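Setting aside the failure/success adjustments, the core of `calculateOptimalChunkSize` above targets a chunk that takes a few seconds at the observed throughput and blends that with the current size at the strategy's adaptation rate. A sketch with illustrative numbers:

```typescript
// targetSize = current * (1 - rate) + (throughput * duration * utilization) * rate
function blendChunkSize(
  currentSize: number,
  throughputBytesPerSec: number,
  targetDurationSec: number,
  targetUtilization: number,
  adaptationRate: number,
): number {
  const theoretical =
    throughputBytesPerSec * targetDurationSec * targetUtilization;
  return Math.round(
    currentSize * (1 - adaptationRate) + theoretical * adaptationRate,
  );
}

// A 2 MB/s link with a 3 s target at 80% utilization suggests a ~4.8 MB chunk;
// blending with a 1 MB current chunk at rate 0.3 yields ~2.1 MB, so the size
// ramps up gradually instead of jumping.
blendChunkSize(1_048_576, 2_097_152, 3, 0.8, 0.3);
```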
should adjust based on performance\n const throughput = size / (duration / 1000); // bytes per second\n const metrics = this.networkMonitor.getCurrentMetrics();\n\n if (metrics.averageSpeed > 0) {\n const utilizationRatio = throughput / metrics.averageSpeed;\n\n if (utilizationRatio < this.config.targetUtilization * 0.8) {\n // We're not utilizing bandwidth well, try larger chunks\n this.currentChunkSize = Math.min(\n this.config.maxChunkSize,\n this.currentChunkSize * 1.1,\n );\n } else if (utilizationRatio > this.config.targetUtilization * 1.2) {\n // We might be overloading, try smaller chunks\n this.currentChunkSize = Math.max(\n this.config.minChunkSize,\n this.currentChunkSize * 0.95,\n );\n }\n }\n }\n}\n","/**\n * Utilities for detecting input data types to determine flow execution strategy.\n *\n * @module utils/input-detection\n */\n\n/**\n * Input type classification for flow execution.\n *\n * - `file`: File-like object requiring chunked upload\n * - `url`: URL string for direct file fetch\n * - `data`: Structured data to pass through unchanged\n */\nexport type InputType = \"file\" | \"url\" | \"data\";\n\n/**\n * Minimal interface for file-like objects (File, Blob, React Native assets).\n * Platform-agnostic representation of uploadable content.\n */\nexport interface FileLike {\n /** File name (optional) */\n name?: string;\n /** MIME type (optional) */\n type?: string;\n /** File size in bytes (optional) */\n size?: number;\n}\n\n/**\n * URL regex pattern matching http:// or https:// protocols.\n * Validates common URL structures for input type detection.\n */\nconst URL_PATTERN = /^https?:\\/\\/.+/i;\n\n/**\n * Detect the type of input data for flow execution.\n *\n * Detection rules:\n * 1. Object with file-like properties (name/type/size) → \"file\"\n * 2. String matching URL pattern → \"url\"\n * 3. 
Everything else → \"data\"\n *\n * Uses duck typing to detect file-like objects across platforms.\n *\n * @param data - Input data to classify\n * @returns Input type classification\n *\n * @example\n * ```typescript\n * detectInputType(new File([], \"test.jpg\")); // \"file\"\n * detectInputType({ name: \"test.jpg\", size: 1024 }); // \"file\"\n * detectInputType(\"https://example.com/image.jpg\"); // \"url\"\n * detectInputType({ field: \"value\" }); // \"data\"\n * ```\n */\nexport function detectInputType(data: unknown): InputType {\n // Check for file-like object using duck typing\n if (isFileLike(data)) {\n return \"file\";\n }\n\n // Check for URL string\n if (typeof data === \"string\" && URL_PATTERN.test(data)) {\n return \"url\";\n }\n\n // Default to structured data\n return \"data\";\n}\n\n/**\n * Check if input is a URL string.\n *\n * @param data - Input data to check\n * @returns True if data is a URL string\n *\n * @example\n * ```typescript\n * isURL(\"https://example.com/file.jpg\"); // true\n * isURL(\"not a url\"); // false\n * isURL({ url: \"https://...\" }); // false\n * ```\n */\nexport function isURL(data: unknown): data is string {\n return typeof data === \"string\" && URL_PATTERN.test(data);\n}\n\n/**\n * Check if input is a file-like object (File, Blob, or platform-specific file).\n *\n * Uses duck typing to identify objects with file-like properties.\n * Works across browser (File/Blob) and React Native environments.\n *\n * @param data - Input data to check\n * @returns True if data is file-like\n *\n * @example\n * ```typescript\n * isFileLike(new File([], \"test.jpg\")); // true\n * isFileLike(new Blob([\"data\"])); // true\n * isFileLike({ name: \"test.jpg\", size: 1024 }); // true\n * isFileLike(\"not a file\"); // false\n * ```\n */\nexport function isFileLike(data: unknown): data is FileLike {\n if (typeof data !== \"object\" || data === null) {\n return false;\n }\n\n // Check for File or Blob using runtime type check (browser)\n if (typeof globalThis !== \"undefined\") {\n // @ts-expect-error - File and Blob may not exist in all environments\n if (globalThis.File && data instanceof globalThis.File) {\n return true;\n }\n // @ts-expect-error - File and Blob may not exist in all environments\n if (globalThis.Blob && data instanceof globalThis.Blob) {\n return true;\n }\n }\n\n // Duck typing: object with file-like properties\n const obj = data as Record<string, unknown>;\n return (\n (\"name\" in obj || \"type\" in obj || \"size\" in obj) &&\n (typeof obj.size === \"number\" || typeof obj.size === \"undefined\")\n );\n}\n","/**\n * Specific error types that can occur during upload and flow operations.\n *\n * These error names provide fine-grained categorization of failures,\n * allowing applications to implement targeted error handling and recovery strategies.\n *\n * @example Error handling by type\n * ```typescript\n * try {\n * await client.upload(file);\n * } catch (error) {\n * if (error instanceof UploadistaError) {\n * if (error.isNetworkError()) {\n * // Retry network-related failures\n * console.log('Network issue, retrying...');\n * } else if (error.name === 'UPLOAD_NOT_FOUND') {\n * // Handle missing upload\n * console.log('Upload not found, starting fresh');\n * }\n * }\n * }\n * ```\n */\nexport type UploadistaErrorName =\n | \"UPLOAD_SIZE_NOT_SPECIFIED\"\n | \"NETWORK_ERROR\"\n | \"NETWORK_UNEXPECTED_RESPONSE\"\n | \"UPLOAD_CHUNK_FAILED\"\n | \"WRONG_UPLOAD_SIZE\"\n | \"UPLOAD_LOCKED\"\n | \"UPLOAD_NOT_FOUND\"\n | 
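A usage sketch for the detection helpers above; the root import is an assumption (the function lives in `src/utils/input-detection.ts` and may or may not be re-exported from the package entry point):

```typescript
import { detectInputType } from "@uploadista/client-core"; // assumed re-export

function describeInput(data: unknown): string {
  switch (detectInputType(data)) {
    case "file":
      return "chunked upload"; // File/Blob or duck-typed file-like object
    case "url":
      return "server-side fetch"; // http(s):// string
    default:
      return "pass-through data"; // structured payload, sent unchanged
  }
}

describeInput({ name: "photo.jpg", size: 1024 }); // "chunked upload"
describeInput("https://example.com/photo.jpg");   // "server-side fetch"
describeInput({ userId: 42 });                    // "pass-through data"
```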
\"CREATE_UPLOAD_FAILED\"\n | \"DELETE_UPLOAD_FAILED\"\n | \"PARALLEL_SEGMENT_CREATION_FAILED\"\n | \"PARALLEL_SEGMENT_UPLOAD_FAILED\"\n | \"FLOW_NOT_FOUND\"\n | \"FLOW_INIT_FAILED\"\n | \"FLOW_RUN_FAILED\"\n | \"FLOW_RESUMED_FAILED\"\n | \"FLOW_PAUSE_FAILED\"\n | \"FLOW_CANCEL_FAILED\"\n | \"FLOW_UNEXPECTED_STATE\"\n | \"FLOW_INCOMPATIBLE\"\n | \"FLOW_NO_UPLOAD_ID\"\n | \"FLOW_TIMEOUT\"\n | \"FLOW_FINALIZE_FAILED\"\n | \"VALIDATION_ERROR\"\n | \"JOB_NOT_FOUND\"\n | \"WEBSOCKET_AUTH_FAILED\";\n\n/**\n * Custom error class for all Uploadista client operations.\n *\n * Extends the standard Error class with additional context including\n * typed error names, HTTP status codes, and underlying error causes.\n * This allows for precise error handling and debugging.\n *\n * @example Basic error handling\n * ```typescript\n * try {\n * await client.upload(file);\n * } catch (error) {\n * if (error instanceof UploadistaError) {\n * console.log(`Error: ${error.name} - ${error.message}`);\n * console.log(`HTTP Status: ${error.status}`);\n * }\n * }\n * ```\n *\n * @example Network error detection\n * ```typescript\n * try {\n * await client.upload(file);\n * } catch (error) {\n * if (error instanceof UploadistaError && error.isNetworkError()) {\n * // Implement retry logic for network failures\n * await retryWithBackoff(() => client.upload(file));\n * }\n * }\n * ```\n */\nexport class UploadistaError extends Error {\n /**\n * Typed error name indicating the specific type of failure\n */\n name: UploadistaErrorName;\n\n /**\n * Human-readable error message describing what went wrong\n */\n message: string;\n\n /**\n * The underlying error that caused this failure, if any\n */\n cause: Error | undefined;\n\n /**\n * HTTP status code from the server response, if applicable\n */\n status: number | undefined;\n\n /**\n * Creates a new UploadistaError instance.\n *\n * @param options - Error configuration\n * @param options.name - Typed error name for categorization\n * @param options.message - Descriptive error message\n * @param options.cause - Optional underlying error that caused this failure\n * @param options.status - Optional HTTP status code from server response\n */\n constructor({\n name,\n message,\n cause,\n status,\n }: {\n name: UploadistaErrorName;\n message: string;\n cause?: Error;\n status?: number;\n }) {\n super();\n this.name = name;\n this.cause = cause;\n this.message = message;\n this.status = status;\n }\n\n /**\n * Checks if this error is related to network connectivity issues.\n *\n * Network errors are typically transient and may succeed on retry,\n * making them good candidates for automatic retry logic.\n *\n * @returns True if this is a network-related error\n *\n * @example\n * ```typescript\n * if (error.isNetworkError()) {\n * // Safe to retry\n * await retry(() => uploadChunk());\n * }\n * ```\n */\n isNetworkError(): boolean {\n return (\n this.name === \"NETWORK_ERROR\" ||\n this.name === \"NETWORK_UNEXPECTED_RESPONSE\"\n );\n }\n}\n","import { Base64 } from \"js-base64\";\nimport { UploadistaError } from \"../error\";\n\n/**\n * Encodes metadata for upload headers\n */\nexport function encodeMetadata(\n metadata: Record<string, string | null>,\n): string {\n return Object.entries(metadata)\n .map(([key, value]) => `${key} ${Base64.encode(String(value))}`)\n .join(\",\");\n}\n\n/**\n * Checks whether a given status is in the range of the expected category.\n * For example, only a status between 200 and 299 will satisfy the category 200.\n */\nexport function 
inStatusCategory(\n status: number,\n category: 100 | 200 | 300 | 400 | 500,\n): boolean {\n return status >= category && status < category + 100;\n}\n\nexport type CalculateFileSizeOptions = {\n uploadLengthDeferred?: boolean;\n uploadSize?: number;\n};\n\n/**\n * Calculate the final file size for upload based on options\n */\nexport function calculateFileSize(\n originalSize: number | null,\n { uploadLengthDeferred, uploadSize }: CalculateFileSizeOptions,\n): number | null {\n // First, we look at the uploadLengthDeferred option.\n // Next, we check if the caller has supplied a manual upload size.\n // Finally, we try to use the calculated size from the source object.\n if (uploadLengthDeferred) {\n return null;\n }\n\n if (uploadSize != null) {\n return uploadSize;\n }\n\n const size = originalSize;\n if (size == null) {\n throw new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\",\n message:\n \"cannot automatically derive upload's size from input. Specify it manually using the `uploadSize` option or use the `uploadLengthDeferred` option\",\n });\n }\n\n return size;\n}\n\n/**\n * Calculate segments for parallel upload\n */\nexport function calculateSegments(\n fileSize: number,\n parallelUploads: number,\n parallelChunkSize?: number,\n): { startByte: number; endByte: number; segmentIndex: number }[] {\n if (parallelUploads <= 1) {\n return [{ startByte: 0, endByte: fileSize, segmentIndex: 0 }];\n }\n\n // Use parallelChunkSize if provided, otherwise divide file equally\n const segments: {\n startByte: number;\n endByte: number;\n segmentIndex: number;\n }[] = [];\n\n if (parallelChunkSize) {\n // Fixed segment size approach\n let currentByte = 0;\n let segmentIndex = 0;\n\n while (currentByte < fileSize) {\n const endByte = Math.min(currentByte + parallelChunkSize, fileSize);\n segments.push({\n startByte: currentByte,\n endByte,\n segmentIndex,\n });\n currentByte = endByte;\n segmentIndex++;\n }\n } else {\n // Equal division approach\n const segmentSize = Math.ceil(fileSize / parallelUploads);\n\n for (let i = 0; i < parallelUploads; i++) {\n const startByte = i * segmentSize;\n const endByte = Math.min(startByte + segmentSize, fileSize);\n\n if (startByte < fileSize) {\n segments.push({\n startByte,\n endByte,\n segmentIndex: i,\n });\n }\n }\n }\n\n return segments;\n}\n","import type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { PlatformService } from \"../services/platform-service\";\nimport type { SmartChunker } from \"../smart-chunker\";\nimport type { UploadResponse } from \"../types/upload-response\";\nimport { inStatusCategory } from \"./upload-utils\";\n\nexport type OnProgress = (\n uploadId: string,\n bytesSent: number,\n bytesTotal: number | null,\n) => void;\n\nexport type OnShouldRetry = (\n error: UploadistaError,\n retryAttempt: number,\n) => boolean;\n\n/**\n * uploadChunk reads a chunk from the source and sends it using the\n * supplied request object. 
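A worked example of the two segmentation modes in `calculateSegments` above, with boundaries recomputed by hand (not taken from package tests):

```typescript
import { calculateSegments } from "./upload-utils"; // path as in the source tree above

// Equal division: 10 MB across 3 parallel uploads, ceil-sized segments.
// → [0, 3495254), [3495254, 6990508), [6990508, 10485760)
calculateSegments(10 * 1024 * 1024, 3);

// Fixed segment size: 10 MB in 4 MB pieces → 4 MB, 4 MB, 2 MB.
// → [0, 4194304), [4194304, 8388608), [8388608, 10485760)
calculateSegments(10 * 1024 * 1024, 3, 4 * 1024 * 1024);
```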
It will not handle the response.\n */\nexport async function uploadChunk({\n uploadId,\n source,\n offset,\n uploadLengthDeferred,\n abortController,\n onProgress,\n smartChunker,\n uploadistaApi,\n logger,\n}: {\n uploadId: string;\n source: FileSource;\n offset: number;\n uploadLengthDeferred: boolean | undefined;\n abortController: AbortControllerLike;\n onProgress?: OnProgress;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n}): Promise<UploadResponse> {\n const start = offset ?? 0;\n const remainingBytes = source.size ? source.size - start : undefined;\n const chunkSizeDecision = smartChunker.getNextChunkSize(remainingBytes);\n const currentChunkSize = chunkSizeDecision.size;\n let end = start + currentChunkSize;\n\n // The specified chunkSize may be Infinity or the calcluated end position\n // may exceed the file's size. In both cases, we limit the end position to\n // the input's total size for simpler calculations and correctness.\n if (\n source.size &&\n (end === Number.POSITIVE_INFINITY || end > source.size) &&\n !uploadLengthDeferred\n ) {\n end = source.size;\n }\n\n const { value, size, done } = await source.slice(start, end);\n const sizeOfValue = size ?? 0;\n const chunkStartTime = Date.now();\n\n // If the upload length is deferred, the upload size was not specified during\n // upload creation. So, if the file reader is done reading, we know the total\n // upload size and can tell the tus server.\n if (uploadLengthDeferred && done) {\n source.size = offset + sizeOfValue;\n }\n\n // The specified uploadSize might not match the actual amount of data that a source\n // provides. In these cases, we cannot successfully complete the upload, so we\n // rather error out and let the user know. If not, tus-js-client will be stuck\n // in a loop of repeating empty PATCH requests.\n // See https://community.transloadit.com/t/how-to-abort-hanging-companion-uploads/16488/13\n const newSize = offset + sizeOfValue;\n if (!uploadLengthDeferred && done && newSize !== source.size) {\n throw new UploadistaError({\n name: \"WRONG_UPLOAD_SIZE\",\n message: `upload was configured with a size of ${size} bytes, but the source is done after ${newSize} bytes`,\n });\n }\n\n const result = await uploadistaApi.uploadChunk(uploadId, value, {\n onProgress: (bytes, total) => {\n onProgress?.(uploadId, bytes, total);\n },\n abortController,\n });\n\n // Record performance metrics\n const chunkDuration = Date.now() - chunkStartTime;\n const success = result.status >= 200 && result.status < 300;\n\n smartChunker.recordChunkResult(sizeOfValue, chunkDuration, success);\n\n logger.log(\n `Chunk upload ${success ? 
\"succeeded\" : \"failed\"}: ${sizeOfValue} bytes in ${chunkDuration}ms (${chunkSizeDecision.strategy} strategy)`,\n );\n\n return result;\n}\n\n/**\n * Checks whether or not it is ok to retry a request.\n * @param {UploadistaError} err the error returned from the last request\n * @param {number} retryAttempt the number of times the request has already been retried\n * @param {number[]} retryDelays configured retry delays\n * @param {OnShouldRetry} onShouldRetry optional custom retry logic\n */\nexport function shouldRetry(\n platformService: PlatformService,\n err: UploadistaError,\n retryAttempt: number,\n retryDelays?: number[],\n onShouldRetry?: OnShouldRetry,\n): boolean {\n if (\n retryDelays == null ||\n retryAttempt >= retryDelays.length ||\n !err.isNetworkError()\n ) {\n return false;\n }\n\n if (onShouldRetry) {\n return onShouldRetry(err, retryAttempt);\n }\n\n return defaultOnShouldRetry(platformService, err);\n}\n\n/**\n * determines if the request should be retried. Will only retry if not a status 4xx except a 409 or 423\n * @param {UploadistaError} err\n * @returns {boolean}\n */\nexport function defaultOnShouldRetry(\n platformService: PlatformService,\n err: UploadistaError,\n): boolean {\n const status = err.status ?? 0;\n return (\n (!inStatusCategory(status, 400) || status === 409 || status === 423) &&\n platformService.isOnline()\n );\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\nimport type { FlowUploadConfig } from \"../types/flow-upload-config\";\n\nimport { shouldRetry } from \"./chunk-upload\";\nimport type { Callbacks } from \"./single-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport { inStatusCategory } from \"./upload-utils\";\n\n/**\n * Start a flow-based upload by initializing the streaming input node\n */\nexport async function startFlowUpload({\n source,\n flowConfig,\n uploadistaApi,\n logger,\n platformService,\n openWebSocket,\n closeWebSocket,\n ...callbacks\n}: {\n source: FileSource;\n flowConfig: FlowUploadConfig;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n platformService: PlatformService;\n openWebSocket: (jobId: string) => void;\n closeWebSocket: (jobId: string) => void;\n} & Callbacks): Promise<\n { jobId: string; uploadFile: UploadFile; inputNodeId: string } | undefined\n> {\n const { flowId, storageId } = flowConfig;\n\n // Get the flow to find the streaming input node\n const { flow } = await uploadistaApi.getFlow(flowId);\n\n // Find the streaming-input-node in the flow\n const inputNode = flow.nodes.find((node) => node.type === \"input\");\n\n if (!inputNode) {\n const error = new UploadistaError({\n name: \"FLOW_INCOMPATIBLE\",\n message: `Flow ${flowId} does not have a streaming input node. The flow must contain a node with type \"input\" to support flow uploads.`,\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n const inputNodeId = inputNode.id;\n\n // Step 1: Initialize the flow with init operation\n const metadata = {\n originalName: source.name ?? \"unknown\",\n mimeType: source.type ?? 
\"application/octet-stream\",\n size: source.size ?? 0,\n ...flowConfig.metadata,\n };\n\n logger.log(`Starting flow upload for flow ${flowId}, node ${inputNodeId}`);\n\n const { status, job } = await uploadistaApi.runFlow(flowId, storageId, {\n [inputNodeId]: {\n operation: \"init\",\n storageId,\n metadata,\n },\n });\n\n const jobId = job.id;\n\n if (!inStatusCategory(status, 200) || !jobId) {\n const error = new UploadistaError({\n name: \"FLOW_INIT_FAILED\",\n message: \"Failed to initialize flow upload\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n callbacks.onJobStart?.(jobId);\n\n logger.log(`Flow job ${jobId} created, opening WebSocket`);\n\n // Open WebSocket to listen for flow events\n // Events are buffered in the Durable Object until connection is established\n openWebSocket(jobId);\n\n logger.log(`Waiting for upload ID from node`);\n\n // Step 2: Wait for the streaming-input-node to pause and return the upload file\n // Poll job status until paused (with timeout)\n const maxAttempts = 60; // 30 seconds total\n const pollInterval = 500; // 0.5 second\n let attempts = 0;\n let jobStatus = await uploadistaApi.getJobStatus(jobId);\n\n while (jobStatus.status !== \"paused\" && attempts < maxAttempts) {\n await new Promise<void>((resolve) =>\n platformService.setTimeout(resolve, pollInterval),\n );\n jobStatus = await uploadistaApi.getJobStatus(jobId);\n attempts++;\n }\n\n if (jobStatus.status !== \"paused\") {\n const error = new UploadistaError({\n name: \"FLOW_TIMEOUT\",\n message: `Flow did not pause after init (status: ${jobStatus.status})`,\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n // Get the upload file from streaming input node task result\n const streamingInputTask = jobStatus.tasks.find(\n (task) => task.nodeId === inputNodeId,\n );\n const uploadFile = streamingInputTask?.result as UploadFile;\n\n if (!uploadFile?.id) {\n const error = new UploadistaError({\n name: \"FLOW_NO_UPLOAD_ID\",\n message: \"Flow did not return upload ID after init\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n logger.log(`Upload ID received: ${uploadFile.id}`);\n\n callbacks.onStart?.({\n uploadId: uploadFile.id,\n size: source.size ?? null,\n });\n\n return { jobId, uploadFile, inputNodeId };\n}\n\n/**\n * Upload chunks directly to the upload API (not through resumeFlow)\n * This is more efficient and reuses the existing upload infrastructure\n */\nexport async function performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset,\n source,\n retryAttempt = 0,\n abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n ...callbacks\n}: {\n jobId: string;\n uploadFile: UploadFile;\n inputNodeId: string;\n offset: number;\n retryAttempt?: number;\n source: FileSource;\n abortController: AbortControllerLike;\n retryDelays: number[] | undefined;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n platformService: PlatformService;\n onRetry?: (timeout: Timeout) => void;\n} & Callbacks): Promise<void> {\n let offsetBeforeRetry = offset;\n let currentOffset = offset;\n\n try {\n // Get optimal chunk size\n const remainingBytes = source.size ? source.size - offset : undefined;\n const chunkSizeDecision = smartChunker.getNextChunkSize(remainingBytes);\n const chunkSize = chunkSizeDecision.size;\n const endByte = Math.min(offset + chunkSize, source.size ?? 
0);\n const sliceResult = await source.slice(offset, endByte);\n\n if (!sliceResult || !sliceResult.value) {\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Failed to read chunk from file\",\n });\n }\n\n const chunkData = sliceResult.value;\n\n // Upload chunk directly to upload API (bypassing flow)\n const startTime = Date.now();\n\n const res = await uploadistaApi.uploadChunk(uploadFile.id, chunkData, {\n abortController,\n });\n\n const duration = Date.now() - startTime;\n\n if (!res.upload) {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: \"Upload chunk response missing upload data\",\n });\n }\n\n currentOffset = res.upload.offset;\n\n callbacks.onProgress?.(uploadFile.id, currentOffset, source.size ?? 0);\n callbacks.onChunkComplete?.(\n currentOffset - offset,\n offset,\n source.size ?? 0,\n );\n\n // Record detailed chunk metrics\n if (smartChunking?.enabled !== false) {\n const chunkIndex = Math.floor(offset / chunkSize);\n\n metrics.recordChunk({\n chunkIndex,\n size: chunkSize,\n duration,\n speed: chunkSize / (duration / 1000),\n success: true,\n retryCount: retryAttempt,\n networkCondition:\n smartChunker.getLastDecision()?.networkCondition?.type,\n chunkingStrategy: smartChunker.getLastDecision()?.strategy,\n });\n\n // Update smart chunker with connection metrics\n const connectionMetrics = uploadistaApi.getConnectionMetrics();\n smartChunker.updateConnectionMetrics(connectionMetrics);\n }\n\n // Check if upload is complete after uploading the chunk\n if (currentOffset >= (source.size ?? 0)) {\n if (source) source.close();\n\n // Complete metrics session\n if (smartChunking?.enabled !== false) {\n const sessionMetrics = metrics.endSession();\n if (sessionMetrics) {\n logger.log(\n `Flow upload completed: ${sessionMetrics.totalSize} bytes in ${sessionMetrics.totalDuration}ms, avg speed: ${Math.round(sessionMetrics.averageSpeed / 1024)}KB/s`,\n );\n }\n }\n\n // Upload is complete - finalize the flow\n logger.log(`Finalizing flow upload for job ${jobId}`);\n\n try {\n await uploadistaApi.resumeFlow(\n jobId,\n inputNodeId,\n {\n operation: \"finalize\",\n uploadId: uploadFile.id,\n },\n { contentType: \"application/json\" },\n );\n } catch (err) {\n // Finalization errors should not trigger chunk retry logic\n const error = new UploadistaError({\n name: \"FLOW_FINALIZE_FAILED\",\n message: `Failed to finalize flow upload for job ${jobId}`,\n cause: err as Error,\n });\n callbacks.onError?.(error);\n throw error;\n }\n return;\n }\n\n // Continue uploading next chunk\n await performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset: currentOffset,\n source,\n platformService,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n onRetry,\n abortController,\n ...callbacks,\n });\n } catch (err) {\n // Retry logic similar to single-upload\n if (retryDelays != null) {\n const shouldResetDelays =\n offset != null && currentOffset > offsetBeforeRetry;\n if (shouldResetDelays) {\n retryAttempt = 0;\n }\n\n const castedErr = !(err instanceof UploadistaError)\n ? 
new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Network error during flow upload\",\n cause: err as Error,\n })\n : err;\n\n if (\n shouldRetry(\n platformService,\n castedErr,\n retryAttempt,\n retryDelays,\n callbacks.onShouldRetry,\n )\n ) {\n const delay = retryDelays[retryAttempt];\n offsetBeforeRetry = offset;\n\n const timeout = platformService.setTimeout(async () => {\n await performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset,\n source,\n retryAttempt: retryAttempt + 1,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n abortController,\n ...callbacks,\n });\n }, delay);\n onRetry?.(timeout);\n } else {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `Failed to upload chunk for job ${jobId} at offset ${offset}`,\n cause: err as Error,\n });\n }\n }\n }\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\n\nimport { shouldRetry } from \"./chunk-upload\";\nimport type { Callbacks } from \"./single-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport { inStatusCategory } from \"./upload-utils\";\n\n/**\n * Result from initializing a flow input node\n */\nexport interface FlowInputInitResult {\n uploadFile: UploadFile;\n nodeId: string;\n}\n\n/**\n * Options for initializing a flow input node\n */\nexport interface InitializeFlowInputOptions {\n nodeId: string;\n jobId: string;\n source: FileSource;\n storageId: string;\n metadata?: Record<string, unknown>;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n platformService: PlatformService;\n callbacks?: Pick<Callbacks, \"onStart\" | \"onError\">;\n}\n\n/**\n * Options for uploading chunks for a flow input\n */\nexport interface UploadInputChunksOptions {\n nodeId: string;\n jobId: string;\n uploadFile: UploadFile;\n source: FileSource;\n offset?: number;\n retryAttempt?: number;\n abortController: AbortControllerLike;\n retryDelays: number[] | undefined;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n platformService: PlatformService;\n onRetry?: (timeout: Timeout) => void;\n callbacks?: Callbacks;\n}\n\n/**\n * Options for finalizing a flow input\n */\nexport interface FinalizeFlowInputOptions {\n nodeId: string;\n jobId: string;\n uploadId: string;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n callbacks?: Pick<Callbacks, \"onError\">;\n}\n\n/**\n * Initialize a single flow input node with the init operation.\n * This starts the input processing and waits for the upload ID.\n *\n * @param options - Configuration for initializing the input\n * @returns Upload file metadata and node ID\n */\nexport async function initializeFlowInput(\n options: InitializeFlowInputOptions,\n): Promise<FlowInputInitResult> {\n const {\n nodeId,\n jobId,\n source,\n storageId,\n metadata = {},\n uploadistaApi,\n logger,\n platformService,\n callbacks,\n } = options;\n\n // Build metadata for the input\n const inputMetadata = {\n originalName: source.name ?? 
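Note how the retry path above schedules the next attempt with `platformService.setTimeout` and hands the `Timeout` to `onRetry`, so the caller can cancel retries still pending when an upload is aborted. A bookkeeping sketch; it assumes `PlatformService` exposes a `clearTimeout` counterpart, which is not shown in this diff:

```typescript
import type { PlatformService, Timeout } from "../services/platform-service";

const pendingRetries: Timeout[] = [];

// Passed as onRetry: remember every scheduled retry timer.
const onRetry = (timeout: Timeout): void => {
  pendingRetries.push(timeout);
};

// On abort, cancel anything still scheduled (clearTimeout is assumed).
function cancelPendingRetries(platformService: PlatformService): void {
  for (const timeout of pendingRetries) {
    platformService.clearTimeout(timeout);
  }
  pendingRetries.length = 0;
}
```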
\"unknown\",\n mimeType: source.type ?? \"application/octet-stream\",\n size: source.size ?? 0,\n ...metadata,\n };\n\n logger.log(`Initializing input node ${nodeId} for job ${jobId}`);\n\n // Resume the job with init operation for this specific node\n await uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n {\n operation: \"init\",\n storageId,\n metadata: inputMetadata,\n },\n { contentType: \"application/json\" },\n );\n\n logger.log(`Waiting for upload ID from node ${nodeId}`);\n\n // Poll job status until this node's task is paused with upload file\n const maxAttempts = 60; // 30 seconds total\n const pollInterval = 500; // 0.5 second\n let attempts = 0;\n let jobStatus = await uploadistaApi.getJobStatus(jobId);\n\n while (attempts < maxAttempts) {\n // Find this specific node's task\n const nodeTask = jobStatus.tasks.find((task) => task.nodeId === nodeId);\n\n // Check if this node is paused and has a result\n if (\n nodeTask?.status === \"paused\" &&\n nodeTask.result &&\n (nodeTask.result as UploadFile).id\n ) {\n const uploadFile = nodeTask.result as UploadFile;\n logger.log(`Upload ID received for node ${nodeId}: ${uploadFile.id}`);\n\n callbacks?.onStart?.({\n uploadId: uploadFile.id,\n size: source.size ?? null,\n });\n\n return { uploadFile, nodeId };\n }\n\n // If task failed, throw error\n if (nodeTask?.status === \"failed\") {\n const error = new UploadistaError({\n name: \"FLOW_INIT_FAILED\",\n message: `Input node ${nodeId} failed during initialization`,\n });\n callbacks?.onError?.(error);\n throw error;\n }\n\n await new Promise<void>((resolve) =>\n platformService.setTimeout(resolve, pollInterval),\n );\n jobStatus = await uploadistaApi.getJobStatus(jobId);\n attempts++;\n }\n\n const error = new UploadistaError({\n name: \"FLOW_TIMEOUT\",\n message: `Input node ${nodeId} did not return upload ID after init`,\n });\n callbacks?.onError?.(error);\n throw error;\n}\n\n/**\n * Upload chunks for a single flow input.\n * This uploads file data directly to the upload API with smart chunking and retry logic.\n *\n * @param options - Configuration for uploading chunks\n */\nexport async function uploadInputChunks(\n options: UploadInputChunksOptions,\n): Promise<void> {\n const {\n nodeId,\n jobId,\n uploadFile,\n source,\n offset = 0,\n abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n callbacks,\n } = options;\n\n let retryAttempt = options.retryAttempt ?? 0;\n let offsetBeforeRetry = offset;\n let currentOffset = offset;\n\n try {\n // Get optimal chunk size\n const remainingBytes = source.size ? source.size - offset : undefined;\n const chunkSizeDecision = smartChunker.getNextChunkSize(remainingBytes);\n const chunkSize = chunkSizeDecision.size;\n const endByte = Math.min(offset + chunkSize, source.size ?? 
0);\n const sliceResult = await source.slice(offset, endByte);\n\n if (!sliceResult || !sliceResult.value) {\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: `Failed to read chunk from file for node ${nodeId}`,\n });\n }\n\n const chunkData = sliceResult.value;\n\n // Upload chunk directly to upload API\n const startTime = Date.now();\n\n const res = await uploadistaApi.uploadChunk(uploadFile.id, chunkData, {\n abortController,\n });\n\n const duration = Date.now() - startTime;\n\n if (!res.upload) {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `Upload chunk response missing upload data for node ${nodeId}`,\n });\n }\n\n currentOffset = res.upload.offset;\n\n callbacks?.onProgress?.(uploadFile.id, currentOffset, source.size ?? 0);\n callbacks?.onChunkComplete?.(\n currentOffset - offset,\n offset,\n source.size ?? 0,\n );\n\n // Record detailed chunk metrics\n if (smartChunking?.enabled !== false) {\n const chunkIndex = Math.floor(offset / chunkSize);\n\n metrics.recordChunk({\n chunkIndex,\n size: chunkSize,\n duration,\n speed: chunkSize / (duration / 1000),\n success: true,\n retryCount: retryAttempt,\n networkCondition:\n smartChunker.getLastDecision()?.networkCondition?.type,\n chunkingStrategy: smartChunker.getLastDecision()?.strategy,\n });\n\n // Update smart chunker with connection metrics\n const connectionMetrics = uploadistaApi.getConnectionMetrics();\n smartChunker.updateConnectionMetrics(connectionMetrics);\n }\n\n // Check if upload is complete\n if (currentOffset >= (source.size ?? 0)) {\n source.close();\n\n // Complete metrics session\n if (smartChunking?.enabled !== false) {\n const sessionMetrics = metrics.endSession();\n if (sessionMetrics) {\n logger.log(\n `Upload completed for node ${nodeId}: ${sessionMetrics.totalSize} bytes in ${sessionMetrics.totalDuration}ms, avg speed: ${Math.round(sessionMetrics.averageSpeed / 1024)}KB/s`,\n );\n }\n }\n\n return;\n }\n\n // Continue uploading next chunk\n await uploadInputChunks({\n ...options,\n offset: currentOffset,\n retryAttempt: 0, // Reset retry count on successful chunk\n });\n } catch (err) {\n // Retry logic\n if (retryDelays != null) {\n const shouldResetDelays = currentOffset > offsetBeforeRetry;\n if (shouldResetDelays) {\n // biome-ignore lint: mutation needed for retry logic\n retryAttempt = 0;\n }\n\n const castedErr = !(err instanceof UploadistaError)\n ? 
new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: `Network error during upload for node ${nodeId}`,\n cause: err as Error,\n })\n : err;\n\n if (\n shouldRetry(\n platformService,\n castedErr,\n retryAttempt,\n retryDelays,\n callbacks?.onShouldRetry,\n )\n ) {\n const delay = retryDelays[retryAttempt];\n offsetBeforeRetry = offset;\n\n const timeout = platformService.setTimeout(async () => {\n await uploadInputChunks({\n ...options,\n offset,\n retryAttempt: retryAttempt + 1,\n });\n }, delay);\n onRetry?.(timeout);\n } else {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `Failed to upload chunk for node ${nodeId} at offset ${offset}`,\n cause: err as Error,\n });\n }\n } else {\n throw err;\n }\n }\n}\n\n/**\n * Finalize a flow input by sending the finalize operation.\n * This tells the flow that this input has completed uploading.\n *\n * @param options - Configuration for finalizing the input\n */\nexport async function finalizeFlowInput(\n options: FinalizeFlowInputOptions,\n): Promise<void> {\n const { nodeId, jobId, uploadId, uploadistaApi, logger, callbacks } = options;\n\n logger.log(`Finalizing input node ${nodeId} for job ${jobId}`);\n\n try {\n await uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n {\n operation: \"finalize\",\n uploadId,\n },\n { contentType: \"application/json\" },\n );\n\n logger.log(`Input node ${nodeId} finalized successfully`);\n } catch (err) {\n const error = new UploadistaError({\n name: \"FLOW_FINALIZE_FAILED\",\n message: `Failed to finalize input node ${nodeId} for job ${jobId}`,\n cause: err as Error,\n });\n callbacks?.onError?.(error);\n throw error;\n }\n}\n","import type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport type { PreviousUpload } from \"../types/previous-upload\";\n\n/**\n * Find previous uploads by fingerprint\n */\nexport async function findPreviousUploads(\n clientStorage: ClientStorage,\n fingerprint: string,\n): Promise<PreviousUpload[]> {\n return clientStorage.findUploadsByFingerprint(fingerprint);\n}\n\n/**\n * Resume from a previous upload\n */\nexport function resumeFromPreviousUpload(previousUpload: PreviousUpload): {\n uploadId: string | null;\n parallelUploadUrls: string[] | undefined;\n clientStorageKey: string | null;\n} {\n return {\n uploadId: previousUpload.uploadId ?? null,\n parallelUploadUrls: previousUpload.parallelUploadUrls,\n clientStorageKey: previousUpload.clientStorageKey,\n };\n}\n\n/**\n * Add the upload URL to the URL storage, if possible.\n */\nexport async function saveUploadInClientStorage({\n clientStorage,\n fingerprint,\n size,\n metadata,\n clientStorageKey,\n storeFingerprintForResuming,\n generateId,\n}: {\n clientStorage: ClientStorage;\n fingerprint: string;\n size: number;\n metadata: Record<string, string | number | boolean>;\n clientStorageKey: string | null;\n storeFingerprintForResuming: boolean;\n generateId: IdGenerationService;\n}): Promise<string | undefined> {\n // We do not store the upload key\n // - if it was disabled in the option, or\n // - if no fingerprint was calculated for the input (i.e. 
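The three orchestrator phases above compose per input node: init and wait for the upload ID, stream chunks, then finalize so downstream nodes can run. A sketch of that wiring as if it lived alongside the functions above; in the package the flow manager does this, and the option plumbing is abbreviated here:

```typescript
async function uploadOneInput(
  init: InitializeFlowInputOptions,
  chunkOpts: Omit<UploadInputChunksOptions, "uploadFile">,
): Promise<void> {
  // 1. Resume the job with "init" and poll until the node returns an upload ID.
  const { uploadFile } = await initializeFlowInput(init);

  // 2. Stream the file to the upload API with smart chunking and retries.
  await uploadInputChunks({ ...chunkOpts, uploadFile });

  // 3. Tell the flow this input is complete.
  await finalizeFlowInput({
    nodeId: init.nodeId,
    jobId: init.jobId,
    uploadId: uploadFile.id,
    uploadistaApi: init.uploadistaApi,
    logger: init.logger,
  });
}
```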
a stream), or\n // - if the key is already stored.\n if (\n !storeFingerprintForResuming ||\n !fingerprint ||\n clientStorageKey != null\n ) {\n return undefined;\n }\n\n const storedUpload: PreviousUpload = {\n size,\n metadata,\n creationTime: new Date().toString(),\n clientStorageKey: fingerprint,\n };\n\n const newClientStorageKey = await clientStorage.addUpload(\n fingerprint,\n storedUpload,\n { generateId },\n );\n\n return newClientStorageKey;\n}\n\n/**\n * Remove the entry in the URL storage, if it has been saved before.\n */\nexport async function removeFromClientStorage(\n clientStorage: ClientStorage,\n clientStorageKey: string,\n): Promise<void> {\n if (!clientStorageKey) return;\n await clientStorage.removeUpload(clientStorageKey);\n}\n","import type { InputFile, UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services/abort-controller-service\";\nimport type { ChecksumService } from \"../services/checksum-service\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { WebSocketLike } from \"../services/websocket-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport {\n type OnProgress,\n type OnShouldRetry,\n shouldRetry,\n uploadChunk,\n} from \"./chunk-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport {\n removeFromClientStorage,\n saveUploadInClientStorage,\n} from \"./upload-storage\";\nimport { encodeMetadata, inStatusCategory } from \"./upload-utils\";\n\nexport type Callbacks = {\n onProgress?: OnProgress;\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n onSuccess?: (payload: UploadFile) => void;\n onError?: (error: Error | UploadistaError) => void;\n onStart?: (file: { uploadId: string; size: number | null }) => void;\n onJobStart?: (jobId: string) => void;\n onShouldRetry?: OnShouldRetry;\n};\n\nexport type SingleUploadResult = {\n uploadIdStorageKey: string | undefined;\n uploadId: string;\n offset: number;\n};\n\n/**\n * Start uploading the file using PATCH requests. The file will be divided\n * into chunks as specified in the chunkSize option. 
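A resume sketch built on the storage helpers above; how the fingerprint is computed and which candidate to prefer are host-application choices, so the "latest entry" policy here is illustrative:

```typescript
import type { ClientStorage } from "../storage/client-storage";
import { findPreviousUploads, resumeFromPreviousUpload } from "./upload-storage";

async function pickResumeCandidate(
  clientStorage: ClientStorage,
  fingerprint: string,
) {
  const previous = await findPreviousUploads(clientStorage, fingerprint);
  if (previous.length === 0) return null; // nothing stored, start fresh

  // Illustrative policy: take the last stored entry.
  const latest = previous[previous.length - 1];
  return resumeFromPreviousUpload(latest);
  // → { uploadId, parallelUploadUrls, clientStorageKey }
}
```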
During the upload\n * the onProgress event handler may be invoked multiple times.\n */\nexport async function performUpload({\n uploadId,\n offset,\n source,\n uploadLengthDeferred,\n retryAttempt = 0,\n abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n ...callbacks\n}: {\n uploadId: string;\n offset: number;\n retryAttempt?: number;\n source: FileSource;\n abortController: AbortControllerLike;\n uploadLengthDeferred: boolean | undefined;\n retryDelays: number[] | undefined;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n platformService: PlatformService;\n onRetry?: (timeout: Timeout) => void;\n} & Callbacks): Promise<void> {\n let offsetBeforeRetry = offset;\n let currentOffset = offset;\n\n try {\n const res = await uploadChunk({\n uploadId,\n source,\n offset,\n uploadLengthDeferred,\n onProgress: callbacks.onProgress,\n abortController,\n smartChunker,\n uploadistaApi,\n logger,\n });\n\n if (!inStatusCategory(res.status, 200) || res.upload == null) {\n throw new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while uploading chunk\",\n });\n }\n\n currentOffset = res.upload.offset;\n\n callbacks.onProgress?.(uploadId, currentOffset, res.upload.size ?? 0);\n callbacks.onChunkComplete?.(\n currentOffset - offset,\n offset,\n res.upload?.size ?? 0,\n );\n\n // Record detailed chunk metrics\n if (smartChunking?.enabled !== false) {\n const chunkIndex = Math.floor(offset / (currentOffset - offset || 1));\n const chunkSize = currentOffset - offset;\n const chunkDuration = Date.now() - (Date.now() - 100); // Approximate, real timing is in uploadChunk\n const lastDecision = smartChunker.getLastDecision();\n\n metrics.recordChunk({\n chunkIndex,\n size: chunkSize,\n duration: chunkDuration,\n speed: chunkSize / (chunkDuration / 1000),\n success: true,\n retryCount: retryAttempt,\n networkCondition: lastDecision?.networkCondition?.type,\n chunkingStrategy: lastDecision?.strategy,\n });\n\n // Update smart chunker with connection metrics for pooling optimization\n const connectionMetrics = uploadistaApi.getConnectionMetrics();\n smartChunker.updateConnectionMetrics(connectionMetrics);\n }\n\n if (currentOffset >= (source.size ?? 
0)) {\n if (source) source.close();\n\n // Complete metrics session\n if (smartChunking?.enabled !== false) {\n const sessionMetrics = metrics.endSession();\n if (sessionMetrics) {\n logger.log(\n `Upload completed: ${sessionMetrics.totalSize} bytes in ${sessionMetrics.totalDuration}ms, avg speed: ${Math.round(sessionMetrics.averageSpeed / 1024)}KB/s`,\n );\n }\n }\n\n callbacks.onSuccess?.(res.upload);\n return;\n }\n\n await performUpload({\n uploadId,\n offset: currentOffset,\n source,\n uploadLengthDeferred,\n retryDelays,\n smartChunker,\n platformService,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n onRetry,\n abortController,\n ...callbacks,\n });\n } catch (err) {\n // Check if we should retry, when enabled, before sending the error to the user.\n if (retryDelays != null) {\n // We will reset the attempt counter if\n // - we were already able to connect to the server (offset != null) and\n // - we were able to upload a small chunk of data to the server\n const shouldResetDelays =\n offset != null && currentOffset > offsetBeforeRetry;\n if (shouldResetDelays) {\n retryAttempt = 0;\n }\n\n const castedErr = !(err instanceof UploadistaError)\n ? new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Network error\",\n cause: err as Error,\n })\n : err;\n\n if (\n shouldRetry(\n platformService,\n castedErr,\n retryAttempt,\n retryDelays,\n callbacks.onShouldRetry,\n )\n ) {\n const delay = retryDelays[retryAttempt];\n\n offsetBeforeRetry = offset;\n\n const timeout = platformService.setTimeout(async () => {\n await performUpload({\n uploadId,\n offset,\n source,\n retryAttempt: retryAttempt + 1,\n uploadLengthDeferred,\n retryDelays,\n smartChunker,\n platformService,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n onRetry,\n abortController,\n ...callbacks,\n });\n }, delay);\n onRetry?.(timeout);\n } else {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `failed to upload chunk for ${uploadId} at offset ${offset}`,\n cause: err as Error,\n });\n }\n }\n }\n}\n\n/**\n * Create a new upload using the creation extension by sending a POST\n * request to the endpoint. 
After successful creation the file will be\n * uploaded\n */\nexport async function createUpload({\n fingerprint,\n storageId,\n source,\n uploadLengthDeferred,\n metadata,\n uploadistaApi,\n logger,\n checksumService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n computeChecksum = true,\n checksumAlgorithm = \"sha256\",\n platformService,\n ...callbacks\n}: {\n fingerprint: string;\n storageId: string;\n source: FileSource;\n uploadLengthDeferred: boolean | undefined;\n metadata: Record<string, string>;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n closeWebSocket: (uploadId: string) => void;\n checksumService: ChecksumService;\n computeChecksum?: boolean;\n checksumAlgorithm?: string;\n platformService: PlatformService;\n} & Callbacks): Promise<SingleUploadResult | undefined> {\n if (!uploadLengthDeferred && source.size == null) {\n const error = new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\",\n message: \"expected size to be set\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n // Compute checksum if enabled and file is a File object\n let checksum: string | undefined;\n if (computeChecksum && platformService.isFileLike(source.input)) {\n try {\n logger.log(\"Computing file checksum...\");\n checksum = await checksumService.computeChecksum(\n new Uint8Array(source.input as any),\n );\n logger.log(`Checksum computed: ${checksum}`);\n } catch (error) {\n logger.log(\n `Warning: Failed to compute checksum: ${error instanceof Error ? error.message : \"Unknown error\"}`,\n );\n // Continue without checksum if computation fails\n }\n }\n\n const createUploadData: InputFile = {\n uploadLengthDeferred,\n storageId,\n size: source.size ?? 0,\n metadata: metadata ? encodeMetadata(metadata) : undefined,\n fileName: source.name ?? undefined,\n type: source.type ?? \"\",\n lastModified: source.lastModified ?? undefined,\n checksum,\n checksumAlgorithm: checksum ? checksumAlgorithm : undefined,\n };\n\n const { upload, status } = await uploadistaApi.createUpload(createUploadData);\n\n if (!inStatusCategory(status, 200) || upload == null) {\n const error = new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while creating upload\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n logger.log(`Created upload ${upload.id}`);\n\n openWebSocket(upload.id);\n\n if (upload.size === 0) {\n // Nothing to upload and file was successfully created\n callbacks.onSuccess?.(upload);\n if (source) source.close();\n closeWebSocket(upload.id);\n return;\n }\n\n const uploadIdStorageKey = await saveUploadInClientStorage({\n clientStorage,\n fingerprint,\n size: upload.size ?? 0,\n metadata: upload.metadata ?? {},\n clientStorageKey: null,\n storeFingerprintForResuming,\n generateId,\n });\n\n callbacks.onStart?.({\n uploadId: upload.id,\n size: upload.size ?? null,\n });\n\n return {\n uploadIdStorageKey,\n uploadId: upload.id,\n offset: upload.offset,\n };\n}\n\n/**\n * Try to resume an existing upload. First a HEAD request will be sent\n * to retrieve the offset. If the request fails a new upload will be\n * created. 
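`createUpload` above serializes metadata with `encodeMetadata` from upload-utils (defined earlier in this diff), producing comma-separated `key base64(value)` pairs. A recomputed example of the wire format:

```typescript
import { encodeMetadata } from "./upload-utils"; // path as in the source tree above

encodeMetadata({ filename: "report.pdf", author: null });
// → "filename cmVwb3J0LnBkZg==,author bnVsbA=="
// (null is stringified before encoding, so it round-trips as the text "null")
```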
In the case of a successful response the file will be uploaded.\n */\nexport async function resumeUpload({\n uploadId,\n storageId,\n uploadIdStorageKey,\n fingerprint,\n source,\n uploadLengthDeferred,\n uploadistaApi,\n logger,\n platformService,\n checksumService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n ...callbacks\n}: {\n uploadId: string;\n storageId: string;\n uploadIdStorageKey: string;\n fingerprint: string;\n platformService: PlatformService;\n source: FileSource;\n uploadLengthDeferred: boolean | undefined;\n uploadistaApi: UploadistaApi;\n checksumService: ChecksumService;\n logger: Logger;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n} & Callbacks): Promise<SingleUploadResult | undefined> {\n const res = await uploadistaApi.getUpload(uploadId);\n const status = res.status;\n\n if (!inStatusCategory(status, 200)) {\n // If the upload is locked (indicated by the 423 Locked status code), we\n // emit an error instead of directly starting a new upload. This way the\n // retry logic can catch the error and will retry the upload. An upload\n // is usually locked for a short period of time and will be available\n // afterwards.\n if (status === 423) {\n const error = new UploadistaError({\n name: \"UPLOAD_LOCKED\",\n message: \"upload is currently locked; retry later\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n if (inStatusCategory(status, 400)) {\n // Remove stored fingerprint and corresponding endpoint,\n // on client errors since the file can not be found\n await removeFromClientStorage(clientStorage, uploadIdStorageKey);\n }\n\n // Try to create a new upload\n return await createUpload({\n platformService,\n fingerprint,\n storageId,\n source,\n uploadLengthDeferred,\n metadata: {},\n uploadistaApi,\n logger,\n checksumService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket: () => {}, // Placeholder, will be provided by caller\n ...callbacks,\n });\n }\n\n const upload = res.upload;\n if (upload == null) {\n const error = new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while resuming upload\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n await saveUploadInClientStorage({\n clientStorage,\n fingerprint,\n size: upload.size ?? 0,\n metadata: upload.metadata ?? {},\n clientStorageKey: uploadIdStorageKey,\n storeFingerprintForResuming,\n generateId,\n });\n\n // Upload has already been completed and we do not need to send additional\n // data to the server\n if (upload.offset === upload.size) {\n return undefined;\n }\n\n openWebSocket(upload.id);\n\n return {\n uploadId,\n uploadIdStorageKey,\n offset: upload.offset,\n };\n}\n\n/**\n * Initiate the uploading procedure for a non-parallel upload. 
Here the entire file is\n * uploaded in a sequential matter.\n */\nexport async function startSingleUpload({\n source,\n uploadId,\n uploadIdStorageKey,\n storageId,\n fingerprint,\n platformService,\n uploadLengthDeferred,\n uploadistaApi,\n checksumService,\n logger,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n ...callbacks\n}: {\n source: FileSource;\n uploadId: string | null;\n uploadIdStorageKey: string | null;\n storageId: string;\n fingerprint: string;\n platformService: PlatformService;\n uploadLengthDeferred: boolean | undefined;\n uploadistaApi: UploadistaApi;\n checksumService: ChecksumService;\n logger: Logger;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n closeWebSocket: (uploadId: string) => void;\n} & Callbacks): Promise<SingleUploadResult | undefined> {\n // The upload had been started previously and we should reuse this URL.\n if (uploadId != null && uploadIdStorageKey != null) {\n logger.log(`Resuming upload from previous id: ${uploadId}`);\n return await resumeUpload({\n uploadId,\n uploadIdStorageKey,\n storageId,\n fingerprint,\n source,\n checksumService,\n uploadLengthDeferred,\n uploadistaApi,\n logger,\n platformService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n ...callbacks,\n });\n }\n\n // An upload has not started for the file yet, so we start a new one\n logger.log(\"Creating a new upload\");\n return await createUpload({\n fingerprint,\n storageId,\n source,\n uploadLengthDeferred,\n metadata: {},\n uploadistaApi,\n logger,\n checksumService,\n platformService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n ...callbacks,\n });\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type {\n AbortControllerFactory,\n AbortControllerLike,\n} from \"../services/abort-controller-service\";\nimport type { ChecksumService } from \"../services/checksum-service\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { WebSocketLike } from \"../services/websocket-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport { type Callbacks, createUpload, performUpload } from \"./single-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport { calculateSegments } from \"./upload-utils\";\n\nexport type ParallelUploadSegment = {\n uploadId: string;\n uploadIdStorageKey: string | undefined;\n segmentIndex: number;\n startByte: number;\n endByte: number;\n offset: number;\n abortController: AbortControllerLike;\n retryTimeout: Timeout | null;\n};\n\nexport type ParallelUploadState = {\n segments: ParallelUploadSegment[];\n totalProgress: number;\n completed: boolean;\n failed: boolean;\n error?: Error;\n};\n\nexport type ParallelUploadResult = {\n parallelState: ParallelUploadState;\n abort: () => Promise<void>;\n};\n\n/**\n * Initiate the uploading procedure for a parallelized upload, where one file is split into\n * multiple request 
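The resume-or-create decision in `startSingleUpload` above reduces to a single predicate; a standalone restatement (the real function also forwards the storage and WebSocket plumbing to whichever branch it takes):

```typescript
function canResume(
  uploadId: string | null,
  uploadIdStorageKey: string | null,
): boolean {
  // Resuming needs both the server-side upload ID and the local storage
  // key that tracks it; if either is missing, a fresh upload is created.
  return uploadId != null && uploadIdStorageKey != null;
}
```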
which are run in parallel.\n */\nexport async function startParallelUpload({\n source,\n storageId,\n fingerprint,\n uploadLengthDeferred,\n parallelUploads,\n parallelChunkSize,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n checksumService,\n smartChunking,\n metrics,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n terminate,\n abortControllerFactory,\n platformService,\n ...callbacks\n}: {\n source: FileSource;\n storageId: string;\n fingerprint: string;\n uploadLengthDeferred: boolean | undefined;\n parallelUploads: number;\n parallelChunkSize?: number;\n retryDelays?: number[];\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n checksumService: ChecksumService;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n closeWebSocket: (uploadId: string) => void;\n terminate: (uploadId: string) => Promise<void>;\n abortControllerFactory: AbortControllerFactory;\n platformService: PlatformService;\n} & Callbacks): Promise<ParallelUploadResult | undefined> {\n if (!source.size || source.size === 0) {\n callbacks.onError?.(\n new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\",\n message: \"Parallel upload requires a known file size\",\n }),\n );\n return;\n }\n\n // Calculate segments for parallel upload\n const segments = calculateSegments(\n source.size,\n parallelUploads,\n parallelChunkSize,\n );\n logger.log(`Starting parallel upload with ${segments.length} segments`);\n\n // Initialize parallel upload state\n const parallelState: ParallelUploadState = {\n segments: [],\n totalProgress: 0,\n completed: false,\n failed: false,\n };\n\n // Progress tracking for aggregation\n const segmentProgress = new Map<number, number>();\n const segmentTotals = new Map<number, number>();\n\n const updateTotalProgress = () => {\n const totalBytes = Array.from(segmentTotals.values()).reduce(\n (sum, size) => sum + size,\n 0,\n );\n const progressBytes = Array.from(segmentProgress.values()).reduce(\n (sum, progress) => sum + progress,\n 0,\n );\n parallelState.totalProgress =\n totalBytes > 0 ? progressBytes / totalBytes : 0;\n\n // Aggregate progress callback\n if (callbacks.onProgress && totalBytes > 0) {\n callbacks.onProgress(`parallel-upload`, progressBytes, totalBytes);\n }\n };\n\n try {\n // Create upload sessions for each segment\n const segmentUploads = await Promise.all(\n segments.map(async (segment) => {\n // Create a segmented source for this chunk\n const segmentSource: FileSource = {\n ...source,\n size: segment.endByte - segment.startByte,\n async slice(start, end) {\n // Adjust slice to segment boundaries\n const actualStart = segment.startByte + (start ?? 0);\n const actualEnd = Math.min(\n segment.startByte + (end ?? 
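The progress bookkeeping above can be read in isolation: each segment writes its uploaded bytes and its total size into two maps, and the overall ratio is the sum over one divided by the sum over the other. A minimal sketch of what `updateTotalProgress` computes, reusing the source's `segmentProgress`/`segmentTotals` names (the standalone function wrapper is illustrative, not part of the package):

```typescript
// Illustrative sketch of the aggregation in updateTotalProgress above.
function aggregateProgress(
  segmentProgress: Map<number, number>, // segmentIndex -> bytes uploaded
  segmentTotals: Map<number, number>, // segmentIndex -> segment size
): number {
  const totalBytes = [...segmentTotals.values()].reduce((a, b) => a + b, 0);
  const uploadedBytes = [...segmentProgress.values()].reduce((a, b) => a + b, 0);
  return totalBytes > 0 ? uploadedBytes / totalBytes : 0;
}
```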
segment.endByte - segment.startByte),\n segment.endByte,\n );\n return await source.slice(actualStart, actualEnd);\n },\n };\n\n const createResult = await createUpload({\n fingerprint: `${fingerprint}-segment-${segment.segmentIndex}`,\n storageId,\n source: segmentSource,\n uploadLengthDeferred,\n platformService,\n metadata: {\n parallelUpload: \"true\",\n segmentIndex: segment.segmentIndex.toString(),\n totalSegments: segments.length.toString(),\n parentFingerprint: fingerprint,\n },\n checksumService,\n uploadistaApi,\n logger,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n onSuccess: () => {},\n onError: (error) =>\n logger.log(\n `Segment ${segment.segmentIndex} creation error: ${error}`,\n ),\n onStart: (info) => {\n segmentTotals.set(segment.segmentIndex, info.size ?? 0);\n updateTotalProgress();\n },\n });\n\n if (!createResult) {\n throw new UploadistaError({\n name: \"PARALLEL_SEGMENT_CREATION_FAILED\",\n message: `Failed to create upload segment ${segment.segmentIndex}`,\n });\n }\n\n const parallelSegment: ParallelUploadSegment = {\n uploadId: createResult.uploadId,\n uploadIdStorageKey: createResult.uploadIdStorageKey,\n segmentIndex: segment.segmentIndex,\n startByte: segment.startByte,\n endByte: segment.endByte,\n offset: createResult.offset,\n abortController: abortControllerFactory.create(),\n retryTimeout: null,\n };\n\n return {\n segment: parallelSegment,\n source: segmentSource,\n };\n }),\n );\n\n // Store segments in state\n parallelState.segments = segmentUploads.map((upload) => upload.segment);\n\n // Notify start with combined upload info\n callbacks.onStart?.({\n uploadId: `parallel-${parallelState.segments.map((s) => s.uploadId).join(\",\")}`,\n size: source.size,\n });\n\n // Start parallel upload for each segment\n const uploadPromises = segmentUploads.map(\n async ({ segment, source: segmentSource }) => {\n try {\n await performUpload({\n uploadId: segment.uploadId,\n offset: segment.offset,\n source: segmentSource,\n uploadLengthDeferred,\n abortController: segment.abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n platformService,\n logger,\n smartChunking,\n metrics,\n onProgress: (_, bytes, total) => {\n segmentProgress.set(segment.segmentIndex, bytes);\n if (total) segmentTotals.set(segment.segmentIndex, total);\n updateTotalProgress();\n },\n onChunkComplete: (chunkSize, bytesAccepted, bytesTotal) => {\n if (callbacks.onChunkComplete) {\n callbacks.onChunkComplete(chunkSize, bytesAccepted, bytesTotal);\n }\n },\n onSuccess: (_uploadFile) => {\n logger.log(\n `Segment ${segment.segmentIndex} completed successfully`,\n );\n // Mark this segment as completed\n segmentProgress.set(\n segment.segmentIndex,\n segmentTotals.get(segment.segmentIndex) ?? 0,\n );\n updateTotalProgress();\n },\n onShouldRetry: (error, retryAttempt) => {\n logger.log(\n `Segment ${segment.segmentIndex} retry attempt ${retryAttempt}: ${error}`,\n );\n return retryAttempt < (retryDelays?.length ?? 
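The `segmentSource` wrapper above turns segment-relative slice requests into absolute file offsets: shift by `startByte` and clamp so a read never crosses `endByte`. The offset arithmetic, pulled out as a hedged sketch:

```typescript
// Illustrative sketch of the offset math in the segmentSource slice
// wrapper above; `segment` follows the ParallelUploadSegment byte fields.
function toAbsoluteRange(
  segment: { startByte: number; endByte: number },
  start = 0,
  end?: number,
): { actualStart: number; actualEnd: number } {
  const actualStart = segment.startByte + start;
  const actualEnd = Math.min(
    segment.startByte + (end ?? segment.endByte - segment.startByte),
    segment.endByte,
  );
  return { actualStart, actualEnd };
}

// Example: for the segment [10 MB, 20 MB), slicing its first 4 MB reads
// absolute bytes [10 MB, 14 MB) from the underlying file.
```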
0);\n },\n onRetry: (timeout) => {\n segment.retryTimeout = timeout;\n },\n onError: (error) => {\n logger.log(`Segment ${segment.segmentIndex} failed: ${error}`);\n throw error;\n },\n });\n } catch (error) {\n logger.log(`Segment ${segment.segmentIndex} upload failed: ${error}`);\n throw new UploadistaError({\n name: \"PARALLEL_SEGMENT_UPLOAD_FAILED\",\n message: `Segment ${segment.segmentIndex} upload failed`,\n cause: error as Error,\n });\n }\n },\n );\n\n // Wait for all segments to complete\n await Promise.all(uploadPromises);\n\n // Mark as completed\n parallelState.completed = true;\n logger.log(\"All parallel upload segments completed successfully\");\n\n // Call success callback with aggregated result\n if (callbacks.onSuccess) {\n const aggregatedResult: UploadFile = {\n id: `parallel-${parallelState.segments.map((s) => s.uploadId).join(\",\")}`,\n offset: source.size,\n size: source.size,\n storage: {\n id: storageId,\n type: \"parallel-upload\",\n },\n metadata: {\n parallelUpload: \"true\",\n totalSegments: segments.length.toString(),\n fingerprint,\n },\n };\n callbacks.onSuccess(aggregatedResult);\n }\n\n // Close all sources\n for (const upload of segmentUploads) {\n upload.source.close?.();\n }\n\n return {\n parallelState,\n abort: async () => {\n await abortParallelUpload(\n parallelState,\n logger,\n terminate,\n closeWebSocket,\n platformService,\n );\n },\n };\n } catch (error) {\n parallelState.failed = true;\n parallelState.error = error as Error;\n\n // Clean up any created segments\n await abortParallelUpload(\n parallelState,\n logger,\n terminate,\n closeWebSocket,\n platformService,\n );\n\n callbacks.onError?.(error as Error);\n throw error;\n }\n}\n\n/**\n * Abort a parallel upload by cleaning up all segments\n */\nexport async function abortParallelUpload(\n state: ParallelUploadState,\n logger: Logger,\n terminate: (uploadId: string) => Promise<void>,\n closeWebSocket: (uploadId: string) => void,\n platformService: PlatformService,\n): Promise<void> {\n logger.log(\"Aborting parallel upload...\");\n\n // Abort all segment controllers\n for (const segment of state.segments) {\n segment.abortController.abort();\n\n if (segment.retryTimeout) {\n platformService.clearTimeout(segment.retryTimeout);\n segment.retryTimeout = null;\n }\n\n // Attempt to terminate the upload on the server\n try {\n await terminate(segment.uploadId);\n } catch (error) {\n logger.log(\n `Failed to terminate segment ${segment.segmentIndex}: ${error}`,\n );\n }\n\n // Close websockets\n closeWebSocket(segment.uploadId);\n }\n\n state.completed = false;\n state.failed = true;\n logger.log(\"Parallel upload aborted\");\n}\n","/**\n * Platform-agnostic service for platform-specific APIs\n * Provides abstraction for timer functions and platform detection\n */\n\nexport type Timeout = unknown;\n\nexport interface PlatformService {\n /**\n * Schedule a callback to run after a delay\n */\n setTimeout: (callback: () => void, ms: number | undefined) => Timeout;\n\n /**\n * Cancel a scheduled callback\n */\n clearTimeout: (id: Timeout) => void;\n\n /**\n * Check if we're in a browser environment\n */\n isBrowser: () => boolean;\n\n /**\n * Check if network is online\n */\n isOnline: () => boolean;\n\n /**\n * Check if a value is a File-like object\n */\n isFileLike: (value: unknown) => boolean;\n\n /**\n * Get file name from File-like object\n */\n getFileName: (file: unknown) => string | undefined;\n\n /**\n * Get file type from File-like object\n */\n getFileType: (file: unknown) => string | 
undefined;\n\n /**\n * Get file size from File-like object\n */\n getFileSize: (file: unknown) => number | undefined;\n\n /**\n * Get file last modified timestamp from File-like object\n */\n getFileLastModified: (file: unknown) => number | undefined;\n}\n\n/**\n * Simple async wait utility\n */\nexport async function wait(\n platformService: PlatformService,\n ms: number,\n): Promise<void> {\n return new Promise<void>((resolve) =>\n platformService.setTimeout(resolve, ms),\n );\n}\n","import type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { AbortControllerLike } from \"../services/abort-controller-service\";\nimport {\n type PlatformService,\n type Timeout,\n wait,\n} from \"../services/platform-service\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport { shouldRetry } from \"./chunk-upload\";\nimport { removeFromClientStorage } from \"./upload-storage\";\n\n/**\n * Use the Termination extension to delete an upload from the server by sending a DELETE\n * request to the specified upload URL. This is only possible if the server supports the\n * Termination extension. If the `retryDelays` property is set, the method will\n * also retry if an error occurs.\n */\nexport async function terminate(\n uploadId: string,\n uploadistaApi: UploadistaApi,\n platformService: PlatformService,\n retryDelays: number[] | undefined,\n retryAttempt = 0,\n): Promise<void> {\n try {\n const res = await uploadistaApi.deleteUpload(uploadId);\n // A 204 response indicates a successful request\n if (res.status === 204) {\n return;\n }\n\n throw new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while terminating upload\",\n });\n } catch (err) {\n const error = err as UploadistaError;\n\n if (!shouldRetry(platformService, error, retryAttempt, retryDelays)) {\n throw err;\n }\n\n // The attempt counter indexes into the retryDelays array. Once every delay has\n // been used, shouldRetry returns false above and the error bubbles up to the caller.\n // We recursively call the terminate function while incrementing retryAttempt.\n const delay = retryDelays?.[retryAttempt] ?? 0;\n\n await wait(platformService, delay);\n\n return await terminate(\n uploadId,\n uploadistaApi,\n platformService,\n retryDelays,\n retryAttempt + 1,\n );\n }\n}\n\n/**\n * Abort any running request and stop the current upload. After abort is called, no event\n * handler will be invoked anymore. 
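The retry loop in `terminate` generalizes: attempt the operation, and on failure wait `retryDelays[attempt]` before trying again, rethrowing once the delays are exhausted. A hedged iterative sketch of the same pattern using the `wait` helper and `PlatformService` shown above, omitting the `shouldRetry` check for brevity (`doRequest` is a hypothetical operation):

```typescript
async function withRetries(
  platformService: PlatformService,
  retryDelays: number[],
  doRequest: () => Promise<void>, // hypothetical operation to retry
): Promise<void> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await doRequest();
    } catch (err) {
      // One retry per configured delay; after that, bubble the error up.
      if (attempt >= retryDelays.length) throw err;
      await wait(platformService, retryDelays[attempt] ?? 0);
    }
  }
}
```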
You can use the `start` method to resume the upload\n * again.\n * If `shouldTerminate` is true, the `terminate` function will be called to remove the\n * current upload from the server.\n */\nexport async function abort({\n uploadId,\n uploadIdStorageKey,\n retryTimeout,\n shouldTerminate,\n abortController,\n uploadistaApi,\n platformService,\n retryDelays,\n clientStorage,\n}: {\n uploadId: string;\n uploadIdStorageKey: string | undefined;\n retryTimeout: Timeout | null;\n shouldTerminate: boolean;\n abortController: AbortControllerLike;\n uploadistaApi: UploadistaApi;\n platformService: PlatformService;\n retryDelays?: number[];\n clientStorage: ClientStorage;\n}): Promise<void> {\n // Stop any current running request.\n abortController.abort();\n\n // Stop any timeout used for initiating a retry.\n if (retryTimeout != null) {\n platformService.clearTimeout(retryTimeout);\n }\n\n if (!shouldTerminate || uploadId == null) {\n return;\n }\n\n await terminate(uploadId, uploadistaApi, platformService, retryDelays);\n\n if (uploadIdStorageKey != null) {\n return removeFromClientStorage(clientStorage, uploadIdStorageKey);\n }\n}\n","import type { ChunkMetrics } from \"../types/chunk-metrics\";\nimport type { PerformanceInsights } from \"../types/performance-insights\";\nimport type { UploadSessionMetrics } from \"../types/upload-session-metrics\";\n\nexport interface UploadMetricsConfig {\n maxChunkHistory?: number;\n enableDetailedMetrics?: boolean;\n performanceThresholds?: {\n slowSpeed: number; // bytes per second\n fastSpeed: number; // bytes per second\n highRetryRate: number; // ratio\n };\n}\n\nexport class UploadMetrics {\n private config: Required<UploadMetricsConfig>;\n private chunkHistory: ChunkMetrics[] = [];\n private currentSession: Partial<UploadSessionMetrics> = {};\n private sessionStartTime = 0;\n\n constructor(config: UploadMetricsConfig = {}) {\n this.config = {\n maxChunkHistory: config.maxChunkHistory ?? 1000,\n enableDetailedMetrics: config.enableDetailedMetrics ?? 
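A hedged usage sketch for `abort()` above. With `shouldTerminate: true` the upload is also deleted server-side and the persisted resume key is removed; the service instances and key names here are illustrative and would normally come from the client's dependency wiring:

```typescript
// Assumes abortController, uploadistaApi, platformService, and
// clientStorage are already constructed by the surrounding client.
await abort({
  uploadId: "upload-abc123",
  uploadIdStorageKey: "uploadista::my-fingerprint",
  retryTimeout: null,
  shouldTerminate: true,
  abortController,
  uploadistaApi,
  platformService,
  retryDelays: [0, 1000, 3000],
  clientStorage,
});
```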
true,\n performanceThresholds: {\n slowSpeed: 100 * 1024, // 100 KB/s\n fastSpeed: 5 * 1024 * 1024, // 5 MB/s\n highRetryRate: 0.2, // 20%\n ...config.performanceThresholds,\n },\n };\n }\n\n startSession(\n uploadId: string,\n totalSize: number,\n adaptiveChunkingEnabled: boolean,\n ): void {\n this.sessionStartTime = Date.now();\n this.currentSession = {\n uploadId,\n totalSize,\n chunksCompleted: 0,\n chunksTotal: Math.ceil(totalSize / (1024 * 1024)), // rough estimate\n totalDuration: 0,\n totalRetries: 0,\n adaptiveChunkingEnabled,\n startTime: this.sessionStartTime,\n };\n this.chunkHistory = [];\n }\n\n recordChunk(metrics: Omit<ChunkMetrics, \"timestamp\">): void {\n const chunkMetrics: ChunkMetrics = {\n ...metrics,\n timestamp: Date.now(),\n };\n\n this.chunkHistory.push(chunkMetrics);\n\n // Keep history within limits\n if (this.chunkHistory.length > this.config.maxChunkHistory) {\n this.chunkHistory = this.chunkHistory.slice(-this.config.maxChunkHistory);\n }\n\n // Update session metrics\n if (this.currentSession && chunkMetrics.success) {\n this.currentSession.chunksCompleted =\n (this.currentSession.chunksCompleted || 0) + 1;\n this.currentSession.totalDuration =\n (this.currentSession.totalDuration || 0) + chunkMetrics.duration;\n this.currentSession.totalRetries =\n (this.currentSession.totalRetries || 0) + chunkMetrics.retryCount;\n }\n }\n\n endSession(): UploadSessionMetrics | null {\n if (!this.currentSession.uploadId) {\n return null;\n }\n\n const endTime = Date.now();\n const totalDuration = endTime - this.sessionStartTime;\n const successfulChunks = this.chunkHistory.filter((chunk) => chunk.success);\n\n if (successfulChunks.length === 0) {\n return null;\n }\n\n const speeds = successfulChunks.map((chunk) => chunk.speed);\n const averageSpeed =\n speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;\n const peakSpeed = Math.max(...speeds);\n const minSpeed = Math.min(...speeds);\n const successRate = successfulChunks.length / this.chunkHistory.length;\n\n const sessionMetrics: UploadSessionMetrics = {\n uploadId: this.currentSession.uploadId || \"\",\n totalSize: this.currentSession.totalSize || 0,\n totalDuration,\n chunksCompleted: successfulChunks.length,\n chunksTotal: this.chunkHistory.length,\n averageSpeed,\n peakSpeed,\n minSpeed,\n totalRetries: this.currentSession.totalRetries || 0,\n successRate,\n adaptiveChunkingEnabled:\n this.currentSession.adaptiveChunkingEnabled || false,\n startTime: this.currentSession.startTime || 0,\n endTime,\n };\n\n // Reset current session\n this.currentSession = {};\n\n return sessionMetrics;\n }\n\n getCurrentSessionMetrics(): Partial<UploadSessionMetrics> {\n return { ...this.currentSession };\n }\n\n getChunkHistory(count?: number): ChunkMetrics[] {\n const history = this.chunkHistory.slice();\n return count ? history.slice(-count) : history;\n }\n\n getPerformanceInsights(): PerformanceInsights {\n if (this.chunkHistory.length < 5) {\n return {\n overallEfficiency: 0,\n chunkingEffectiveness: 0,\n networkStability: 0,\n recommendations: [\"Insufficient data for analysis\"],\n optimalChunkSizeRange: { min: 256 * 1024, max: 2 * 1024 * 1024 },\n };\n }\n\n const successfulChunks = this.chunkHistory.filter((chunk) => chunk.success);\n const speeds = successfulChunks.map((chunk) => chunk.speed);\n\n // Calculate metrics\n const averageSpeed =\n speeds.length > 0\n ? 
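A hedged usage sketch of the `UploadMetrics` session lifecycle above. The `recordChunk` fields are inferred from how the class reads them (`size`, `duration`, `speed`, `retryCount`, `success`); the full `ChunkMetrics` type may carry more:

```typescript
const metrics = new UploadMetrics({ maxChunkHistory: 500 });
metrics.startSession("upload-abc123", 10 * 1024 * 1024, true);

// One successful 1 MB chunk that took 800 ms.
metrics.recordChunk({
  size: 1024 * 1024,
  duration: 800, // ms
  speed: (1024 * 1024) / 0.8, // bytes per second
  retryCount: 0,
  success: true,
});

const session = metrics.endSession();
console.log(session?.averageSpeed, session?.successRate);
```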
speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length\n : 0;\n const speedVariance = this.calculateVariance(speeds);\n const speedStdDev = Math.sqrt(speedVariance);\n const coefficientOfVariation = speedStdDev / averageSpeed;\n\n // Overall efficiency based on speed and retry rate\n const successRate = successfulChunks.length / this.chunkHistory.length;\n const speedScore = Math.min(\n 1,\n averageSpeed / this.config.performanceThresholds.fastSpeed,\n );\n const overallEfficiency = speedScore * 0.7 + successRate * 0.3;\n\n // Network stability (lower coefficient of variation = higher stability)\n const networkStability = Math.max(\n 0,\n 1 - Math.min(1, coefficientOfVariation),\n );\n\n // Chunking effectiveness based on how well chunk sizes correlate with performance\n const chunkingEffectiveness =\n this.calculateChunkingEffectiveness(successfulChunks);\n\n // Generate recommendations\n const recommendations = this.generateRecommendations(\n averageSpeed,\n successRate,\n coefficientOfVariation,\n );\n\n // Calculate optimal chunk size range\n const optimalChunkSizeRange =\n this.calculateOptimalChunkSizeRange(successfulChunks);\n\n return {\n overallEfficiency,\n chunkingEffectiveness,\n networkStability,\n recommendations,\n optimalChunkSizeRange,\n };\n }\n\n exportMetrics(): {\n session: Partial<UploadSessionMetrics>;\n chunks: ChunkMetrics[];\n insights: PerformanceInsights;\n } {\n return {\n session: this.getCurrentSessionMetrics(),\n chunks: this.getChunkHistory(),\n insights: this.getPerformanceInsights(),\n };\n }\n\n reset(): void {\n this.chunkHistory = [];\n this.currentSession = {};\n this.sessionStartTime = 0;\n }\n\n private calculateVariance(values: number[]): number {\n if (values.length === 0) return 0;\n\n const mean = values.reduce((sum, value) => sum + value, 0) / values.length;\n const squaredDifferences = values.map((value) => (value - mean) ** 2);\n return (\n squaredDifferences.reduce((sum, diff) => sum + diff, 0) / values.length\n );\n }\n\n private calculateChunkingEffectiveness(chunks: ChunkMetrics[]): number {\n if (chunks.length < 3) return 0.5;\n\n // Look for correlation between chunk size and upload speed\n // Better chunking should show consistent performance across different sizes\n const sizeGroups = this.groupChunksBySize(chunks);\n\n if (Object.keys(sizeGroups).length < 2) return 0.5;\n\n // Calculate coefficient of variation for each size group\n const groupVariations = Object.values(sizeGroups).map((group) => {\n const speeds = group.map((chunk) => chunk.speed);\n const mean =\n speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;\n const variance = this.calculateVariance(speeds);\n return Math.sqrt(variance) / mean;\n });\n\n // Lower average variation indicates better chunking effectiveness\n const averageVariation =\n groupVariations.reduce((sum, cv) => sum + cv, 0) / groupVariations.length;\n return Math.max(0, 1 - Math.min(1, averageVariation));\n }\n\n private groupChunksBySize(\n chunks: ChunkMetrics[],\n ): Record<string, ChunkMetrics[]> {\n const groups: Record<string, ChunkMetrics[]> = {};\n\n chunks.forEach((chunk) => {\n // Group by size ranges (64KB, 128KB, 256KB, 512KB, 1MB, 2MB, 4MB, 8MB+)\n let sizeGroup: string;\n if (chunk.size < 128 * 1024) sizeGroup = \"64KB\";\n else if (chunk.size < 256 * 1024) sizeGroup = \"128KB\";\n else if (chunk.size < 512 * 1024) sizeGroup = \"256KB\";\n else if (chunk.size < 1024 * 1024) sizeGroup = \"512KB\";\n else if (chunk.size < 2 * 1024 * 1024) sizeGroup = \"1MB\";\n else if 
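The scoring in `getPerformanceInsights` reduces to two normalized terms: throughput relative to the fast-speed threshold, blended 70/30 with the chunk success rate (stability is then one minus the capped coefficient of variation). The efficiency arithmetic, pulled out as a sketch:

```typescript
// Illustrative sketch of the overallEfficiency computation above.
function scoreEfficiency(
  averageSpeed: number, // bytes/s
  successRate: number, // 0..1
  fastSpeed: number, // bytes/s threshold; 5 MB/s by default above
): number {
  const speedScore = Math.min(1, averageSpeed / fastSpeed);
  return speedScore * 0.7 + successRate * 0.3;
}

// e.g. 2.5 MB/s with a 95% success rate against the 5 MB/s threshold:
// 0.5 * 0.7 + 0.95 * 0.3 = 0.635
scoreEfficiency(2.5 * 1024 * 1024, 0.95, 5 * 1024 * 1024); // 0.635
```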
(chunk.size < 4 * 1024 * 1024) sizeGroup = \"2MB\";\n else if (chunk.size < 8 * 1024 * 1024) sizeGroup = \"4MB\";\n else sizeGroup = \"8MB+\";\n\n if (!groups[sizeGroup]) groups[sizeGroup] = [];\n const group = groups[sizeGroup];\n if (group) group.push(chunk);\n });\n\n return groups;\n }\n\n private generateRecommendations(\n averageSpeed: number,\n successRate: number,\n coefficientOfVariation: number,\n ): string[] {\n const recommendations: string[] = [];\n\n if (averageSpeed < this.config.performanceThresholds.slowSpeed) {\n recommendations.push(\n \"Consider using smaller chunk sizes for better performance on slow connections\",\n );\n }\n\n if (averageSpeed > this.config.performanceThresholds.fastSpeed) {\n recommendations.push(\n \"Network is fast - larger chunk sizes may improve efficiency\",\n );\n }\n\n if (successRate < 0.9) {\n recommendations.push(\n \"High failure rate detected - consider more conservative chunking strategy\",\n );\n }\n\n if (coefficientOfVariation > 0.5) {\n recommendations.push(\n \"Network appears unstable - smaller, more frequent chunks may be more reliable\",\n );\n }\n\n if (\n coefficientOfVariation < 0.2 &&\n averageSpeed > this.config.performanceThresholds.slowSpeed\n ) {\n recommendations.push(\n \"Stable network detected - larger chunks may improve efficiency\",\n );\n }\n\n if (recommendations.length === 0) {\n recommendations.push(\n \"Performance appears optimal with current configuration\",\n );\n }\n\n return recommendations;\n }\n\n private calculateOptimalChunkSizeRange(chunks: ChunkMetrics[]): {\n min: number;\n max: number;\n } {\n if (chunks.length < 5) {\n return { min: 256 * 1024, max: 2 * 1024 * 1024 };\n }\n\n // Find chunks with best performance (top 30% by speed)\n const sortedBySpeed = chunks.slice().sort((a, b) => b.speed - a.speed);\n const topPerformers = sortedBySpeed.slice(\n 0,\n Math.ceil(chunks.length * 0.3),\n );\n\n const topSizes = topPerformers.map((chunk) => chunk.size);\n const minOptimal = Math.min(...topSizes);\n const maxOptimal = Math.max(...topSizes);\n\n return {\n min: Math.max(64 * 1024, minOptimal), // At least 64KB\n max: Math.min(32 * 1024 * 1024, maxOptimal), // At most 32MB\n };\n }\n}\n","import type { DataStoreCapabilities } from \"@uploadista/core/types\";\nimport {\n type NegotiatedStrategy,\n UploadStrategyNegotiator,\n type UploadStrategyOptions,\n} from \"@uploadista/core/upload\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport {\n defaultClientCapabilities,\n MockClientDataStore,\n} from \"../mock-data-store\";\nimport type { HttpClient } from \"../services/http-client\";\n\nexport type UploadStrategyConfig = {\n preferredStrategy?: \"single\" | \"parallel\" | \"auto\";\n minFileSizeForParallel?: number;\n enableCapabilityNegotiation?: boolean;\n onStrategySelected?: (strategy: {\n chosen: \"single\" | \"parallel\";\n chunkSize: number;\n parallelUploads: number;\n reasoning: string[];\n warnings: string[];\n }) => void;\n};\n\nexport type UploadClientOptions = {\n baseUrl: string;\n uploadBasePath?: string;\n storageId: string;\n retryDelays?: number[];\n chunkSize: number;\n parallelUploads?: number;\n parallelChunkSize?: number;\n uploadStrategy?: UploadStrategyConfig;\n};\n\nexport function createUploadStrategyNegotiator(\n dataStore: MockClientDataStore,\n): UploadStrategyNegotiator {\n return new UploadStrategyNegotiator(dataStore.getCapabilities(), (strategy) =>\n dataStore.validateUploadStrategy(strategy),\n );\n}\n\n/**\n * Fetch 
capabilities from server\n */\nexport async function fetchServerCapabilities(\n baseUrl: string,\n uploadBasePath: string,\n storageId: string,\n httpClient: HttpClient,\n): Promise<DataStoreCapabilities> {\n const capabilitiesUrl = `${baseUrl}/${uploadBasePath}/capabilities?storageId=${encodeURIComponent(storageId)}`;\n\n try {\n const response = await httpClient.request(capabilitiesUrl, {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n });\n\n if (!response.ok) {\n throw new Error(\n `Failed to fetch capabilities: ${response.status} ${response.statusText}`,\n );\n }\n\n const data = await response.json();\n return (data as { capabilities: DataStoreCapabilities }).capabilities;\n } catch (_error) {\n // Fall back to default capabilities if server fetch fails\n return defaultClientCapabilities;\n }\n}\n\n/**\n * Negotiate upload strategy based on capabilities and options\n */\nexport function negotiateUploadStrategy({\n capabilities,\n fileSize,\n chunkSize,\n parallelUploads,\n uploadLengthDeferred,\n strategyConfig,\n logger,\n}: {\n capabilities: DataStoreCapabilities;\n fileSize: number | null;\n chunkSize: number;\n parallelUploads: number;\n uploadLengthDeferred?: boolean;\n strategyConfig?: UploadStrategyConfig;\n logger: Logger;\n}): NegotiatedStrategy {\n if (strategyConfig?.enableCapabilityNegotiation !== false) {\n // Use capability negotiation with server-fetched capabilities\n const mockDataStore = new MockClientDataStore(capabilities);\n const negotiator = createUploadStrategyNegotiator(mockDataStore);\n\n const negotiationOptions: UploadStrategyOptions = {\n fileSize: fileSize || 0,\n preferredStrategy:\n strategyConfig?.preferredStrategy === \"auto\"\n ? undefined\n : strategyConfig?.preferredStrategy,\n preferredChunkSize: chunkSize,\n parallelUploads,\n minChunkSizeForParallel:\n strategyConfig?.minFileSizeForParallel || 10 * 1024 * 1024,\n };\n\n const negotiatedStrategy = negotiator.negotiateStrategy(negotiationOptions);\n\n // Log negotiation results\n logger.log(`Upload strategy negotiated: ${negotiatedStrategy.strategy}`);\n for (const reason of negotiatedStrategy.reasoning) {\n logger.log(` - ${reason}`);\n }\n for (const warning of negotiatedStrategy.warnings) {\n logger.log(` Warning: ${warning}`);\n }\n\n // Notify client of strategy selection if callback provided\n strategyConfig?.onStrategySelected?.({\n chosen: negotiatedStrategy.strategy,\n chunkSize: negotiatedStrategy.chunkSize,\n parallelUploads: negotiatedStrategy.parallelUploads,\n reasoning: negotiatedStrategy.reasoning,\n warnings: negotiatedStrategy.warnings,\n });\n\n return negotiatedStrategy;\n } else {\n // Fallback to legacy logic\n const shouldUseParallelUpload =\n parallelUploads > 1 &&\n fileSize &&\n fileSize > (strategyConfig?.minFileSizeForParallel || 10 * 1024 * 1024) &&\n !uploadLengthDeferred;\n\n return {\n strategy: shouldUseParallelUpload ? \"parallel\" : \"single\",\n chunkSize,\n parallelUploads: shouldUseParallelUpload ? parallelUploads : 1,\n reasoning: [\n `Legacy strategy selection: ${shouldUseParallelUpload ? 
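A hedged usage sketch for `fetchServerCapabilities` above. Note that it resolves with `defaultClientCapabilities` on any network or HTTP failure rather than rejecting, so callers do not need their own try/catch; `httpClient` is assumed to implement the `HttpClient` interface:

```typescript
const capabilities = await fetchServerCapabilities(
  "https://upload.example.com", // baseUrl
  "api/upload", // uploadBasePath
  "my-storage", // storageId
  httpClient,
);
console.log(capabilities);
```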
\"parallel\" : \"single\"}`,\n ],\n warnings: [],\n };\n }\n}\n\n/**\n * Validate upload client configuration against data store capabilities\n */\nexport function validateConfiguration(\n options: UploadClientOptions,\n capabilities: DataStoreCapabilities = defaultClientCapabilities,\n logger: Logger,\n): {\n valid: boolean;\n errors: string[];\n warnings: string[];\n} {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n // Validate against capabilities\n const mockDataStore = new MockClientDataStore(capabilities);\n const negotiator = createUploadStrategyNegotiator(mockDataStore);\n\n const validation = negotiator.validateConfiguration({\n fileSize: 0, // Placeholder for validation\n preferredStrategy:\n options.uploadStrategy?.preferredStrategy === \"auto\"\n ? undefined\n : options.uploadStrategy?.preferredStrategy,\n preferredChunkSize: options.chunkSize,\n parallelUploads: options.parallelUploads,\n });\n\n if (!validation.valid) {\n errors.push(...validation.errors);\n }\n\n // Additional client-specific validations\n if (options.parallelUploads && options.parallelUploads < 1) {\n errors.push(\"parallelUploads must be at least 1\");\n }\n\n if (options.chunkSize && options.chunkSize < 1024) {\n warnings.push(\"Chunk size below 1KB may impact performance\");\n }\n\n if (\n options.uploadStrategy?.preferredStrategy === \"parallel\" &&\n !options.parallelUploads\n ) {\n warnings.push(\n \"Parallel strategy requested but parallelUploads not configured\",\n );\n }\n\n // Log validation results\n if (errors.length > 0) {\n logger.log(\"Configuration validation errors:\");\n for (const error of errors) {\n logger.log(` Error: ${error}`);\n }\n }\n\n if (warnings.length > 0) {\n logger.log(\"Configuration validation warnings:\");\n for (const warning of warnings) {\n logger.log(` Warning: ${warning}`);\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n warnings,\n };\n}\n\n/**\n * Async configuration validation with server capabilities\n */\nexport async function validateConfigurationAsync(\n options: UploadClientOptions,\n httpClient: HttpClient,\n logger: Logger,\n): Promise<{\n valid: boolean;\n errors: string[];\n warnings: string[];\n capabilities: DataStoreCapabilities;\n}> {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n let capabilities: DataStoreCapabilities;\n try {\n capabilities = await fetchServerCapabilities(\n options.baseUrl,\n options.uploadBasePath || \"api/upload\",\n options.storageId,\n httpClient,\n );\n } catch (error) {\n logger.log(`Failed to fetch server capabilities for validation: ${error}`);\n capabilities = defaultClientCapabilities;\n warnings.push(\n \"Using default capabilities for validation - server unavailable\",\n );\n }\n\n const validation = validateConfiguration(options, capabilities, logger);\n errors.push(...validation.errors);\n warnings.push(...validation.warnings);\n\n return {\n valid: errors.length === 0,\n errors,\n warnings,\n capabilities,\n };\n}\n\n/**\n * Validate options and throw if invalid\n */\nexport function validateAndThrow(\n options: UploadClientOptions,\n logger: Logger,\n): void {\n const validationResult = validateConfiguration(\n options,\n defaultClientCapabilities,\n logger,\n );\n\n if (!validationResult.valid) {\n const errorMessage = `Upload client configuration validation failed: ${validationResult.errors.join(\", \")}`;\n logger.log(errorMessage);\n throw new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\", // Reusing existing error type\n message: 
errorMessage,\n });\n }\n}\n","import type { FlowData, FlowJob } from \"@uploadista/core/flow\";\nimport type {\n DataStoreCapabilities,\n InputFile,\n UploadFile,\n} from \"@uploadista/core/types\";\nimport { AuthHttpClient, type AuthManager } from \"../auth\";\nimport { UploadistaError, type UploadistaErrorName } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport { defaultClientCapabilities } from \"../mock-data-store\";\nimport type { AbortControllerLike } from \"../services/abort-controller-service\";\nimport type {\n ConnectionMetrics,\n DetailedConnectionMetrics,\n HttpClient,\n RequestBody,\n} from \"../services/http-client\";\nimport type {\n WebSocketFactory,\n WebSocketLike,\n} from \"../services/websocket-service\";\n\n// Error response type - matches server format\ntype ErrorResponse = {\n error?: string;\n message?: string;\n code?: string;\n details?: unknown;\n timestamp?: string;\n};\n\n/**\n * Maps server error codes to client error names\n * If no mapping exists, uses a default error name based on context\n */\nconst mapServerErrorCodeToClientName = (\n serverCode: string | undefined,\n defaultName: UploadistaErrorName,\n): UploadistaErrorName => {\n if (!serverCode) return defaultName;\n\n // Map common server error codes to client error names\n const errorMap: Record<string, UploadistaErrorName> = {\n FILE_NOT_FOUND: \"UPLOAD_NOT_FOUND\",\n UPLOAD_ID_NOT_FOUND: \"UPLOAD_NOT_FOUND\",\n FLOW_JOB_NOT_FOUND: \"JOB_NOT_FOUND\",\n FLOW_NODE_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_STRUCTURE_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_CYCLE_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_INPUT_VALIDATION_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_OUTPUT_VALIDATION_ERROR: \"FLOW_RUN_FAILED\",\n VALIDATION_ERROR: \"CREATE_UPLOAD_FAILED\",\n DATASTORE_NOT_FOUND: \"FLOW_RUN_FAILED\",\n };\n\n return errorMap[serverCode] || defaultName;\n};\n\n/**\n * Response from upload-related API calls.\n *\n * Contains the upload metadata and HTTP status code.\n */\nexport type UploadistaUploadResponse = {\n /** Upload file metadata, undefined if request failed */\n upload?: UploadFile;\n /** HTTP status code */\n status: number;\n};\n\n/**\n * Response from delete upload API call.\n */\nexport type UploadistaDeleteUploadResponse =\n | {\n /** Successfully deleted (no content) */\n status: 204;\n }\n | {\n /** Other status codes (e.g., 404, 500) */\n status: number;\n };\n\n/**\n * Response from flow retrieval API call.\n */\nexport type FlowResponse = {\n /** HTTP status code */\n status: number;\n /** Flow configuration and metadata */\n flow: FlowData;\n};\n\n/**\n * Unified Uploadista API interface combining upload and flow operations.\n *\n * This low-level API provides direct access to server endpoints for:\n * - Upload CRUD operations (create, get, delete, patch chunks)\n * - Flow operations (get, run, continue)\n * - Job status tracking\n * - WebSocket connections for real-time updates\n * - Server capabilities discovery\n * - Connection pooling metrics\n *\n * Most applications should use the higher-level {@link UploadistaClient} instead,\n * which provides a more convenient interface with automatic retry, resumption,\n * and smart chunking.\n *\n * @example Direct API usage (advanced)\n * ```typescript\n * const api = createUploadistaApi(baseUrl, basePath, {\n * httpClient,\n * logger,\n * authManager,\n * webSocketFactory,\n * });\n *\n * // Create an upload\n * const { upload } = await api.createUpload({\n * storageId: 'my-storage',\n * size: 1024000,\n * metadata: { filename: 'test.txt' },\n * 
});\n *\n * // Upload a chunk\n * const chunk = new Uint8Array(1024);\n * await api.uploadChunk(upload.id, chunk, {});\n *\n * // Check status\n * const { upload: updated } = await api.getUpload(upload.id);\n * console.log(`Progress: ${updated.offset}/${updated.size}`);\n * ```\n *\n * @see {@link createUploadistaApi} for creating an instance\n */\nexport type UploadistaApi = {\n /**\n * Retrieves upload metadata and current status.\n *\n * @param uploadId - Unique upload identifier\n * @returns Upload metadata including current offset and status\n * @throws {UploadistaError} If upload not found or request fails\n */\n getUpload: (uploadId: string) => Promise<UploadistaUploadResponse>;\n\n /**\n * Deletes an upload and its associated data.\n *\n * @param uploadId - Unique upload identifier\n * @returns Response with status 204 on success\n * @throws {UploadistaError} If upload not found or deletion fails\n */\n deleteUpload: (uploadId: string) => Promise<UploadistaDeleteUploadResponse>;\n\n /**\n * Creates a new upload on the server.\n *\n * @param body - Upload configuration including storageId, size, and metadata\n * @returns Created upload metadata with unique ID\n * @throws {UploadistaError} If creation fails or validation errors occur\n */\n createUpload: (body: InputFile) => Promise<UploadistaUploadResponse>;\n\n /**\n * Uploads a chunk of data to an existing upload.\n *\n * @param uploadId - Upload identifier to append data to\n * @param data - Chunk data bytes, or null to finalize without data\n * @param options - Upload options including abort controller and progress callback\n * @returns Updated upload metadata with new offset\n * @throws {UploadistaError} If chunk upload fails or upload is locked\n */\n uploadChunk: (\n uploadId: string,\n data: Uint8Array | null,\n options: {\n abortController?: AbortControllerLike;\n onProgress?: (bytes: number, total: number) => void;\n },\n ) => Promise<UploadistaUploadResponse>;\n\n /**\n * Retrieves flow configuration and metadata.\n *\n * @param flowId - Unique flow identifier\n * @returns Flow configuration including nodes and edges\n * @throws {UploadistaError} If flow not found\n */\n getFlow: (flowId: string) => Promise<FlowResponse>;\n\n /**\n * Executes a flow with the provided inputs.\n *\n * @param flowId - Flow to execute\n * @param storageId - Storage backend to use for flow outputs\n * @param inputs - Input data for flow nodes (keyed by node ID)\n * @returns Job metadata including job ID and initial state\n * @throws {UploadistaError} If flow execution fails or inputs are invalid\n */\n runFlow: (\n flowId: string,\n storageId: string,\n inputs: Record<string, unknown>,\n ) => Promise<{ status: number; job: FlowJob }>;\n\n /**\n * Continues a paused flow execution with new data.\n *\n * Used for interactive flows that wait for user input or external data.\n *\n * @param jobId - Job identifier for the paused flow\n * @param nodeId - Node ID where execution should continue\n * @param newData - Data to provide to the node\n * @param options - Options including content type for binary data\n * @returns Updated job metadata\n * @throws {UploadistaError} If job not found or continuation fails\n */\n resumeFlow: (\n jobId: string,\n nodeId: string,\n newData: unknown,\n options?: {\n contentType?: \"application/json\" | \"application/octet-stream\";\n },\n ) => Promise<FlowJob>;\n\n /**\n * Pauses a running flow execution.\n *\n * The flow will stop at the next node boundary (not mid-node execution).\n * Can be resumed later using 
resumeFlow.\n *\n * @param jobId - Job identifier for the running flow\n * @returns Updated job metadata with \"paused\" status\n * @throws {UploadistaError} If job not found or cannot be paused\n */\n pauseFlow: (jobId: string) => Promise<FlowJob>;\n\n /**\n * Cancels a running or paused flow execution.\n *\n * The flow will stop at the next node boundary (not mid-node execution).\n * Intermediate files are automatically cleaned up. This operation is terminal\n * and cannot be undone.\n *\n * @param jobId - Job identifier for the flow to cancel\n * @returns Updated job metadata with \"cancelled\" status\n * @throws {UploadistaError} If job not found or cannot be cancelled\n */\n cancelFlow: (jobId: string) => Promise<FlowJob>;\n\n /**\n * Retrieves current job status and outputs.\n *\n * Works for both upload and flow jobs.\n *\n * @param jobId - Job identifier\n * @returns Job metadata including state, progress, and outputs\n * @throws {UploadistaError} If job not found\n */\n getJobStatus: (jobId: string) => Promise<FlowJob>;\n\n /**\n * Opens a WebSocket connection for upload progress events.\n *\n * @param uploadId - Upload to monitor\n * @returns WebSocket instance for receiving real-time updates\n */\n openUploadWebSocket: (uploadId: string) => Promise<WebSocketLike>;\n\n /**\n * Opens a WebSocket connection for flow job events.\n *\n * @param jobId - Flow job to monitor\n * @returns WebSocket instance for receiving real-time updates\n */\n openFlowWebSocket: (jobId: string) => Promise<WebSocketLike>;\n\n /**\n * Closes a WebSocket connection.\n *\n * @param ws - WebSocket instance to close\n */\n closeWebSocket: (ws: WebSocketLike) => void;\n\n /**\n * Returns current connection pool metrics.\n *\n * @returns Basic metrics including active connections and reuse rate\n */\n getConnectionMetrics: () => ConnectionMetrics;\n\n /**\n * Returns detailed connection pool metrics with health diagnostics.\n *\n * @returns Comprehensive metrics including health status and recommendations\n */\n getDetailedConnectionMetrics: () => DetailedConnectionMetrics;\n\n /**\n * Pre-warms connections to the specified URLs.\n *\n * Useful for reducing latency on first upload by establishing\n * connections ahead of time.\n *\n * @param urls - URLs to pre-connect to\n */\n warmupConnections: (urls: string[]) => Promise<void>;\n\n /**\n * Fetches server capabilities for the specified storage backend.\n *\n * Returns information about chunk size constraints, supported features,\n * and storage-specific requirements. 
Falls back to default capabilities\n * if the request fails.\n *\n * @param storageId - Storage backend identifier\n * @returns Storage capabilities including chunk size limits\n */\n getCapabilities: (storageId: string) => Promise<DataStoreCapabilities>;\n};\n\n/**\n * Creates an Uploadista API instance for direct server communication.\n *\n * This factory creates a low-level API client that handles:\n * - HTTP requests to upload and flow endpoints\n * - Authentication via AuthManager (optional)\n * - WebSocket connections for real-time updates\n * - Error mapping from server to client error types\n * - Connection pooling and metrics\n *\n * Most applications should use {@link createUploadistaClient} instead,\n * which wraps this API with higher-level features like automatic retry,\n * resumption, and smart chunking.\n *\n * @param baseURL - Base URL of the Uploadista server (e.g., \"https://upload.example.com\")\n * @param uploadistBasePath - Base path for endpoints, typically \"uploadista\"\n * @param options - Configuration object\n * @param options.httpClient - HTTP client for making requests\n * @param options.logger - Optional logger for debugging\n * @param options.authManager - Optional authentication manager\n * @param options.webSocketFactory - Factory for creating WebSocket connections\n * @returns UploadistaApi instance\n *\n * @example Basic API instance\n * ```typescript\n * import { createUploadistaApi } from '@uploadista/client-core';\n *\n * const api = createUploadistaApi(\n * 'https://upload.example.com',\n * 'uploadista',\n * {\n * httpClient: myHttpClient,\n * logger: console,\n * webSocketFactory: {\n * create: (url) => new WebSocket(url),\n * },\n * }\n * );\n *\n * // Use the API directly\n * const { upload } = await api.createUpload({\n * storageId: 'my-storage',\n * size: 1024,\n * });\n * ```\n *\n * @example With authentication\n * ```typescript\n * const authManager = new DirectAuthManager(authConfig, platformService, logger);\n *\n * const api = createUploadistaApi(baseUrl, 'uploadista', {\n * httpClient,\n * logger,\n * authManager, // Automatically adds auth headers to requests\n * webSocketFactory,\n * });\n * ```\n *\n * @see {@link UploadistaApi} for the API interface\n * @see {@link createUploadistaClient} for the high-level client\n */\nexport function createUploadistaApi(\n baseURL: string,\n uploadistBasePath: string,\n {\n httpClient: baseHttpClient,\n logger,\n authManager,\n webSocketFactory,\n }: {\n httpClient: HttpClient;\n logger?: Logger;\n authManager?: AuthManager;\n webSocketFactory: WebSocketFactory;\n },\n): UploadistaApi {\n // Create base HTTP client with connection pooling\n\n // Wrap with auth if auth manager is provided\n const httpClient = authManager\n ? 
new AuthHttpClient(baseHttpClient, authManager)\n : baseHttpClient;\n\n // Construct endpoint URLs\n const uploadEndpoint = `${baseURL}/${uploadistBasePath}/api/upload`;\n const flowEndpoint = `${baseURL}/${uploadistBasePath}/api/flow`;\n const jobsEndpoint = `${baseURL}/${uploadistBasePath}/api/jobs`;\n\n // WebSocket URLs\n const wsBaseURL = baseURL.replace(\"http\", \"ws\");\n const uploadWsURL = `${wsBaseURL}/uploadista/ws/upload`;\n const flowWsURL = `${wsBaseURL}/uploadista/ws/flow`;\n\n /**\n * Helper function to extract auth token for WebSocket connection.\n * Supports both DirectAuthManager (extracts from headers) and UploadistaCloudAuthManager (gets cached token).\n */\n const getAuthTokenForWebSocket = async (\n manager: AuthManager,\n jobId?: string,\n ): Promise<string | null> => {\n logger?.log(`Getting auth token for WebSocket (jobId: ${jobId})`);\n\n // Check if this is a UploadistaCloudAuthManager (has attachToken method)\n if (\"attachToken\" in manager) {\n logger?.log(\"Detected UploadistaCloudAuthManager, calling attachToken\");\n const headers = await manager.attachToken({}, jobId);\n const authHeader = headers.Authorization;\n if (authHeader?.startsWith(\"Bearer \")) {\n logger?.log(\n \"Successfully extracted Bearer token from UploadistaCloudAuthManager\",\n );\n return authHeader.substring(7); // Remove \"Bearer \" prefix\n }\n logger?.log(\n `No valid Authorization header from UploadistaCloudAuthManager: ${authHeader}`,\n );\n }\n\n // Check if this is a DirectAuthManager (has attachCredentials method)\n if (\"attachCredentials\" in manager) {\n logger?.log(\"Detected DirectAuthManager, calling attachCredentials\");\n const headers = await manager.attachCredentials({});\n const authHeader = headers.Authorization;\n if (authHeader) {\n logger?.log(\n \"Successfully extracted Authorization header from DirectAuthManager\",\n );\n // Support both \"Bearer token\" and plain token formats\n return authHeader.startsWith(\"Bearer \")\n ? authHeader.substring(7)\n : authHeader;\n }\n logger?.log(`No Authorization header from DirectAuthManager`);\n }\n\n logger?.log(\"No auth token could be extracted from auth manager\");\n return null;\n };\n\n return {\n // Upload operations\n getUpload: async (uploadId: string) => {\n const res = await httpClient.request(`${uploadEndpoint}/${uploadId}`);\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"UPLOAD_NOT_FOUND\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Upload ${uploadId} not found`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as UploadFile;\n return { status: res.status, upload: data };\n },\n\n deleteUpload: async (uploadId: string) => {\n const res = await httpClient.request(`${uploadEndpoint}/${uploadId}`, {\n method: \"DELETE\",\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"DELETE_UPLOAD_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to delete upload ${uploadId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? 
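The header handling inside `getAuthTokenForWebSocket` above boils down to one normalization step: accept both `Bearer <token>` and a bare token value. As a standalone sketch:

```typescript
// Illustrative sketch of the Bearer-prefix handling above.
function extractToken(authHeader: string | undefined): string | null {
  if (!authHeader) return null;
  // Strip a "Bearer " prefix if present; otherwise pass the value through.
  return authHeader.startsWith("Bearer ")
    ? authHeader.substring(7)
    : authHeader;
}

extractToken("Bearer abc123"); // "abc123"
extractToken("abc123"); // "abc123"
extractToken(undefined); // null
```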
`${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n return { status: res.status };\n },\n\n createUpload: async (data: InputFile) => {\n logger?.log(`createUpload ${JSON.stringify(data)}`);\n const res = await httpClient.request(uploadEndpoint, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(data),\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"CREATE_UPLOAD_FAILED\",\n );\n const errorMessage =\n errorData.error || errorData.message || \"Failed to create upload\";\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const responseData = (await res.json()) as UploadFile;\n logger?.log(JSON.stringify(responseData));\n return { status: res.status, upload: responseData };\n },\n\n uploadChunk: async (uploadId, data, { abortController }) => {\n try {\n const res = await httpClient.request(`${uploadEndpoint}/${uploadId}`, {\n method: \"PATCH\",\n headers: {\n \"Content-Type\": \"application/octet-stream\",\n },\n body: data,\n signal: abortController?.signal,\n });\n\n if (!res.ok) {\n const errorData = (await res\n .json()\n .catch(() => ({}))) as ErrorResponse;\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message:\n errorData.error || errorData.message || \"Unknown network error\",\n status: res.status,\n });\n }\n\n const responseData = (await res.json()) as UploadFile;\n return { status: res.status, upload: responseData };\n } catch (err) {\n if (err instanceof UploadistaError) {\n throw err;\n }\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Network error\",\n cause: err as Error,\n });\n }\n },\n\n // Flow operations\n getFlow: async (flowId: string) => {\n const res = await httpClient.request(`${flowEndpoint}/${flowId}`);\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_NOT_FOUND\",\n );\n const errorMessage =\n errorData.error || errorData.message || `Flow ${flowId} not found`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowData;\n logger?.log(`getFlow: ${flowId}`);\n return { status: res.status, flow: data };\n },\n\n runFlow: async (\n flowId: string,\n storageId: string,\n inputs: Record<string, unknown>,\n ) => {\n logger?.log(`runFlow: ${flowId} with storage: ${storageId}`);\n const res = await httpClient.request(\n `${flowEndpoint}/${flowId}/${storageId}`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({ inputs }),\n },\n );\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_RUN_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to run flow ${flowId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? 
`${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n logger?.log(`runFlow response: ${JSON.stringify(data)}`);\n return { status: res.status, job: data };\n },\n\n resumeFlow: async (\n jobId: string,\n nodeId: string,\n newData: unknown,\n options?: {\n contentType?: \"application/json\" | \"application/octet-stream\";\n },\n ) => {\n const contentType = options?.contentType || \"application/json\";\n\n let body: RequestBody;\n if (contentType === \"application/octet-stream\") {\n // For octet-stream, newData should be a Uint8Array or similar\n body = newData as RequestBody;\n } else {\n // For JSON, wrap newData in an object\n body = JSON.stringify({ newData });\n }\n\n const res = await httpClient.request(\n `${jobsEndpoint}/${jobId}/resume/${nodeId}`,\n {\n method: \"PATCH\",\n headers: {\n \"Content-Type\": contentType,\n },\n body,\n },\n );\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_RESUMED_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to resume flow for job ${jobId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n return data;\n },\n\n pauseFlow: async (jobId: string) => {\n const res = await httpClient.request(`${jobsEndpoint}/${jobId}/pause`, {\n method: \"POST\",\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_PAUSE_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to pause flow for job ${jobId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n logger?.log(`Flow paused: ${jobId}, status: ${data.status}`);\n return data;\n },\n\n cancelFlow: async (jobId: string) => {\n const res = await httpClient.request(`${jobsEndpoint}/${jobId}/cancel`, {\n method: \"POST\",\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_CANCEL_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to cancel flow for job ${jobId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n logger?.log(`Flow cancelled: ${jobId}, status: ${data.status}`);\n return data;\n },\n\n // Unified job operations\n getJobStatus: async (jobId: string) => {\n const res = await httpClient.request(`${jobsEndpoint}/${jobId}/status`);\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"JOB_NOT_FOUND\",\n );\n const errorMessage =\n errorData.error || errorData.message || `Job ${jobId} not found`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? 
`${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n return data;\n },\n\n // WebSocket operations\n openUploadWebSocket: async (uploadId: string) => {\n let wsUrl = `${uploadWsURL}/${uploadId}`;\n\n // Attach auth token if auth manager is configured\n // Note: For cookie-based auth (e.g., HttpOnly cookies with better-auth),\n // no token is needed as cookies are automatically sent by the browser\n if (authManager) {\n try {\n const token = await getAuthTokenForWebSocket(authManager, uploadId);\n if (token) {\n wsUrl += `?token=${encodeURIComponent(token)}`;\n logger?.log(`WebSocket token attached for upload: ${uploadId}`);\n } else {\n // No token means cookie-based auth - this is fine\n logger?.log(\n `No token for upload WebSocket (using cookie-based auth): ${uploadId}`,\n );\n }\n } catch (error) {\n const errorMessage =\n error instanceof Error ? error.message : String(error);\n logger?.log(\n `Error getting auth token for upload WebSocket: ${errorMessage}`,\n );\n // Don't throw - allow cookie-based auth to proceed\n logger?.log(\n `Proceeding with cookie-based authentication for upload WebSocket: ${uploadId}`,\n );\n }\n }\n\n const ws = webSocketFactory.create(wsUrl);\n\n ws.onopen = () => {\n logger?.log(`Upload WebSocket connection opened for: ${uploadId}`);\n };\n\n ws.onclose = () => {\n logger?.log(`Upload WebSocket connection closed for: ${uploadId}`);\n };\n\n ws.onerror = (error) => {\n logger?.log(`Upload WebSocket error for ${uploadId}: ${error}`);\n };\n\n return ws;\n },\n\n openFlowWebSocket: async (jobId: string) => {\n let wsUrl = `${flowWsURL}/${jobId}`;\n\n // Attach auth token if auth manager is configured\n // Note: For cookie-based auth (e.g., HttpOnly cookies with better-auth),\n // no token is needed as cookies are automatically sent by the browser\n if (authManager) {\n try {\n const token = await getAuthTokenForWebSocket(authManager, jobId);\n if (token) {\n wsUrl += `?token=${encodeURIComponent(token)}`;\n logger?.log(`WebSocket token attached for flow job: ${jobId}`);\n } else {\n // No token means cookie-based auth - this is fine\n logger?.log(\n `No token for flow WebSocket (using cookie-based auth): ${jobId}`,\n );\n }\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n logger?.log(\n `Error getting auth token for flow WebSocket: ${errorMessage}`,\n );\n // Don't throw - allow cookie-based auth to proceed\n logger?.log(\n `Proceeding with cookie-based authentication for flow WebSocket: ${jobId}`,\n );\n }\n }\n\n const ws = webSocketFactory.create(wsUrl);\n\n ws.onopen = () => {\n logger?.log(`Flow WebSocket connection opened for job: ${jobId}`);\n };\n\n ws.onclose = () => {\n logger?.log(`Flow WebSocket connection closed for job: ${jobId}`);\n };\n\n ws.onerror = (error) => {\n logger?.log(`Flow WebSocket error for job ${jobId}: ${error}`);\n };\n\n return ws;\n },\n\n closeWebSocket: (ws: WebSocketLike) => {\n ws.close();\n },\n\n // Connection metrics\n getConnectionMetrics: () => {\n return httpClient.getMetrics();\n },\n\n getDetailedConnectionMetrics: () => {\n return httpClient.getDetailedMetrics();\n },\n\n warmupConnections: async (urls: string[]) => {\n return httpClient.warmupConnections(urls);\n },\n\n // Capabilities\n getCapabilities: async (storageId: string) => {\n const capabilitiesUrl = `${uploadEndpoint}/capabilities?storageId=${encodeURIComponent(storageId)}`;\n\n try {\n const response = await httpClient.request(capabilitiesUrl, {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n });\n\n if (!response.ok) {\n logger?.log(\n `Failed to fetch capabilities: ${response.status} ${response.statusText}`,\n );\n return defaultClientCapabilities;\n }\n\n const data = await response.json();\n return (data as { capabilities: DataStoreCapabilities }).capabilities;\n } catch (error) {\n logger?.log(\n `Failed to fetch server capabilities, using defaults: ${error}`,\n );\n return defaultClientCapabilities;\n }\n },\n };\n}\n","import type { FlowEvent } from \"@uploadista/core/flow\";\nimport type { UploadEvent } from \"@uploadista/core/types\";\nimport { webSocketMessageSchema } from \"@uploadista/core/types\";\nimport type { Logger } from \"../logger\";\nimport type { WebSocketLike } from \"../services/websocket-service\";\nimport type { UploadistaApi } from \"./uploadista-api\";\n\nexport type UploadistaEvent = UploadEvent | FlowEvent;\n\nexport type UploadistaWebSocketEventHandler = (event: UploadistaEvent) => void;\n\nexport type UploadistaWebSocketMessage =\n | { type: \"connection\"; message: string; id: string; timestamp: string }\n | {\n type: \"subscribed\";\n payload: { uploadId?: string; jobId?: string };\n timestamp: string;\n }\n | { type: \"error\"; message: string; code?: string }\n | { type: \"pong\"; timestamp: string }\n | { type: \"upload_event\"; payload: UploadEvent }\n | { type: \"flow_event\"; payload: FlowEvent };\n\n/**\n * Unified WebSocket management for both upload and flow events\n */\nexport class UploadistaWebSocketManager {\n private uploadWebsockets = new Map<string, WebSocketLike>();\n private flowWebsockets = new Map<string, WebSocketLike>();\n\n constructor(\n private uploadistaApi: UploadistaApi,\n private logger: Logger,\n private onEvent?: UploadistaWebSocketEventHandler,\n ) {}\n\n /**\n * Open a WebSocket connection for upload events\n */\n async openUploadWebSocket(uploadId: string): Promise<WebSocketLike> {\n // Close existing connection if any\n this.closeUploadWebSocket(uploadId);\n\n const ws = await this.uploadistaApi.openUploadWebSocket(uploadId);\n this.uploadWebsockets.set(uploadId, ws);\n\n ws.onmessage = (event) => {\n try {\n const parsedEvent = webSocketMessageSchema.safeParse(\n JSON.parse(event.data),\n );\n\n if 
(parsedEvent.success) {\n if (parsedEvent.data.type === \"upload_event\") {\n this.onEvent?.(parsedEvent.data.payload);\n }\n } else {\n this.logger.error(\n `Error parsing upload event: ${parsedEvent.error.message}`,\n );\n }\n } catch (error) {\n this.logger.error(`Error parsing upload event: ${error}`);\n }\n };\n\n ws.onerror = (error) => {\n this.logger.error(`Upload WebSocket error for ${uploadId}: ${error}`);\n };\n\n ws.onclose = (event) => {\n this.logger.log(\n `Upload WebSocket closed for ${uploadId}, \\n code: ${event.code as number}, reason: ${event.reason as string}`,\n );\n this.uploadWebsockets.delete(uploadId);\n };\n\n return ws;\n }\n\n /**\n * Open a WebSocket connection for flow/job events\n */\n async openFlowWebSocket(jobId: string): Promise<WebSocketLike> {\n // Close existing connection if any\n this.closeFlowWebSocket(jobId);\n\n const ws = await this.uploadistaApi.openFlowWebSocket(jobId);\n this.flowWebsockets.set(jobId, ws);\n\n ws.onmessage = (event) => {\n try {\n const message = JSON.parse(event.data) as UploadistaWebSocketMessage;\n\n switch (message.type) {\n case \"connection\":\n this.logger.log(`Flow WebSocket connected for job: ${message.id}`);\n break;\n case \"subscribed\":\n this.logger.log(\n `Flow WebSocket subscribed for job: ${message.payload.jobId}`,\n );\n break;\n case \"error\":\n this.logger.error(\n `Flow WebSocket error: ${message.message} for job ${jobId} with code ${message.code}`,\n );\n break;\n case \"pong\":\n this.logger.log(`Flow WebSocket pong received for job: ${jobId}`);\n break;\n case \"flow_event\":\n this.onEvent?.(message.payload);\n break;\n default:\n this.logger.warn(\n `Unknown flow WebSocket message type: ${message.type}`,\n );\n }\n } catch (error) {\n this.logger.error(`Error parsing flow WebSocket message: ${error}`);\n }\n };\n\n ws.onerror = (error) => {\n this.logger.error(`Flow WebSocket error for job ${jobId}: ${error}`);\n };\n\n ws.onclose = (event) => {\n this.logger.log(\n `Flow WebSocket closed for job ${jobId}, \\n code: ${event.code as number}, reason: ${event.reason as string}`,\n );\n this.flowWebsockets.delete(jobId);\n };\n\n return ws;\n }\n\n /**\n * Open a unified WebSocket connection - automatically determines if it's for upload or flow\n * based on the ID format (upload IDs typically start with 'upload-', job IDs start with 'job-')\n */\n async openWebSocket(id: string): Promise<WebSocketLike> {\n // Heuristic: if ID starts with 'upload-' or contains upload-related patterns, treat as upload\n // Otherwise, treat as flow/job\n if (id.startsWith(\"upload-\") || id.includes(\"upload\")) {\n return await this.openUploadWebSocket(id);\n }\n return await this.openFlowWebSocket(id);\n }\n\n /**\n * Close upload WebSocket connection\n */\n closeUploadWebSocket(uploadId: string): void {\n const ws = this.uploadWebsockets.get(uploadId);\n if (ws) {\n this.uploadistaApi.closeWebSocket(ws);\n this.uploadWebsockets.delete(uploadId);\n }\n }\n\n /**\n * Close flow WebSocket connection\n */\n closeFlowWebSocket(jobId: string): void {\n const ws = this.flowWebsockets.get(jobId);\n if (ws) {\n this.uploadistaApi.closeWebSocket(ws);\n this.flowWebsockets.delete(jobId);\n }\n }\n\n /**\n * Close WebSocket connection by ID (auto-detects type)\n */\n closeWebSocket(id: string): void {\n // Try both maps\n this.closeUploadWebSocket(id);\n this.closeFlowWebSocket(id);\n }\n\n /**\n * Close all WebSocket connections (both upload and flow)\n */\n closeAll(): void {\n // Close all upload websockets\n for (const 
[uploadId, ws] of this.uploadWebsockets.entries()) {\n this.uploadistaApi.closeWebSocket(ws);\n this.uploadWebsockets.delete(uploadId);\n }\n\n // Close all flow websockets\n for (const [jobId, ws] of this.flowWebsockets.entries()) {\n this.uploadistaApi.closeWebSocket(ws);\n this.flowWebsockets.delete(jobId);\n }\n }\n\n /**\n * Send ping to flow WebSocket\n */\n sendPing(jobId: string): boolean {\n const ws = this.flowWebsockets.get(jobId);\n if (ws && ws.readyState === ws.OPEN) {\n ws.send(\n JSON.stringify({\n type: \"ping\",\n timestamp: new Date().toISOString(),\n }),\n );\n return true;\n }\n return false;\n }\n\n /**\n * Get upload WebSocket by ID\n */\n getUploadWebSocket(uploadId: string): WebSocketLike | undefined {\n return this.uploadWebsockets.get(uploadId);\n }\n\n /**\n * Get flow WebSocket by ID\n */\n getFlowWebSocket(jobId: string): WebSocketLike | undefined {\n return this.flowWebsockets.get(jobId);\n }\n\n /**\n * Check if upload WebSocket is connected\n */\n isUploadConnected(uploadId: string): boolean {\n const ws = this.uploadWebsockets.get(uploadId);\n // Guard against a missing socket: `ws?.readyState === ws?.OPEN` would compare undefined to undefined and report connected\n return ws !== undefined && ws.readyState === ws.OPEN;\n }\n\n /**\n * Check if flow WebSocket is connected\n */\n isFlowConnected(jobId: string): boolean {\n const ws = this.flowWebsockets.get(jobId);\n return ws !== undefined && ws.readyState === ws.OPEN;\n }\n\n /**\n * Check if WebSocket is connected (auto-detects type)\n */\n isConnected(id: string): boolean {\n return this.isUploadConnected(id) || this.isFlowConnected(id);\n }\n\n /**\n * Get total number of active WebSocket connections\n */\n getConnectionCount(): number {\n return this.uploadWebsockets.size + this.flowWebsockets.size;\n }\n\n /**\n * Get connection counts by type\n */\n getConnectionCountByType(): {\n upload: number;\n flow: number;\n total: number;\n } {\n return {\n upload: this.uploadWebsockets.size,\n flow: this.flowWebsockets.size,\n total: this.uploadWebsockets.size + this.flowWebsockets.size,\n };\n }\n}\n","import type { FlowJob } from \"@uploadista/core/flow\";\nimport type { DataStoreCapabilities } from \"@uploadista/core/types\";\nimport type { AuthConfig, AuthManager } from \"../auth\";\nimport {\n DirectAuthManager,\n NoAuthManager,\n UploadistaCloudAuthManager,\n} from \"../auth\";\nimport type { Logger } from \"../logger\";\nimport { createLogger } from \"../logger\";\nimport { defaultClientCapabilities } from \"../mock-data-store\";\nimport { NetworkMonitor, type NetworkMonitorConfig } from \"../network-monitor\";\nimport type { AbortControllerFactory } from \"../services/abort-controller-service\";\nimport type { ChecksumService } from \"../services/checksum-service\";\nimport type { FileReaderService } from \"../services/file-reader-service\";\nimport type { FingerprintService } from \"../services/fingerprint-service\";\nimport type { ConnectionPoolConfig, HttpClient } from \"../services/http-client\";\nimport type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type {\n WebSocketFactory,\n WebSocketLike,\n} from \"../services/websocket-service\";\nimport { SmartChunker, type SmartChunkerConfig } from \"../smart-chunker\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport type { FlowUploadConfig } from \"../types/flow-upload-config\";\nimport { detectInputType } from \"../utils/input-detection\";\n\nimport { performFlowUpload, startFlowUpload } from \"../upload/flow-upload\";\nimport {\n finalizeFlowInput,\n 
initializeFlowInput,\n uploadInputChunks,\n} from \"../upload/flow-upload-orchestrator\";\nimport { startParallelUpload } from \"../upload/parallel-upload\";\nimport {\n type Callbacks,\n performUpload,\n startSingleUpload,\n} from \"../upload/single-upload\";\nimport { abort, terminate } from \"../upload/upload-manager\";\nimport {\n UploadMetrics,\n type UploadMetricsConfig,\n} from \"../upload/upload-metrics\";\nimport {\n findPreviousUploads,\n resumeFromPreviousUpload,\n} from \"../upload/upload-storage\";\nimport {\n negotiateUploadStrategy,\n type UploadStrategyConfig,\n validateAndThrow,\n validateConfiguration,\n} from \"../upload/upload-strategy\";\nimport { calculateFileSize } from \"../upload/upload-utils\";\nimport { createUploadistaApi } from \"./uploadista-api\";\nimport {\n type UploadistaWebSocketEventHandler,\n UploadistaWebSocketManager,\n} from \"./uploadista-websocket-manager\";\n\n/**\n * Options for individual upload operations.\n *\n * Extends the base upload callbacks with configuration for deferred length,\n * size overrides, metadata, and checksum computation.\n */\nexport type UploadistaUploadOptions = {\n /**\n * Whether to defer specifying the upload size until later.\n * Useful for streaming uploads where size isn't known upfront.\n * Defaults to false.\n */\n uploadLengthDeferred?: boolean;\n\n /**\n * Manual override for upload size in bytes.\n * If not provided, size is determined from the file/blob.\n */\n uploadSize?: number;\n\n /**\n * Custom metadata to attach to the upload.\n * Stored as key-value pairs on the server.\n */\n metadata?: Record<string, string>;\n\n /**\n * Whether to compute checksums for uploaded chunks.\n * Enables integrity verification but adds computational overhead.\n * Defaults to false.\n */\n computeChecksum?: boolean;\n\n /**\n * Checksum algorithm to use (e.g., \"sha256\", \"md5\").\n * Only relevant if computeChecksum is true.\n */\n checksumAlgorithm?: string;\n} & Callbacks;\n\n/**\n * Configuration options for creating an Uploadista client.\n *\n * This comprehensive configuration object allows customization of all aspects\n * of upload behavior including chunking, retries, authentication, storage,\n * network monitoring, and platform-specific services.\n *\n * @template UploadInput - The platform-specific file/blob type (e.g., File, Blob, Buffer)\n */\nexport type UploadistaClientOptions<UploadInput> = {\n /** Base URL of the Uploadista server (e.g., \"https://upload.example.com\") */\n baseUrl: string;\n\n /** Base path for Uploadista endpoints. Defaults to \"uploadista\" */\n uploadistaBasePath?: string;\n\n /** Storage backend identifier configured on the server */\n storageId: string;\n\n /** Retry delay intervals in milliseconds. Defaults to [1000, 3000, 5000] */\n retryDelays?: number[];\n\n /** Default chunk size in bytes for uploads */\n chunkSize: number;\n\n /** Number of parallel upload streams. Defaults to 1 (sequential) */\n parallelUploads?: number;\n\n /** Chunk size for parallel uploads. 
Required if parallelUploads > 1 */\n parallelChunkSize?: number;\n\n /** Service for computing checksums of uploaded chunks */\n checksumService: ChecksumService;\n\n /** Strategy configuration for determining upload approach (single/parallel/chunked) */\n uploadStrategy?: UploadStrategyConfig;\n\n /** Smart chunking configuration for adaptive chunk sizes based on network conditions */\n smartChunking?: SmartChunkerConfig;\n\n /** Network monitoring configuration for tracking upload performance */\n networkMonitoring?: NetworkMonitorConfig;\n\n /** Upload metrics configuration for performance insights */\n uploadMetrics?: UploadMetricsConfig;\n\n /** HTTP client with connection pooling support */\n httpClient: HttpClient;\n\n /** Service for generating unique IDs */\n generateId: IdGenerationService;\n\n /** Client-side storage for upload resumption data */\n clientStorage: ClientStorage;\n\n /** Platform-specific file reading service */\n fileReader: FileReaderService<UploadInput>;\n\n /** Logger for debugging and monitoring */\n logger: Logger;\n\n /** Service for computing file fingerprints for resumption */\n fingerprintService: FingerprintService<UploadInput>;\n\n /** Whether to store fingerprints for upload resumption. Defaults to true */\n storeFingerprintForResuming: boolean;\n\n /** Factory for creating WebSocket connections */\n webSocketFactory: WebSocketFactory;\n\n /** Factory for creating abort controllers */\n abortControllerFactory: AbortControllerFactory;\n\n /** Platform-specific service for timers and async operations */\n platformService: PlatformService;\n\n /** Global error handler for all upload operations */\n onError?: (error: Error) => void;\n\n /** WebSocket event handler for real-time upload/flow events */\n onEvent?: UploadistaWebSocketEventHandler;\n\n /**\n * Optional authentication configuration.\n * Supports two modes:\n * - Direct: Bring your own auth (headers, cookies, custom tokens)\n * - UploadistaCloud: Standard JWT token exchange with auth server\n *\n * If omitted, client operates in no-auth mode (backward compatible).\n *\n * @example Direct mode with Bearer token\n * ```typescript\n * auth: {\n * mode: 'direct',\n * getCredentials: () => ({\n * headers: { 'Authorization': 'Bearer token123' }\n * })\n * }\n * ```\n *\n * @example UploadistaCloud mode with auth server\n * ```typescript\n * auth: {\n * mode: 'uploadista-cloud',\n * authServerUrl: 'https://auth.myapp.com/token',\n * getCredentials: () => ({ username: 'user', password: 'pass' })\n * }\n * ```\n */\n auth?: AuthConfig;\n};\n\n/**\n * Default connection pooling configuration with health monitoring.\n *\n * Optimized for typical upload scenarios with support for HTTP/2 multiplexing,\n * connection reuse, and automatic retry on connection errors.\n */\nexport const defaultConnectionPoolingConfig: ConnectionPoolConfig = {\n /** Maximum concurrent connections per host */\n maxConnectionsPerHost: 8,\n /** Timeout for establishing new connections in milliseconds */\n connectionTimeout: 20000,\n /** Keep-alive timeout for idle connections in milliseconds */\n keepAliveTimeout: 90000,\n /** Enable HTTP/2 for connection multiplexing */\n enableHttp2: true,\n /** Automatically retry requests on connection errors */\n retryOnConnectionError: true,\n};\n\n/**\n * Creates a unified Uploadista client for file uploads and flow processing.\n *\n * This is the primary factory function for creating an Uploadista client instance.\n * It configures all upload capabilities including:\n * - Resumable chunked 
uploads with automatic retry\n * - Parallel upload streams for large files\n * - Smart chunking based on network conditions\n * - Flow-based file processing pipelines\n * - WebSocket support for real-time progress\n * - Authentication (direct, uploadista-cloud, or no-auth modes)\n *\n * The client automatically:\n * - Fetches server capabilities and adapts upload strategy\n * - Monitors network performance for optimal chunking\n * - Stores upload state for resumption across sessions\n * - Manages WebSocket connections for progress tracking\n *\n * @template UploadInput - Platform-specific file type (File, Blob, Buffer, etc.)\n * @param options - Comprehensive client configuration\n * @returns Uploadista client instance with upload and flow methods\n *\n * @example Basic browser setup\n * ```typescript\n * import { createUploadistaClient } from '@uploadista/client-core';\n * import { browserServices } from '@uploadista/client-browser';\n *\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'my-storage',\n * chunkSize: 5 * 1024 * 1024, // 5MB chunks\n * ...browserServices,\n * });\n *\n * // Upload a file\n * const { abort } = await client.upload(file, {\n * onProgress: (progress) => console.log(`${progress}% complete`),\n * onSuccess: (result) => console.log('Upload complete:', result),\n * });\n * ```\n *\n * @example Upload with flow processing\n * ```typescript\n * const client = createUploadistaClient(config);\n *\n * // Upload and process through a flow\n * const { abort, jobId } = await client.uploadWithFlow(file, {\n * flowId: 'image-optimization-flow',\n * storageId: 'images',\n * outputNodeId: 'optimized-output',\n * }, {\n * onProgress: (progress) => console.log(`${progress}%`),\n * onSuccess: (result) => console.log('Processed:', result),\n * });\n *\n * // Monitor job status\n * const status = await client.getJobStatus(jobId);\n * ```\n *\n * @example Parallel uploads for large files\n * ```typescript\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'large-files',\n * chunkSize: 10 * 1024 * 1024, // 10MB\n * parallelUploads: 4, // 4 concurrent streams\n * parallelChunkSize: 5 * 1024 * 1024, // 5MB per stream\n * ...browserServices,\n * });\n *\n * await client.upload(largeFile);\n * ```\n *\n * @example With authentication\n * ```typescript\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'protected',\n * chunkSize: 5 * 1024 * 1024,\n * auth: {\n * mode: 'direct',\n * getCredentials: async () => ({\n * headers: {\n * 'Authorization': `Bearer ${await getToken()}`,\n * },\n * }),\n * },\n * ...browserServices,\n * });\n * ```\n *\n * @example Smart chunking with network monitoring\n * ```typescript\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'adaptive',\n * chunkSize: 1 * 1024 * 1024, // Fallback: 1MB\n * smartChunking: {\n * enabled: true,\n * minChunkSize: 256 * 1024, // 256KB min\n * maxChunkSize: 10 * 1024 * 1024, // 10MB max\n * },\n * networkMonitoring: {\n * slowThreshold: 50 * 1024, // 50 KB/s\n * fastThreshold: 5 * 1024 * 1024, // 5 MB/s\n * },\n * ...browserServices,\n * });\n *\n * // Monitor network conditions\n * const condition = client.getNetworkCondition();\n * console.log(`Network: ${condition.type} (confidence: ${condition.confidence})`);\n * ```\n *\n * @see {@link UploadistaClientOptions} for full configuration options\n * @see {@link 
UploadistaUploadOptions} for per-upload options\n */\nexport function createUploadistaClient<UploadInput>({\n baseUrl: _baseUrl,\n uploadistaBasePath = \"uploadista\",\n storageId,\n retryDelays = [1000, 3000, 5000],\n chunkSize,\n parallelUploads = 1,\n parallelChunkSize,\n uploadStrategy,\n smartChunking,\n networkMonitoring,\n uploadMetrics,\n checksumService,\n onEvent,\n generateId,\n httpClient,\n logger = createLogger(true),\n fileReader,\n fingerprintService,\n clientStorage,\n storeFingerprintForResuming = true,\n webSocketFactory,\n abortControllerFactory,\n platformService,\n auth,\n}: UploadistaClientOptions<UploadInput>) {\n const baseUrl = _baseUrl.replace(/\\/$/, \"\");\n\n // Create auth manager based on configuration\n const authManager: AuthManager = auth\n ? auth.mode === \"direct\"\n ? new DirectAuthManager(auth, platformService, logger)\n : new UploadistaCloudAuthManager(auth, httpClient)\n : new NoAuthManager();\n\n // Log auth mode for debugging (without exposing credentials)\n if (auth) {\n logger.log(\n `Authentication enabled in ${auth.mode} mode${auth.mode === \"uploadista-cloud\" ? ` (server: ${auth.authServerUrl})` : \"\"}`,\n );\n }\n\n // Create the unified API with auth support\n const uploadistaApi = createUploadistaApi(baseUrl, uploadistaBasePath, {\n logger,\n httpClient,\n authManager,\n webSocketFactory,\n });\n\n // Initialize smart chunking components\n const networkMonitor = new NetworkMonitor(networkMonitoring);\n const metrics = new UploadMetrics(uploadMetrics);\n\n // Cache for server capabilities\n let cachedCapabilities: DataStoreCapabilities | null = null;\n\n const getCapabilities = async (): Promise<DataStoreCapabilities> => {\n if (cachedCapabilities) {\n return cachedCapabilities;\n }\n cachedCapabilities = await uploadistaApi.getCapabilities(storageId);\n return cachedCapabilities;\n };\n\n // Initialize smart chunker with datastore constraints from server capabilities\n let smartChunker: SmartChunker;\n const initializeSmartChunker = async () => {\n if (smartChunker) return smartChunker;\n\n const capabilities = await getCapabilities();\n\n const datastoreConstraints =\n capabilities.minChunkSize &&\n capabilities.maxChunkSize &&\n capabilities.optimalChunkSize\n ? 
{\n minChunkSize: capabilities.minChunkSize,\n maxChunkSize: capabilities.maxChunkSize,\n optimalChunkSize: capabilities.optimalChunkSize,\n requiresOrderedChunks: capabilities.requiresOrderedChunks,\n }\n : undefined;\n\n smartChunker = new SmartChunker(networkMonitor, {\n enabled: true,\n ...smartChunking,\n fallbackChunkSize: chunkSize,\n datastoreConstraints,\n });\n\n logger.log(\n `Smart chunker initialized with datastore constraints: ${JSON.stringify(datastoreConstraints)}`,\n );\n\n return smartChunker;\n };\n\n // WebSocket management (uses uploadistaApi for both upload and flow websockets)\n const wsManager = new UploadistaWebSocketManager(\n uploadistaApi,\n logger,\n onEvent,\n );\n\n /**\n * Upload a file\n */\n const upload = async (\n file: UploadInput,\n {\n uploadLengthDeferred = false,\n uploadSize,\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onError,\n }: UploadistaUploadOptions = {},\n ): Promise<{ abort: () => void }> => {\n let uploadId: string | null = null;\n let uploadIdStorageKey: string | null = null;\n\n const fingerprint = await fingerprintService.computeFingerprint(\n file,\n `${baseUrl}/${uploadistaBasePath}/api/upload`,\n );\n\n logger.log(`fingerprint: ${fingerprint}`);\n if (!fingerprint) {\n throw new Error(\"unable to calculate fingerprint for this input file\");\n }\n\n const previousUploads = await findPreviousUploads(\n clientStorage,\n fingerprint,\n );\n if (previousUploads.length > 0 && previousUploads[0] != null) {\n const previousUpload = resumeFromPreviousUpload(previousUploads[0]);\n uploadIdStorageKey = previousUpload.clientStorageKey;\n uploadId = previousUpload.uploadId;\n }\n\n const source = await fileReader.openFile(file, chunkSize);\n\n const size = calculateFileSize(source.size, {\n uploadLengthDeferred,\n uploadSize,\n });\n source.size = size;\n\n const initializedSmartChunker = await initializeSmartChunker();\n\n const isSmartChunkingEnabled = smartChunking?.enabled !== false;\n if (isSmartChunkingEnabled) {\n metrics.startSession(fingerprint, size || 0, true);\n }\n\n const capabilities = await getCapabilities();\n\n const negotiatedStrategy = negotiateUploadStrategy({\n capabilities,\n fileSize: size,\n chunkSize,\n parallelUploads,\n uploadLengthDeferred,\n strategyConfig: uploadStrategy,\n logger,\n });\n\n if (negotiatedStrategy.strategy === \"parallel\") {\n logger.log(\n `Using parallel upload with ${negotiatedStrategy.parallelUploads} streams`,\n );\n\n const parallelResult = await startParallelUpload({\n checksumService,\n source,\n storageId,\n fingerprint,\n uploadLengthDeferred,\n parallelUploads: negotiatedStrategy.parallelUploads,\n parallelChunkSize,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n abortControllerFactory,\n platformService,\n openWebSocket: (id) => {\n wsManager.openUploadWebSocket(id);\n // Note: WebSocket opening is now async due to auth, but this callback is sync\n // The WebSocket will be opened in the background\n return null as unknown as WebSocketLike;\n },\n closeWebSocket: (id) => wsManager.closeUploadWebSocket(id),\n terminate: (id) =>\n terminate(id, uploadistaApi, platformService, retryDelays),\n onProgress,\n onChunkComplete,\n onSuccess,\n onError,\n });\n\n if (parallelResult) {\n return {\n abort: async () => {\n await parallelResult.abort();\n },\n };\n }\n\n logger.log(\"Parallel upload failed, falling back to single upload\");\n }\n\n // 
Single upload path\n const result = await startSingleUpload({\n source,\n storageId,\n uploadId,\n platformService,\n uploadIdStorageKey,\n checksumService,\n fingerprint,\n uploadLengthDeferred,\n uploadistaApi,\n logger,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket: (id) => {\n wsManager.openUploadWebSocket(id);\n // Note: WebSocket opening is now async due to auth, but this callback is sync\n // The WebSocket will be opened in the background\n return null as unknown as WebSocketLike;\n },\n closeWebSocket: (id) => wsManager.closeUploadWebSocket(id),\n onProgress,\n onChunkComplete,\n onSuccess,\n onError,\n });\n\n if (result) {\n const abortController = abortControllerFactory.create();\n const { uploadId, uploadIdStorageKey, offset } = result;\n\n let timeoutId: Timeout | null = null;\n\n performUpload({\n platformService,\n uploadId,\n offset,\n source,\n uploadLengthDeferred,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n abortController,\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onRetry: (timeout) => {\n timeoutId = timeout;\n },\n onError,\n });\n\n return {\n abort: () => {\n abort({\n platformService,\n uploadId,\n uploadIdStorageKey,\n retryTimeout: timeoutId,\n shouldTerminate: true,\n abortController,\n uploadistaApi,\n retryDelays,\n clientStorage,\n });\n },\n };\n }\n\n return {\n abort: () => {},\n };\n };\n\n // Run validation on client creation\n validateAndThrow(\n {\n baseUrl,\n storageId,\n chunkSize,\n parallelUploads,\n parallelChunkSize,\n uploadStrategy,\n },\n logger,\n );\n\n /**\n * Upload a file through a flow (using streaming-input-node)\n */\n const uploadWithFlow = async (\n file: UploadInput,\n flowConfig: FlowUploadConfig,\n {\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onJobStart,\n onError,\n }: Omit<\n UploadistaUploadOptions,\n \"uploadLengthDeferred\" | \"uploadSize\" | \"metadata\"\n > = {},\n ): Promise<{\n abort: () => Promise<void>;\n pause: () => Promise<void>;\n jobId: string;\n }> => {\n const source = await fileReader.openFile(file, chunkSize);\n\n const initializedSmartChunker = await initializeSmartChunker();\n\n const isSmartChunkingEnabled = smartChunking?.enabled !== false;\n if (isSmartChunkingEnabled) {\n const fingerprint = await fingerprintService.computeFingerprint(\n file,\n `${baseUrl}/${uploadistaBasePath}/api/flow`,\n );\n metrics.startSession(fingerprint || \"unknown\", source.size || 0, true);\n }\n\n const result = await startFlowUpload({\n source,\n flowConfig,\n uploadistaApi,\n logger,\n platformService,\n openWebSocket: (id) => wsManager.openFlowWebSocket(id),\n closeWebSocket: (id) => wsManager.closeWebSocket(id),\n onProgress,\n onChunkComplete,\n onSuccess,\n onJobStart,\n onError,\n });\n\n if (!result) {\n return {\n abort: async () => {},\n pause: async () => {},\n jobId: \"\",\n };\n }\n\n const { jobId, uploadFile, inputNodeId } = result;\n const abortController = abortControllerFactory.create();\n\n // Open upload WebSocket to receive upload progress events\n await wsManager.openUploadWebSocket(uploadFile.id);\n\n let timeoutId: Timeout | null = null;\n\n performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset: uploadFile.offset,\n source,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n abortController,\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onRetry: 
(timeout) => {\n timeoutId = timeout;\n },\n onError,\n });\n\n return {\n abort: async () => {\n // First, tell the server to cancel the flow\n try {\n await uploadistaApi.cancelFlow(jobId);\n logger.log(`Flow cancelled on server: ${jobId}`);\n } catch (err) {\n // Log but don't throw - client cleanup should still happen\n logger.log(`Failed to cancel flow on server: ${err}`);\n }\n\n // Then do client-side cleanup\n abortController.abort();\n if (timeoutId) {\n platformService.clearTimeout(timeoutId);\n }\n // Close both flow and upload WebSockets\n wsManager.closeWebSocket(jobId);\n wsManager.closeUploadWebSocket(uploadFile.id);\n },\n pause: async () => {\n await uploadistaApi.pauseFlow(jobId);\n },\n jobId,\n };\n };\n\n /**\n * Upload multiple inputs through a flow with parallel coordination.\n * Supports mixed input types: File/Blob (upload), URL strings (fetch), structured data.\n *\n * @param inputs - Record of nodeId to input data (File, URL string, or object)\n * @param flowConfig - Flow configuration\n * @param callbacks - Upload lifecycle callbacks\n * @returns Abort controller and job ID\n */\n const multiInputFlowUpload = async (\n inputs: Record<string, unknown>,\n flowConfig: FlowUploadConfig,\n {\n onProgress,\n onChunkComplete,\n onShouldRetry,\n onJobStart,\n onError,\n onInputProgress,\n onInputComplete,\n onInputError,\n }: Omit<\n UploadistaUploadOptions,\n \"uploadLengthDeferred\" | \"uploadSize\" | \"metadata\"\n > & {\n onInputProgress?: (\n nodeId: string,\n progress: number,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n onInputComplete?: (nodeId: string) => void;\n onInputError?: (nodeId: string, error: Error) => void;\n } = {},\n ): Promise<{\n abort: () => Promise<void>;\n pause: () => Promise<void>;\n jobId: string;\n }> => {\n // Start the flow and get job ID\n const { job } = await uploadistaApi.runFlow(\n flowConfig.flowId,\n flowConfig.storageId || storageId,\n {},\n );\n const jobId = job.id;\n\n logger.log(`Multi-input flow started: ${jobId}`);\n onJobStart?.(jobId);\n\n // Open flow WebSocket for flow events\n await wsManager.openFlowWebSocket(jobId);\n\n const abortControllers: Map<string, ReturnType<typeof abortControllerFactory.create>> = new Map();\n const uploadIds: Map<string, string> = new Map();\n const timeoutIds: Timeout[] = [];\n\n try {\n // Initialize all inputs in parallel\n const inputEntries = Object.entries(inputs);\n const initPromises = inputEntries.map(async ([nodeId, data]) => {\n const inputType = detectInputType(data);\n\n if (inputType === \"file\") {\n // File/Blob upload\n const file = data as UploadInput;\n const source = await fileReader.openFile(file, chunkSize);\n\n const initResult = await initializeFlowInput({\n nodeId,\n jobId,\n source,\n storageId: flowConfig.storageId || storageId,\n metadata: {},\n uploadistaApi,\n logger,\n platformService,\n callbacks: {\n onStart: ({ uploadId }) => {\n uploadIds.set(nodeId, uploadId);\n // Open WebSocket for this upload\n wsManager.openUploadWebSocket(uploadId);\n },\n onError,\n },\n });\n\n return { nodeId, uploadFile: initResult.uploadFile, source, inputType };\n } else if (inputType === \"url\") {\n // URL input - send to server immediately\n await uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n {\n operation: \"url\",\n url: data as string,\n storageId: flowConfig.storageId || storageId,\n },\n { contentType: \"application/json\" },\n );\n\n return { nodeId, uploadFile: null, source: null, inputType };\n } else {\n // Structured data input\n await 
uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n data,\n { contentType: \"application/json\" },\n );\n\n return { nodeId, uploadFile: null, source: null, inputType };\n }\n });\n\n const initializedInputs = await Promise.all(initPromises);\n\n // Upload all file inputs in parallel\n const initializedSmartChunker = await initializeSmartChunker();\n const uploadPromises = initializedInputs\n .filter((input) => input.inputType === \"file\" && input.uploadFile && input.source)\n .map(async ({ nodeId, uploadFile, source }) => {\n const abortController = abortControllerFactory.create();\n abortControllers.set(nodeId, abortController);\n\n const metrics = new UploadMetrics({\n enableDetailedMetrics: smartChunking?.enabled !== false,\n });\n\n if (!uploadFile || !source) {\n throw new Error(`Missing uploadFile or source for node ${nodeId}`);\n }\n\n try {\n await uploadInputChunks({\n nodeId,\n jobId,\n uploadFile,\n source,\n offset: uploadFile.offset,\n retryAttempt: 0,\n abortController,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry: (timeout) => {\n timeoutIds.push(timeout);\n },\n callbacks: {\n onProgress: (uploadId, bytesUploaded, totalBytes) => {\n onProgress?.(uploadId, bytesUploaded, totalBytes);\n\n // Calculate progress percentage\n const progress = totalBytes ? Math.round((bytesUploaded / totalBytes) * 100) : 0;\n onInputProgress?.(nodeId, progress, bytesUploaded, totalBytes);\n },\n onChunkComplete,\n onShouldRetry,\n },\n });\n\n // Finalize this input\n await finalizeFlowInput({\n nodeId,\n jobId,\n uploadId: uploadFile.id,\n uploadistaApi,\n logger,\n callbacks: { onError },\n });\n\n onInputComplete?.(nodeId);\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n onInputError?.(nodeId, error);\n throw error;\n }\n });\n\n await Promise.all(uploadPromises);\n\n logger.log(`All inputs uploaded for job: ${jobId}`);\n } catch (err) {\n const error = err instanceof Error ? 
err : new Error(String(err));\n logger.log(`Multi-input flow upload failed: ${error.message}`);\n onError?.(error);\n throw error;\n }\n\n return {\n abort: async () => {\n try {\n await uploadistaApi.cancelFlow(jobId);\n logger.log(`Flow cancelled on server: ${jobId}`);\n } catch (err) {\n logger.log(`Failed to cancel flow on server: ${err}`);\n }\n\n // Abort all uploads\n for (const controller of abortControllers.values()) {\n controller.abort();\n }\n\n // Clear all timeouts\n for (const timeoutId of timeoutIds) {\n platformService.clearTimeout(timeoutId);\n }\n\n // Close all WebSockets\n wsManager.closeWebSocket(jobId);\n for (const uploadId of uploadIds.values()) {\n wsManager.closeUploadWebSocket(uploadId);\n }\n },\n pause: async () => {\n await uploadistaApi.pauseFlow(jobId);\n },\n jobId,\n };\n };\n\n return {\n // Upload operations\n upload,\n uploadWithFlow,\n multiInputFlowUpload,\n abort: (params: Parameters<typeof abort>[0]) => abort(params),\n\n // Flow operations\n getFlow: async (flowId: string) => {\n const { status, flow } = await uploadistaApi.getFlow(flowId);\n return { status, flow };\n },\n\n runFlow: async ({\n flowId,\n inputs,\n storageId: flowStorageId,\n }: {\n flowId: string;\n inputs: Record<string, unknown>;\n storageId?: string;\n }) => {\n const { status, job } = await uploadistaApi.runFlow(\n flowId,\n flowStorageId || storageId,\n inputs,\n );\n return { status, job };\n },\n\n resumeFlow: async ({\n jobId,\n nodeId,\n newData,\n contentType,\n }: {\n jobId: string;\n nodeId: string;\n newData: unknown;\n contentType?: \"application/json\" | \"application/octet-stream\";\n }) => {\n return uploadistaApi.resumeFlow(jobId, nodeId, newData, {\n contentType,\n });\n },\n\n pauseFlow: async (jobId: string) => {\n return uploadistaApi.pauseFlow(jobId);\n },\n\n cancelFlow: async (jobId: string) => {\n return uploadistaApi.cancelFlow(jobId);\n },\n\n /**\n * Find input nodes in a flow.\n *\n * Discovers all input nodes in a flow and returns their metadata.\n * Useful for auto-discovering input node IDs in single-input flows.\n *\n * @param flowId - The flow ID to inspect\n * @returns Discovery result with input node information\n *\n * @example\n * ```typescript\n * const { inputNodes, single } = await client.findInputNode(\"my-flow\");\n *\n * if (single) {\n * // Flow has exactly one input node, can auto-map input data\n * const inputNodeId = inputNodes[0].id;\n * } else {\n * // Multi-input flow, requires explicit node IDs\n * console.log(\"Input nodes:\", inputNodes.map(n => n.id));\n * }\n * ```\n */\n findInputNode: async (flowId: string) => {\n const { flow } = await uploadistaApi.getFlow(flowId);\n const inputNodes = flow.nodes\n .filter((node) => node.type === \"input\")\n .map((node) => ({\n id: node.id,\n type: node.type,\n name: node.name,\n }));\n\n return {\n inputNodes,\n single: inputNodes.length === 1,\n };\n },\n\n /**\n * Execute a flow with arbitrary inputs (URL, structured data, etc.).\n *\n * This method supports flexible flow execution beyond traditional file uploads.\n * It directly executes flows with provided inputs, bypassing chunked upload for\n * non-file operations like URL fetching or structured data processing.\n *\n * @param flowId - The flow ID to execute\n * @param inputs - Map of node IDs to their input data\n * @param options - Optional execution options\n * @returns Job status and initial result\n *\n * @example\n * ```typescript\n * // URL-based flow execution\n * const { job } = await 
client.executeFlowWithInputs(\"optimize-flow\", {\n * \"input-node\": {\n * operation: \"url\",\n * url: \"https://example.com/image.jpg\",\n * storageId: \"s3\"\n * }\n * });\n *\n * // Listen for flow events\n * client.openFlowWebSocket(job.id);\n * client.subscribeToEvents((event) => {\n * if (event.eventType === EventType.FlowEnd) {\n * console.log(\"Flow complete:\", event.outputs);\n * }\n * });\n * ```\n */\n executeFlowWithInputs: async (\n flowId: string,\n inputs: Record<string, unknown>,\n options?: {\n storageId?: string;\n onJobStart?: (jobId: string) => void;\n },\n ) => {\n // Execute flow with provided inputs\n const { status, job } = await uploadistaApi.runFlow(\n flowId,\n options?.storageId || storageId,\n inputs,\n );\n\n // Notify callback if job started successfully\n if (job?.id && options?.onJobStart) {\n options.onJobStart(job.id);\n }\n\n return { status, job };\n },\n\n // Job operations (unified for both uploads and flows)\n getJobStatus: async (jobId: string) => {\n return uploadistaApi.getJobStatus(jobId);\n },\n\n // WebSocket management methods\n openUploadWebSocket: (uploadId: string) =>\n wsManager.openUploadWebSocket(uploadId),\n openFlowWebSocket: (jobId: string) => wsManager.openFlowWebSocket(jobId),\n openWebSocket: (id: string) => wsManager.openWebSocket(id),\n closeWebSocket: (id: string) => wsManager.closeWebSocket(id),\n closeAllWebSockets: () => wsManager.closeAll(),\n sendPing: (jobId: string) => wsManager.sendPing(jobId),\n isWebSocketConnected: (id: string) => wsManager.isConnected(id),\n getWebSocketConnectionCount: () => wsManager.getConnectionCount(),\n getWebSocketConnectionCountByType: () =>\n wsManager.getConnectionCountByType(),\n\n // Smart chunking utilities\n getNetworkMetrics: () => networkMonitor.getCurrentMetrics(),\n getNetworkCondition: () => networkMonitor.getNetworkCondition(),\n getChunkingInsights: () => metrics.getPerformanceInsights(),\n exportMetrics: () => metrics.exportMetrics(),\n\n // Connection pooling utilities\n getConnectionMetrics: () => uploadistaApi.getConnectionMetrics(),\n getDetailedConnectionMetrics: () =>\n uploadistaApi.getDetailedConnectionMetrics(),\n warmupConnections: (urls: string[]) =>\n uploadistaApi.warmupConnections(urls),\n\n // Smart chunking insights\n getConnectionPoolingInsights: async () => {\n const chunker = await initializeSmartChunker();\n return chunker.getConnectionPoolingInsights();\n },\n\n resetMetrics: async () => {\n networkMonitor.reset();\n const chunker = await initializeSmartChunker();\n chunker.reset();\n metrics.reset();\n },\n\n // Configuration validation\n validateConfiguration: (options: UploadistaClientOptions<UploadInput>) => {\n return validateConfiguration(options, defaultClientCapabilities, logger);\n },\n\n validateConfigurationAsync: async (\n options: UploadistaClientOptions<UploadInput>,\n ) => {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n // Fetch capabilities using the authenticated HTTP client\n const capabilities = await uploadistaApi.getCapabilities(\n options.storageId,\n );\n\n const validation = validateConfiguration(options, capabilities, logger);\n errors.push(...validation.errors);\n warnings.push(...validation.warnings);\n\n return {\n valid: errors.length === 0,\n errors,\n warnings,\n capabilities,\n };\n },\n\n getCapabilities,\n };\n}\n\n/**\n * Uploadista client instance type.\n *\n * The client provides methods for:\n * - **Upload operations**: upload(), uploadWithFlow()\n * - **Flow operations**: getFlow(), runFlow(), 
resumeFlow()\n * - **Job management**: getJobStatus()\n * - **WebSocket management**: openUploadWebSocket(), openFlowWebSocket(), closeWebSocket()\n * - **Metrics and diagnostics**: getNetworkMetrics(), getChunkingInsights(), exportMetrics()\n * - **Connection pooling**: getConnectionMetrics(), warmupConnections()\n * - **Configuration validation**: validateConfiguration(), validateConfigurationAsync()\n *\n * @example Basic usage\n * ```typescript\n * const client = createUploadistaClient(config);\n *\n * // Upload a file\n * await client.upload(file, {\n * onProgress: (progress) => console.log(`${progress}%`),\n * onSuccess: (result) => console.log('Done:', result.id),\n * });\n *\n * // Get network metrics\n * const metrics = client.getNetworkMetrics();\n * console.log(`Speed: ${metrics.averageSpeed / 1024} KB/s`);\n * ```\n *\n * @see {@link createUploadistaClient} for creating an instance\n */\nexport type UploadistaClient = ReturnType<typeof createUploadistaClient>;\n","import z from \"zod\";\n\nexport type PreviousUpload = {\n size: number | null;\n metadata: { [key: string]: string | number | boolean };\n creationTime: string;\n uploadId?: string;\n parallelUploadUrls?: string[];\n clientStorageKey: string;\n};\n\nexport const previousUploadSchema = z.object({\n size: z.number().nullable(),\n metadata: z.record(\n z.string(),\n z.union([z.string(), z.number(), z.boolean()]),\n ),\n creationTime: z.string(),\n uploadId: z.string().optional(),\n parallelUploadUrls: z.array(z.string()).optional(),\n clientStorageKey: z.string(),\n});\n","import type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { StorageService } from \"../services/storage-service\";\nimport {\n type PreviousUpload,\n previousUploadSchema,\n} from \"../types/previous-upload\";\n\n/**\n * Client-side storage interface for managing upload resumption data.\n *\n * Provides methods to store, retrieve, and manage previous upload information,\n * enabling the client to resume interrupted uploads from where they left off.\n * This is essential for implementing reliable upload resumption across sessions.\n *\n * Storage keys are namespaced with \"uploadista::\" prefix and organized by\n * file fingerprint to allow quick lookup of resumable uploads.\n *\n * @example Finding resumable uploads\n * ```typescript\n * const storage = createClientStorage(localStorage);\n *\n * // Find all previous uploads\n * const allUploads = await storage.findAllUploads();\n *\n * // Find uploads for a specific file\n * const fingerprint = await computeFingerprint(file);\n * const matches = await storage.findUploadsByFingerprint(fingerprint);\n *\n * if (matches.length > 0) {\n * // Resume from the most recent upload\n * const uploadId = matches[0].uploadId;\n * await resumeUpload(uploadId);\n * }\n * ```\n */\nexport type ClientStorage = {\n /**\n * Retrieves all stored upload records from client storage.\n *\n * Useful for debugging or displaying a list of resumable uploads to the user.\n *\n * @returns Array of all previous upload records\n */\n findAllUploads: () => Promise<PreviousUpload[]>;\n\n /**\n * Finds previous upload records matching a specific file fingerprint.\n *\n * This is the primary method for discovering resumable uploads.\n * Returns uploads sorted by most recent first.\n *\n * @param fingerprint - The file fingerprint to search for\n * @returns Array of matching upload records, or empty array if none found\n *\n * @example\n * ```typescript\n * const fingerprint = await 
computeFingerprint(file);\n * const previous = await storage.findUploadsByFingerprint(fingerprint);\n *\n * if (previous.length > 0) {\n * console.log(`Found ${previous.length} resumable uploads`);\n * console.log(`Last upload was ${previous[0].offset} bytes`);\n * }\n * ```\n */\n findUploadsByFingerprint: (fingerprint: string) => Promise<PreviousUpload[]>;\n\n /**\n * Removes an upload record from client storage.\n *\n * Called after an upload completes successfully or is explicitly cancelled\n * to clean up storage and prevent stale resumption attempts.\n *\n * @param clientStorageKey - The storage key returned by addUpload\n *\n * @example Cleanup after successful upload\n * ```typescript\n * await uploadFile(file);\n * await storage.removeUpload(storageKey);\n * ```\n */\n removeUpload: (clientStorageKey: string) => Promise<void>;\n\n /**\n * Stores an upload record in client storage for future resumption.\n *\n * Creates a namespaced storage key that includes the file fingerprint,\n * making it easy to find resumable uploads later.\n *\n * @param fingerprint - File fingerprint for organizing uploads\n * @param upload - Upload metadata to store (uploadId, offset, etc.)\n * @param options - Options object containing ID generation service\n * @returns The storage key that can be used to remove this upload later, or undefined if storage failed\n *\n * @example Storing upload progress\n * ```typescript\n * const fingerprint = await computeFingerprint(file);\n * const key = await storage.addUpload(\n * fingerprint,\n * { uploadId: 'abc123', offset: 1024000 },\n * { generateId: idService }\n * );\n *\n * // Later, remove when complete\n * if (key) await storage.removeUpload(key);\n * ```\n */\n addUpload: (\n fingerprint: string,\n upload: PreviousUpload,\n { generateId }: { generateId: IdGenerationService },\n ) => Promise<string | undefined>;\n};\n\n/**\n * Creates a ClientStorage instance using the provided storage service.\n *\n * This factory function wraps a platform-specific StorageService (e.g., localStorage,\n * AsyncStorage) with the ClientStorage interface, providing a consistent API\n * for upload resumption across different platforms.\n *\n * @param storageService - Platform-specific storage implementation\n * @returns ClientStorage instance for managing upload records\n *\n * @example Browser with localStorage\n * ```typescript\n * const storage = createClientStorage({\n * find: async (prefix) => {\n * const items: Record<string, string> = {};\n * for (let i = 0; i < localStorage.length; i++) {\n * const key = localStorage.key(i);\n * if (key?.startsWith(prefix)) {\n * items[key] = localStorage.getItem(key) || '';\n * }\n * }\n * return items;\n * },\n * setItem: async (key, value) => localStorage.setItem(key, value),\n * removeItem: async (key) => localStorage.removeItem(key),\n * });\n * ```\n *\n * @example React Native with AsyncStorage\n * ```typescript\n * const storage = createClientStorage({\n * find: async (prefix) => {\n * const keys = await AsyncStorage.getAllKeys();\n * const matching = keys.filter(k => k.startsWith(prefix));\n * const pairs = await AsyncStorage.multiGet(matching);\n * return Object.fromEntries(pairs);\n * },\n * setItem: async (key, value) => AsyncStorage.setItem(key, value),\n * removeItem: async (key) => AsyncStorage.removeItem(key),\n * });\n * ```\n */\nexport function createClientStorage(\n storageService: StorageService,\n): ClientStorage {\n return {\n findAllUploads: async () => {\n const items = await 
storageService.find(\"uploadista::\");\n return Object.values(items).map((item) =>\n previousUploadSchema.parse(JSON.parse(item)),\n );\n },\n findUploadsByFingerprint: async (fingerprint: string) => {\n const items = await storageService.find(`uploadista::${fingerprint}`);\n return Object.values(items).map((item) =>\n previousUploadSchema.parse(JSON.parse(item)),\n );\n },\n removeUpload: (clientStorageKey: string) =>\n storageService.removeItem(clientStorageKey),\n addUpload: async (\n fingerprint: string,\n upload: PreviousUpload,\n { generateId }: { generateId: IdGenerationService },\n ) => {\n const key = generateId.generate();\n const clientStorageKey = `uploadista::${fingerprint}::${key}`;\n await storageService.setItem(clientStorageKey, JSON.stringify(upload));\n return clientStorageKey;\n },\n };\n}\n","import type { StorageService } from \"../services/storage-service\";\n\n/**\n * In-memory fallback storage service for Expo\n * Used when AsyncStorage is not available or for testing\n */\nexport function createInMemoryStorageService(): StorageService {\n const storage = new Map<string, string>();\n\n return {\n async getItem(key: string): Promise<string | null> {\n return storage.get(key) ?? null;\n },\n\n async setItem(key: string, value: string): Promise<void> {\n storage.set(key, value);\n },\n\n async removeItem(key: string): Promise<void> {\n storage.delete(key);\n },\n\n async findAll(): Promise<Record<string, string>> {\n return Object.fromEntries(storage.entries());\n },\n\n async find(prefix: string): Promise<Record<string, string>> {\n return Object.fromEntries(\n Array.from(storage.entries()).filter(([key]) => key.startsWith(prefix)),\n );\n },\n };\n}\n","/**\n * Generic event type that the subscription manager can handle\n */\nexport interface GenericEvent {\n type: string;\n data?: unknown;\n}\n\n/**\n * Event handler callback function\n */\nexport type SubscriptionEventHandler<T = GenericEvent> = (event: T) => void;\n\n/**\n * Unsubscribe function returned from subscriptions\n */\nexport type UnsubscribeFunction = () => void;\n\n/**\n * Event source that provides subscription capabilities\n */\nexport interface EventSource<T = GenericEvent> {\n /**\n * Subscribe to events from this source\n * @returns Unsubscribe function to clean up the subscription\n */\n subscribe(handler: SubscriptionEventHandler<T>): UnsubscribeFunction;\n}\n\n/**\n * Options for event filtering\n */\nexport interface EventFilterOptions {\n /**\n * Filter events by type (exact match)\n */\n eventType?: string;\n\n /**\n * Filter events by upload/job ID\n * If provided, only events with matching ID will be passed to the handler\n */\n uploadId?: string | null;\n\n /**\n * Custom filter function for advanced filtering\n * Return true to pass the event to the handler\n */\n customFilter?: (event: GenericEvent) => boolean;\n}\n\n/**\n * Subscription information for tracking\n */\ninterface SubscriptionInfo<T extends GenericEvent = GenericEvent> {\n unsubscribe: UnsubscribeFunction;\n handler: SubscriptionEventHandler<T>;\n filter?: EventFilterOptions;\n}\n\n/**\n * Platform-agnostic event subscription manager that handles event filtering,\n * subscription tracking, and automatic cleanup.\n *\n * This manager simplifies event handling by:\n * - Filtering events by type and/or ID\n * - Tracking all active subscriptions\n * - Providing cleanup methods to unsubscribe from all events\n * - Supporting custom filter functions for advanced scenarios\n *\n * @example Basic event subscription\n * 
```typescript\n * const manager = new EventSubscriptionManager(eventSource);\n *\n * manager.subscribe(\n * (event) => console.log('Upload progress:', event),\n * { eventType: 'UPLOAD_PROGRESS', uploadId: 'abc123' }\n * );\n *\n * // Clean up all subscriptions when done\n * manager.cleanup();\n * ```\n *\n * @example Multiple filtered subscriptions\n * ```typescript\n * const manager = new EventSubscriptionManager(eventSource);\n *\n * // Subscribe to progress events for specific upload\n * manager.subscribe(\n * onProgress,\n * { eventType: 'UPLOAD_PROGRESS', uploadId: currentUploadId }\n * );\n *\n * // Subscribe to error events for any upload\n * manager.subscribe(\n * onError,\n * { eventType: 'UPLOAD_ERROR' }\n * );\n *\n * // Subscribe to all events with custom filtering\n * manager.subscribe(\n * onEvent,\n * { customFilter: (e) => e.data?.priority === 'high' }\n * );\n * ```\n */\nexport class EventSubscriptionManager<T extends GenericEvent = GenericEvent> {\n private subscriptions: SubscriptionInfo<T>[] = [];\n\n /**\n * Create a new EventSubscriptionManager\n *\n * @param eventSource - Source to subscribe to for events\n */\n constructor(private readonly eventSource: EventSource<T>) {}\n\n /**\n * Subscribe to events with optional filtering\n *\n * @param handler - Callback function to invoke when matching events occur\n * @param filter - Optional filter options to narrow down which events trigger the handler\n * @returns Unsubscribe function to remove this specific subscription\n *\n * @example Subscribe to specific event type\n * ```typescript\n * const unsubscribe = manager.subscribe(\n * (event) => console.log('Progress:', event),\n * { eventType: 'UPLOAD_PROGRESS' }\n * );\n *\n * // Later, unsubscribe\n * unsubscribe();\n * ```\n */\n subscribe(\n handler: SubscriptionEventHandler<T>,\n filter?: EventFilterOptions,\n ): UnsubscribeFunction {\n // Create a wrapper handler that applies filtering\n const wrappedHandler: SubscriptionEventHandler<T> = (event: T) => {\n if (this.shouldHandleEvent(event, filter)) {\n handler(event);\n }\n };\n\n // Subscribe to the event source with the wrapped handler\n const unsubscribe = this.eventSource.subscribe(wrappedHandler);\n\n // Track this subscription\n const subscription: SubscriptionInfo<T> = {\n unsubscribe,\n handler: wrappedHandler,\n filter,\n };\n\n this.subscriptions.push(subscription);\n\n // Return unsubscribe function that also removes from tracking\n return () => {\n const index = this.subscriptions.indexOf(subscription);\n if (index !== -1) {\n this.subscriptions.splice(index, 1);\n }\n unsubscribe();\n };\n }\n\n /**\n * Check if an event matches the filter criteria\n *\n * @param event - Event to check\n * @param filter - Filter options to apply\n * @returns True if the event passes all filters\n */\n private shouldHandleEvent(event: T, filter?: EventFilterOptions): boolean {\n if (!filter) {\n return true;\n }\n\n // Check event type filter\n if (filter.eventType && event.type !== filter.eventType) {\n return false;\n }\n\n // Check upload ID filter\n if (filter.uploadId !== undefined) {\n const eventData = event.data as { id?: string } | undefined;\n const eventId = eventData?.id;\n\n // If filter.uploadId is null, only pass events without an ID\n // If filter.uploadId is a string, only pass events with matching ID\n if (filter.uploadId === null) {\n if (eventId !== undefined) {\n return false;\n }\n } else if (eventId !== filter.uploadId) {\n return false;\n }\n }\n\n // Check custom filter\n if (filter.customFilter) 
{\n // Cast to GenericEvent for custom filter as it operates on the base interface\n return filter.customFilter(event as unknown as GenericEvent);\n }\n\n return true;\n }\n\n /**\n * Get the number of active subscriptions\n *\n * @returns Number of tracked subscriptions\n */\n getSubscriptionCount(): number {\n return this.subscriptions.length;\n }\n\n /**\n * Check if there are any active subscriptions\n *\n * @returns True if at least one subscription is active\n */\n hasSubscriptions(): boolean {\n return this.subscriptions.length > 0;\n }\n\n /**\n * Unsubscribe from all tracked subscriptions and clear the subscription list\n *\n * This is typically called when disposing of a component or cleaning up resources.\n *\n * @example Cleanup in framework hooks\n * ```typescript\n * // React\n * useEffect(() => {\n * const manager = new EventSubscriptionManager(eventSource);\n * manager.subscribe(handler, filter);\n *\n * return () => manager.cleanup();\n * }, []);\n *\n * // Vue\n * onUnmounted(() => {\n * manager.cleanup();\n * });\n * ```\n */\n cleanup(): void {\n for (const subscription of this.subscriptions) {\n subscription.unsubscribe();\n }\n this.subscriptions = [];\n }\n\n /**\n * Update the upload ID filter for all subscriptions that have an uploadId filter\n *\n * This is useful when the current upload changes and you want to update\n * all subscriptions to listen for the new upload's events.\n *\n * @param newUploadId - New upload ID to filter events by\n *\n * @example Update upload ID when starting new upload\n * ```typescript\n * const manager = new EventSubscriptionManager(eventSource);\n * manager.subscribe(onProgress, { eventType: 'UPLOAD_PROGRESS', uploadId: null });\n *\n * // When upload starts\n * manager.updateUploadIdFilter(uploadId);\n * ```\n */\n updateUploadIdFilter(newUploadId: string | null): void {\n for (const subscription of this.subscriptions) {\n if (subscription.filter && subscription.filter.uploadId !== undefined) {\n subscription.filter.uploadId = newUploadId;\n }\n }\n }\n}\n","import type { FlowEvent, TypedOutput } from \"@uploadista/core/flow\";\nimport { EventType } from \"@uploadista/core/flow\";\nimport type { UploadFile } from \"@uploadista/core/types\";\nimport type { FlowUploadOptions } from \"../types/flow-upload-options\";\nimport { detectInputType } from \"../utils/input-detection\";\n\n/**\n * Flow upload status representing the current state of a flow upload lifecycle.\n * Flow uploads progress through: idle → uploading → processing → success/error/aborted\n */\nexport type FlowUploadStatus =\n | \"idle\"\n | \"uploading\"\n | \"processing\"\n | \"success\"\n | \"error\"\n | \"aborted\";\n\n/**\n * Complete state information for a flow upload operation.\n * Tracks both the upload phase (file transfer) and processing phase (flow execution).\n */\nexport interface FlowUploadState {\n /** Current upload status */\n status: FlowUploadStatus;\n /** Upload progress percentage (0-100) */\n progress: number;\n /** Number of bytes uploaded */\n bytesUploaded: number;\n /** Total bytes to upload, null if unknown */\n totalBytes: number | null;\n /** Error if upload or processing failed */\n error: Error | null;\n /** Unique identifier for the flow execution job */\n jobId: string | null;\n /** Whether the flow processing has started */\n flowStarted: boolean;\n /** Name of the currently executing flow node */\n currentNodeName: string | null;\n /** Type of the currently executing flow node */\n currentNodeType: string | null;\n /**\n * Complete 
typed outputs from all output nodes in the flow.\n * Each output includes nodeId, optional nodeType, data, and timestamp.\n * Available when status is \"success\".\n */\n flowOutputs: TypedOutput[] | null;\n}\n\n/**\n * State for a single input in a multi-input flow.\n */\nexport interface InputExecutionState {\n /** Input node ID */\n nodeId: string;\n /** Input type (file, url, data) */\n type: \"file\" | \"url\" | \"data\";\n /** Current status of this input */\n status: \"pending\" | \"uploading\" | \"complete\" | \"error\";\n /** Progress percentage for file uploads (0-100) */\n progress: number;\n /** Bytes uploaded for file uploads */\n bytesUploaded: number;\n /** Total bytes for file uploads */\n totalBytes: number | null;\n /** Error if this input failed */\n error: Error | null;\n /** Abort controller for this specific input */\n abortController: FlowUploadAbortController | null;\n}\n\n/**\n * Callbacks that FlowManager invokes during the flow upload lifecycle\n */\nexport interface FlowManagerCallbacks {\n /**\n * Called when the flow upload state changes\n */\n onStateChange: (state: FlowUploadState) => void;\n\n /**\n * Called when upload progress updates\n * @param progress - Progress percentage (0-100)\n * @param bytesUploaded - Number of bytes uploaded\n * @param totalBytes - Total bytes to upload, null if unknown\n */\n onProgress?: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n\n /**\n * Called when a chunk completes\n * @param chunkSize - Size of the completed chunk\n * @param bytesAccepted - Total bytes accepted so far\n * @param bytesTotal - Total bytes to upload, null if unknown\n */\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n\n /**\n * Called when the flow completes successfully (receives full flow outputs)\n * Each output includes nodeId, optional nodeType (e.g., \"storage-output-v1\"), data, and timestamp.\n *\n * @param outputs - Array of typed outputs from all output nodes\n *\n * @example\n * ```typescript\n * onFlowComplete: (outputs) => {\n * for (const output of outputs) {\n * console.log(`${output.nodeId} (${output.nodeType}):`, output.data);\n * }\n * }\n * ```\n */\n onFlowComplete?: (outputs: TypedOutput[]) => void;\n\n /**\n * Called when upload succeeds (receives typed outputs from all output nodes)\n * Each output includes nodeId, optional nodeType (e.g., \"storage-output-v1\"), data, and timestamp.\n *\n * @param outputs - Array of typed outputs from all output nodes\n *\n * @example\n * ```typescript\n * onSuccess: (outputs) => {\n * for (const output of outputs) {\n * console.log(`${output.nodeId} completed:`, output.data);\n * }\n * }\n * ```\n */\n onSuccess?: (outputs: TypedOutput[]) => void;\n\n /**\n * Called when upload or flow processing fails with an error\n * @param error - The error that occurred\n */\n onError?: (error: Error) => void;\n\n /**\n * Called when upload or flow is aborted\n */\n onAbort?: () => void;\n}\n\n/**\n * Generic flow execution input type - can be any value that the flow execution client accepts.\n * Common types include File, Blob, string (for URLs), or structured data objects.\n *\n * @remarks\n * The flexibility of this type enables different flow execution patterns:\n * - File/Blob: Traditional chunked file upload with init/finalize operations\n * - string (URL): Direct file fetch from external URL\n * - object: Structured data for non-file input nodes (future)\n */\nexport type FlowUploadInput = 
unknown;\n\n/**\n * Flow configuration for upload\n */\nexport interface FlowConfig {\n flowId: string;\n storageId: string;\n outputNodeId?: string;\n metadata?: Record<string, string>;\n}\n\n/**\n * Abort and pause controller interface for canceling/pausing flow uploads\n */\nexport interface FlowUploadAbortController {\n abort: () => void | Promise<void>;\n pause: () => void | Promise<void>;\n}\n\n/**\n * Internal upload options used by the flow upload function.\n * The upload phase always returns UploadFile, regardless of the final TOutput type.\n */\nexport interface InternalFlowUploadOptions {\n onJobStart?: (jobId: string) => void;\n onProgress?: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n onSuccess?: (result: UploadFile) => void;\n onError?: (error: Error) => void;\n onAbort?: () => void;\n onShouldRetry?: (error: Error, retryAttempt: number) => boolean;\n}\n\n/**\n * Flow upload function that performs the actual upload with flow processing.\n * Returns a promise that resolves to an abort controller with pause capability.\n *\n * Note: The upload phase onSuccess always receives UploadFile. The final TOutput\n * result comes from the flow execution and is handled via FlowEnd events.\n */\nexport type FlowUploadFunction<TInput = FlowUploadInput> = (\n input: TInput,\n flowConfig: FlowConfig,\n options: InternalFlowUploadOptions,\n) => Promise<FlowUploadAbortController>;\n\n/**\n * Callbacks for tracking individual input progress in multi-input flows\n */\nexport interface MultiInputCallbacks {\n /**\n * Called when an input's progress updates\n * @param nodeId - The input node ID\n * @param progress - Progress percentage (0-100)\n * @param bytesUploaded - Bytes uploaded for this input\n * @param totalBytes - Total bytes for this input\n */\n onInputProgress?: (\n nodeId: string,\n progress: number,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n\n /**\n * Called when an input completes successfully\n * @param nodeId - The input node ID\n */\n onInputComplete?: (nodeId: string) => void;\n\n /**\n * Called when an input fails\n * @param nodeId - The input node ID\n * @param error - The error that occurred\n */\n onInputError?: (nodeId: string, error: Error) => void;\n}\n\n/**\n * Multi-input flow upload function that coordinates multiple inputs in a single flow.\n * Platform packages should implement this to enable parallel multi-input upload support.\n *\n * @param inputs - Record of nodeId to input data (File, URL string, or structured data)\n * @param flowConfig - Flow configuration\n * @param options - Upload callbacks and configuration\n * @param multiInputCallbacks - Per-input progress tracking callbacks\n * @returns Promise resolving to abort controller for the entire flow execution\n *\n * @example\n * ```typescript\n * const uploadFn: MultiInputFlowUploadFunction = async (inputs, flowConfig, options, callbacks) => {\n * // 1. Start flow and create job\n * const jobId = await startFlow(flowConfig.flowId, flowConfig.storageId);\n *\n * // 2. Initialize all inputs in parallel using orchestrator functions\n * const initPromises = Object.entries(inputs).map(([nodeId, data]) =>\n * initializeFlowInput({ nodeId, jobId, source: data, ... })\n * );\n *\n * // 3. Upload files in parallel\n * // 4. Finalize all inputs\n * // 5. 
Return abort controller\n * };\n * ```\n */\nexport type MultiInputFlowUploadFunction = (\n inputs: Record<string, unknown>,\n flowConfig: FlowConfig,\n options: InternalFlowUploadOptions,\n multiInputCallbacks?: MultiInputCallbacks,\n) => Promise<FlowUploadAbortController>;\n\n/**\n * Initial state for a new flow upload\n */\nconst initialState: FlowUploadState = {\n status: \"idle\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes: null,\n error: null,\n jobId: null,\n flowStarted: false,\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs: null,\n};\n\n/**\n * Platform-agnostic flow execution manager that handles flow state machine,\n * progress tracking, flow event handling, error handling, abort, pause, reset, and retry logic.\n *\n * Supports multiple input types through generic TInput parameter:\n * - File/Blob: Chunked file upload with progress tracking\n * - string (URL): Direct file fetch from external source\n * - object: Structured data for custom input nodes\n *\n * Framework packages (React, Vue, React Native) should wrap this manager\n * with framework-specific hooks/composables.\n *\n * @template TInput - The type of input data accepted by the flow (File, Blob, string, object, etc.)\n *\n * @example\n * ```typescript\n * // File upload flow\n * const fileFlowManager = new FlowManager<File>(...);\n * await fileFlowManager.upload(myFile);\n *\n * // URL fetch flow\n * const urlFlowManager = new FlowManager<string>(...);\n * await urlFlowManager.upload(\"https://example.com/image.jpg\");\n *\n * // Structured data flow\n * const dataFlowManager = new FlowManager<{ text: string }>(...);\n * await dataFlowManager.upload({ text: \"Process this\" });\n * ```\n */\nexport class FlowManager<TInput = FlowUploadInput> {\n private state: FlowUploadState;\n private abortController: FlowUploadAbortController | null = null;\n private inputStates: Map<string, InputExecutionState> = new Map();\n /** Tracks the nodeId when executing a single-input flow via executeFlow() */\n private currentSingleInputNodeId: string | null = null;\n\n /**\n * Create a new FlowManager\n *\n * @param flowUploadFn - Flow upload function to use for uploads\n * @param callbacks - Callbacks to invoke during flow upload lifecycle\n * @param options - Flow upload configuration options\n * @param multiInputUploadFn - Optional multi-input upload function for executeFlow()\n */\n constructor(\n private readonly flowUploadFn: FlowUploadFunction<TInput>,\n private readonly callbacks: FlowManagerCallbacks,\n private readonly options: FlowUploadOptions,\n private readonly multiInputUploadFn?: MultiInputFlowUploadFunction,\n ) {\n this.state = { ...initialState };\n }\n\n /**\n * Get the current flow upload state\n */\n getState(): FlowUploadState {\n return { ...this.state };\n }\n\n /**\n * Check if an upload or flow is currently active\n */\n isUploading(): boolean {\n return (\n this.state.status === \"uploading\" || this.state.status === \"processing\"\n );\n }\n\n /**\n * Check if file upload is in progress\n */\n isUploadingFile(): boolean {\n return this.state.status === \"uploading\";\n }\n\n /**\n * Check if flow processing is in progress\n */\n isProcessing(): boolean {\n return this.state.status === \"processing\";\n }\n\n /**\n * Get the current job ID\n */\n getJobId(): string | null {\n return this.state.jobId;\n }\n\n /**\n * Update the internal state and notify callbacks\n */\n private updateState(update: Partial<FlowUploadState>): void {\n this.state = { ...this.state, ...update };\n 
this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Handle flow events from the event subscription\n * This method should be called by the framework wrapper when it receives flow events\n *\n * @param event - Flow event to process\n */\n handleFlowEvent(event: FlowEvent): void {\n // For FlowStart, accept if we don't have a jobId yet (first event)\n // This handles the race condition where flow events arrive before onJobStart callback\n if (event.eventType === EventType.FlowStart && !this.state.jobId) {\n this.updateState({\n jobId: event.jobId,\n flowStarted: true,\n status: \"processing\",\n });\n return;\n }\n\n // Only handle events for the current job\n if (!this.state.jobId || event.jobId !== this.state.jobId) {\n // console.warn(\"[FlowManager] IGNORING event - jobId mismatch\");\n return;\n }\n\n switch (event.eventType) {\n case EventType.FlowStart:\n this.updateState({\n flowStarted: true,\n status: \"processing\",\n });\n break;\n\n case EventType.NodeStart:\n this.updateState({\n status: \"processing\",\n currentNodeName: event.nodeName,\n currentNodeType: event.nodeType,\n });\n break;\n\n case EventType.NodePause:\n // When input node pauses, it's waiting for upload - switch to uploading state\n this.updateState({\n status: \"uploading\",\n currentNodeName: event.nodeName,\n // NodePause doesn't have nodeType, keep previous value\n });\n break;\n\n case EventType.NodeResume:\n // When node resumes, upload is complete - switch to processing state\n this.updateState({\n status: \"processing\",\n currentNodeName: event.nodeName,\n currentNodeType: event.nodeType,\n });\n break;\n\n case EventType.NodeEnd:\n this.updateState({\n status:\n this.state.status === \"uploading\"\n ? \"processing\"\n : this.state.status,\n currentNodeName: null,\n currentNodeType: null,\n });\n break;\n\n case EventType.FlowEnd: {\n // Get typed outputs from the event\n const flowOutputs = event.outputs || null;\n\n // Call onFlowComplete with full typed outputs\n if (flowOutputs && this.callbacks.onFlowComplete) {\n this.callbacks.onFlowComplete(flowOutputs);\n }\n\n // Call onSuccess with full typed outputs\n if (flowOutputs && flowOutputs.length > 0 && this.callbacks.onSuccess) {\n this.callbacks.onSuccess(flowOutputs);\n }\n\n this.updateState({\n status: \"success\",\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs,\n });\n\n this.abortController = null;\n break;\n }\n\n case EventType.FlowError: {\n const error = new Error(event.error);\n this.updateState({\n status: \"error\",\n error,\n });\n this.callbacks.onError?.(error);\n this.abortController = null;\n break;\n }\n\n case EventType.NodeError: {\n const error = new Error(event.error);\n this.updateState({\n status: \"error\",\n error,\n });\n this.callbacks.onError?.(error);\n this.abortController = null;\n break;\n }\n\n case EventType.FlowCancel:\n this.updateState({\n status: \"aborted\",\n });\n this.callbacks.onAbort?.();\n this.abortController = null;\n break;\n }\n }\n\n /**\n * Handle upload progress events from the event subscription\n * This method should be called by the framework wrapper when it receives upload progress events\n *\n * @param uploadId - The unique identifier for this upload\n * @param bytesUploaded - Number of bytes uploaded\n * @param totalBytes - Total bytes to upload, null if unknown\n */\n handleUploadProgress(\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ): void {\n // Calculate progress percentage\n const progress =\n totalBytes && totalBytes > 0\n ? 
Math.round((bytesUploaded / totalBytes) * 100)\n : 0;\n\n this.updateState({\n bytesUploaded,\n totalBytes,\n progress,\n });\n\n // Also update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(this.currentSingleInputNodeId);\n if (inputState) {\n inputState.status = \"uploading\";\n inputState.progress = progress;\n inputState.bytesUploaded = bytesUploaded;\n inputState.totalBytes = totalBytes;\n }\n }\n\n this.callbacks.onProgress?.(uploadId, bytesUploaded, totalBytes);\n }\n\n /**\n * Execute a flow with the provided input data.\n *\n * The input type and execution behavior depends on the generic TInput type:\n * - File/Blob: Initiates chunked upload with progress tracking\n * - string (URL): Directly passes URL to flow for fetching\n * - object: Passes structured data to flow input nodes\n *\n * @param input - Input data for the flow execution (type determined by TInput generic)\n *\n * @example\n * ```typescript\n * // File upload\n * await manager.upload(fileObject);\n *\n * // URL fetch\n * await manager.upload(\"https://example.com/image.jpg\");\n * ```\n */\n async upload(input: TInput): Promise<void> {\n // Determine totalBytes from input if possible (File/Blob on browser platforms)\n // For non-file inputs (URLs, structured data), totalBytes remains null\n let totalBytes: number | null = null;\n if (input && typeof input === \"object\") {\n if (\"size\" in input && typeof input.size === \"number\") {\n totalBytes = input.size;\n }\n }\n\n // Reset state but keep reference for potential retries\n this.updateState({\n status: \"uploading\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes,\n error: null,\n jobId: null,\n flowStarted: false,\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs: null,\n });\n\n try {\n // Build internal upload options with our callbacks\n const internalOptions: InternalFlowUploadOptions = {\n onJobStart: (jobId: string) => {\n this.updateState({\n jobId,\n });\n this.options?.onJobStart?.(jobId);\n },\n onProgress: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => {\n this.handleUploadProgress(uploadId, bytesUploaded, totalBytes);\n this.options?.onProgress?.(uploadId, bytesUploaded, totalBytes);\n },\n onChunkComplete: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => {\n this.callbacks.onChunkComplete?.(\n chunkSize,\n bytesAccepted,\n bytesTotal,\n );\n this.options?.onChunkComplete?.(chunkSize, bytesAccepted, bytesTotal);\n },\n onSuccess: (_result: UploadFile) => {\n // Note: This gets called when upload phase completes, not flow completion\n // Flow completion is handled by FlowEnd event\n this.updateState({\n progress: 100,\n });\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(\n this.currentSingleInputNodeId,\n );\n if (inputState) {\n inputState.status = \"complete\";\n inputState.progress = 100;\n }\n this.currentSingleInputNodeId = null;\n }\n // Don't call callbacks.onSuccess here - wait for FlowEnd event with TOutput\n },\n onError: (error: Error) => {\n this.updateState({\n status: \"error\",\n error,\n });\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(\n this.currentSingleInputNodeId,\n );\n if (inputState) {\n inputState.status = \"error\";\n 
inputState.error = error;\n }\n this.currentSingleInputNodeId = null;\n }\n this.callbacks.onError?.(error);\n this.options?.onError?.(error);\n this.abortController = null;\n },\n onAbort: () => {\n this.updateState({\n status: \"aborted\",\n });\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(\n this.currentSingleInputNodeId,\n );\n if (inputState) {\n inputState.status = \"error\";\n inputState.error = new Error(\"Upload aborted\");\n }\n this.currentSingleInputNodeId = null;\n }\n this.callbacks.onAbort?.();\n this.options?.onAbort?.();\n this.abortController = null;\n },\n onShouldRetry: this.options?.onShouldRetry,\n };\n\n // Start the flow upload\n this.abortController = await this.flowUploadFn(\n input,\n this.options.flowConfig,\n internalOptions,\n );\n } catch (error) {\n // Handle errors from upload initiation\n const uploadError =\n error instanceof Error ? error : new Error(String(error));\n this.updateState({\n status: \"error\",\n error: uploadError,\n });\n\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(this.currentSingleInputNodeId);\n if (inputState) {\n inputState.status = \"error\";\n inputState.error = uploadError;\n }\n this.currentSingleInputNodeId = null;\n }\n\n this.callbacks.onError?.(uploadError);\n this.options?.onError?.(uploadError);\n this.abortController = null;\n }\n }\n\n /**\n * Abort the current flow upload\n */\n abort(): void {\n if (this.abortController) {\n this.abortController.abort();\n // Note: State update happens in onAbort callback or FlowCancel event\n }\n }\n\n /**\n * Pause the current flow upload\n */\n pause(): void {\n if (this.abortController) {\n this.abortController.pause();\n }\n }\n\n /**\n * Reset the flow upload state to idle\n */\n reset(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n\n // Abort all input-specific controllers\n for (const inputState of this.inputStates.values()) {\n if (inputState.abortController) {\n inputState.abortController.abort();\n }\n }\n this.inputStates.clear();\n this.currentSingleInputNodeId = null;\n\n this.state = { ...initialState };\n this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Aggregate progress across multiple inputs.\n * Uses simple average for Phase 1 (size-weighted can be added in Phase 2).\n */\n private aggregateProgress(): void {\n if (this.inputStates.size === 0) {\n return;\n }\n\n const states = Array.from(this.inputStates.values());\n\n // Calculate average progress across all inputs\n const totalProgress = states.reduce(\n (sum, state) => sum + state.progress,\n 0,\n );\n const avgProgress = Math.round(totalProgress / states.length);\n\n // Calculate total bytes (sum of all inputs)\n const totalBytes = states.reduce(\n (sum, state) => sum + (state.totalBytes || 0),\n 0,\n );\n const bytesUploaded = states.reduce(\n (sum, state) => sum + state.bytesUploaded,\n 0,\n );\n\n this.updateState({\n progress: avgProgress,\n bytesUploaded,\n totalBytes: totalBytes > 0 ? totalBytes : null,\n });\n }\n\n /**\n * Execute a flow with multiple inputs (generic execution path).\n *\n * This method:\n * 1. Builds FlowInputs with auto-detection\n * 2. Validates inputs (optional, to be added in integration)\n * 3. Executes flow with the inputs\n * 4. 
Tracks multi-input state\n *\n * @param inputs - Map of nodeId to raw input data\n *\n * @example\n * ```typescript\n * await manager.executeFlow({\n * \"file-input\": myFile,\n * \"url-input\": \"https://example.com/image.jpg\"\n * });\n * ```\n */\n async executeFlow(inputs: Record<string, unknown>): Promise<void> {\n const inputEntries = Object.entries(inputs);\n\n if (inputEntries.length === 0) {\n throw new Error(\"No inputs provided to executeFlow\");\n }\n\n // Initialize input states for tracking\n this.inputStates.clear();\n for (const [nodeId, data] of Object.entries(inputs)) {\n const inputType = detectInputType(data);\n this.inputStates.set(nodeId, {\n nodeId,\n type: inputType,\n status: \"pending\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes:\n inputType === \"file\" &&\n data &&\n typeof data === \"object\" &&\n \"size\" in data &&\n typeof data.size === \"number\"\n ? data.size\n : null,\n error: null,\n abortController: null,\n });\n }\n\n // For single input, use the standard upload path\n if (inputEntries.length === 1) {\n const firstEntry = inputEntries[0];\n if (!firstEntry) {\n throw new Error(\"No inputs provided to executeFlow\");\n }\n const [nodeId, firstData] = firstEntry;\n // Track nodeId so upload() callbacks can update inputStates\n this.currentSingleInputNodeId = nodeId;\n await this.upload(firstData as TInput);\n return;\n }\n\n // For multiple inputs, use the multi-input upload function\n if (!this.multiInputUploadFn) {\n throw new Error(\n \"Multi-input flows require multiInputUploadFn to be provided in FlowManager constructor. \" +\n \"Platform packages should implement MultiInputFlowUploadFunction.\",\n );\n }\n\n // Reset state for multi-input flow\n this.updateState({\n status: \"uploading\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes: null,\n error: null,\n jobId: null,\n flowStarted: false,\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs: null,\n });\n\n try {\n // Build internal options with callbacks\n const internalOptions: InternalFlowUploadOptions = {\n onJobStart: (jobId: string) => {\n this.updateState({ jobId });\n this.options?.onJobStart?.(jobId);\n },\n onProgress: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => {\n // Global progress tracking (will be overridden by aggregateProgress)\n this.options?.onProgress?.(uploadId, bytesUploaded, totalBytes);\n },\n onSuccess: (_result: UploadFile) => {\n // Flow completion is handled by FlowEnd event\n this.updateState({ progress: 100 });\n },\n onError: (error: Error) => {\n this.updateState({ status: \"error\", error });\n this.callbacks.onError?.(error);\n this.options?.onError?.(error);\n this.abortController = null;\n },\n onAbort: () => {\n this.updateState({ status: \"aborted\" });\n this.callbacks.onAbort?.();\n this.options?.onAbort?.();\n this.abortController = null;\n },\n onShouldRetry: this.options?.onShouldRetry,\n };\n\n // Multi-input callbacks for per-input tracking\n const multiInputCallbacks: MultiInputCallbacks = {\n onInputProgress: (nodeId, progress, bytesUploaded, totalBytes) => {\n // Update input state\n const inputState = this.inputStates.get(nodeId);\n if (inputState) {\n inputState.status = \"uploading\";\n inputState.progress = progress;\n inputState.bytesUploaded = bytesUploaded;\n inputState.totalBytes = totalBytes;\n }\n\n // Aggregate progress across all inputs\n this.aggregateProgress();\n },\n onInputComplete: (nodeId) => {\n const inputState = this.inputStates.get(nodeId);\n if (inputState) {\n 
inputState.status = \"complete\";\n inputState.progress = 100;\n }\n this.aggregateProgress();\n },\n onInputError: (nodeId, error) => {\n const inputState = this.inputStates.get(nodeId);\n if (inputState) {\n inputState.status = \"error\";\n inputState.error = error;\n }\n },\n };\n\n // Execute multi-input flow\n this.abortController = await this.multiInputUploadFn(\n inputs,\n this.options.flowConfig,\n internalOptions,\n multiInputCallbacks,\n );\n } catch (error) {\n const uploadError =\n error instanceof Error ? error : new Error(String(error));\n this.updateState({\n status: \"error\",\n error: uploadError,\n });\n this.callbacks.onError?.(uploadError);\n this.options?.onError?.(uploadError);\n this.abortController = null;\n }\n }\n\n /**\n * Get the input execution states (for multi-input flows).\n * @returns Map of nodeId to input state\n */\n getInputStates(): ReadonlyMap<string, InputExecutionState> {\n return this.inputStates;\n }\n\n /**\n * Clean up resources (call when disposing the manager)\n */\n cleanup(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n\n // Cleanup input-specific controllers\n for (const inputState of this.inputStates.values()) {\n if (inputState.abortController) {\n inputState.abortController.abort();\n }\n }\n this.inputStates.clear();\n this.currentSingleInputNodeId = null;\n }\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadOptions } from \"../types/upload-options\";\n\n/**\n * Upload status representing the current state of an upload\n */\nexport type UploadStatus =\n | \"idle\"\n | \"uploading\"\n | \"success\"\n | \"error\"\n | \"aborted\";\n\n/**\n * Complete upload state\n */\nexport interface UploadState {\n /** Current status of the upload */\n status: UploadStatus;\n /** Upload progress percentage (0-100) */\n progress: number;\n /** Number of bytes uploaded */\n bytesUploaded: number;\n /** Total bytes to upload, null if unknown/deferred */\n totalBytes: number | null;\n /** Error if upload failed */\n error: Error | null;\n /** Result if upload succeeded */\n result: UploadFile | null;\n}\n\n/**\n * Callbacks that UploadManager invokes during the upload lifecycle\n */\nexport interface UploadManagerCallbacks {\n /**\n * Called when the upload state changes\n */\n onStateChange: (state: UploadState) => void;\n\n /**\n * Called when upload progress updates\n * @param uploadId - The unique identifier for this upload\n * @param bytesUploaded - Number of bytes uploaded\n * @param totalBytes - Total bytes to upload, null if unknown\n */\n onProgress?: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n\n /**\n * Called when a chunk completes\n * @param chunkSize - Size of the completed chunk\n * @param bytesAccepted - Total bytes accepted so far\n * @param bytesTotal - Total bytes to upload, null if unknown\n */\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n\n /**\n * Called when upload completes successfully\n * @param result - The uploaded file result\n */\n onSuccess?: (result: UploadFile) => void;\n\n /**\n * Called when upload fails with an error\n * @param error - The error that occurred\n */\n onError?: (error: Error) => void;\n\n /**\n * Called when upload is aborted\n */\n onAbort?: () => void;\n}\n\n/**\n * Generic upload input type - can be any value that the upload client accepts\n */\nexport type UploadInput = unknown;\n\n/**\n * Abort 
controller interface for canceling uploads\n */\nexport interface UploadAbortController {\n abort: () => void;\n}\n\n/**\n * Upload function that performs the actual upload.\n * Returns a promise that resolves to an abort controller.\n */\nexport type UploadFunction<\n TInput = UploadInput,\n TOptions extends UploadOptions = UploadOptions,\n> = (input: TInput, options: TOptions) => Promise<UploadAbortController>;\n\n/**\n * Initial state for a new upload\n */\nconst initialState: UploadState = {\n status: \"idle\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes: null,\n error: null,\n result: null,\n};\n\n/**\n * Platform-agnostic upload manager that handles upload state machine,\n * progress tracking, error handling, abort, reset, and retry logic.\n *\n * Framework packages (React, Vue, React Native) should wrap this manager\n * with framework-specific hooks/composables.\n *\n * @example\n * ```typescript\n * const uploadFn = (input, options) => client.upload(input, options);\n * const manager = new UploadManager(uploadFn, {\n * onStateChange: (state) => setState(state),\n * onProgress: (progress) => console.log(`${progress}%`),\n * onSuccess: (result) => console.log('Upload complete:', result),\n * onError: (error) => console.error('Upload failed:', error),\n * });\n *\n * await manager.upload(file);\n * ```\n */\nexport class UploadManager<\n TInput = UploadInput,\n TOptions extends UploadOptions = UploadOptions,\n> {\n private state: UploadState;\n private abortController: UploadAbortController | null = null;\n private lastInput: TInput | null = null;\n private uploadId: string | null = null;\n\n /**\n * Create a new UploadManager\n *\n * @param uploadFn - Upload function to use for uploads\n * @param callbacks - Callbacks to invoke during upload lifecycle\n * @param options - Upload configuration options\n */\n constructor(\n private readonly uploadFn: UploadFunction<TInput, TOptions>,\n private readonly callbacks: UploadManagerCallbacks,\n private readonly options?: TOptions,\n ) {\n this.state = { ...initialState };\n }\n\n /**\n * Get the current upload state\n */\n getState(): UploadState {\n return { ...this.state };\n }\n\n /**\n * Check if an upload is currently active\n */\n isUploading(): boolean {\n return this.state.status === \"uploading\";\n }\n\n /**\n * Check if the upload can be retried\n */\n canRetry(): boolean {\n return (\n (this.state.status === \"error\" || this.state.status === \"aborted\") &&\n this.lastInput !== null\n );\n }\n\n /**\n * Update the internal state and notify callbacks\n */\n private updateState(update: Partial<UploadState>): void {\n this.state = { ...this.state, ...update };\n this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Start uploading a file or input\n *\n * @param input - File or input to upload (type depends on platform)\n */\n async upload(input: TInput): Promise<void> {\n // Determine totalBytes from input if possible (File/Blob on browser platforms)\n let totalBytes: number | null = null;\n if (input && typeof input === \"object\") {\n if (\"size\" in input && typeof input.size === \"number\") {\n totalBytes = input.size;\n }\n }\n\n // Reset state but keep reference for retries\n this.updateState({\n status: \"uploading\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes,\n error: null,\n result: null,\n });\n\n this.lastInput = input;\n\n try {\n // Build complete options with our callbacks\n const uploadOptions = {\n ...this.options,\n onProgress: (\n uploadId: string,\n bytesUploaded: number,\n bytes: number | null,\n ) 
=> {\n // Store uploadId on first progress callback\n if (!this.uploadId) {\n this.uploadId = uploadId;\n }\n\n const progressPercent = bytes\n ? Math.round((bytesUploaded / bytes) * 100)\n : 0;\n\n this.updateState({\n progress: progressPercent,\n bytesUploaded,\n totalBytes: bytes,\n });\n\n this.callbacks.onProgress?.(uploadId, bytesUploaded, bytes);\n this.options?.onProgress?.(uploadId, bytesUploaded, bytes);\n },\n onChunkComplete: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => {\n this.callbacks.onChunkComplete?.(\n chunkSize,\n bytesAccepted,\n bytesTotal,\n );\n this.options?.onChunkComplete?.(chunkSize, bytesAccepted, bytesTotal);\n },\n onSuccess: (result: UploadFile) => {\n this.updateState({\n status: \"success\",\n result,\n progress: 100,\n bytesUploaded: result.size || 0,\n totalBytes: result.size || null,\n });\n\n this.callbacks.onSuccess?.(result);\n this.options?.onSuccess?.(result);\n this.abortController = null;\n },\n onError: (error: Error) => {\n this.updateState({\n status: \"error\",\n error,\n });\n\n this.callbacks.onError?.(error);\n this.options?.onError?.(error);\n this.abortController = null;\n },\n onAbort: () => {\n this.updateState({\n status: \"aborted\",\n });\n\n this.callbacks.onAbort?.();\n this.options?.onAbort?.();\n this.abortController = null;\n },\n onShouldRetry: this.options?.onShouldRetry,\n } as TOptions;\n\n // Start the upload\n this.abortController = await this.uploadFn(input, uploadOptions);\n } catch (error) {\n // Handle errors from upload initiation\n const uploadError =\n error instanceof Error ? error : new Error(String(error));\n this.updateState({\n status: \"error\",\n error: uploadError,\n });\n\n this.callbacks.onError?.(uploadError);\n this.options?.onError?.(uploadError);\n this.abortController = null;\n }\n }\n\n /**\n * Abort the current upload\n */\n abort(): void {\n if (this.abortController) {\n this.abortController.abort();\n // Note: State update happens in onAbort callback\n }\n }\n\n /**\n * Reset the upload state to idle\n */\n reset(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n\n this.state = { ...initialState };\n this.lastInput = null;\n this.uploadId = null;\n this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Retry the last failed or aborted upload\n */\n retry(): void {\n if (this.canRetry() && this.lastInput !== null) {\n this.upload(this.lastInput);\n }\n }\n\n /**\n * Clean up resources (call when disposing the manager)\n */\n cleanup(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n this.uploadId = null;\n 
}\n}\n"],"mappings":"2OAoEA,IAAa,EAAb,KAAyB,CAWvB,YAAY,EAA2B,aAVR,EAAE,kBACX,mBAEA,EAQpB,KAAK,OAAS,CACZ,aAAc,EAAO,aACrB,cAAe,EAAO,eAAiB,EAAO,aAAe,EAC7D,UAAW,EAAO,WAAa,IAChC,CA2BH,IAAI,EAAyC,CAS3C,OARA,KAAK,OAAO,KAAK,EAAM,CACvB,KAAK,aAAe,EAAM,OAC1B,KAAK,YAAc,KAAK,KAAK,CAEzB,KAAK,aAAa,CACb,KAAK,OAAO,CAGd,KA0BT,OAA8B,CAC5B,GAAI,KAAK,OAAO,SAAW,EACzB,OAAO,KAGT,IAAM,EAAW,IAAI,WAAW,KAAK,YAAY,CAC7C,EAAS,EAEb,IAAK,IAAM,KAAS,KAAK,OACvB,EAAS,IAAI,EAAO,EAAO,CAC3B,GAAU,EAAM,OAGlB,IAAMA,EAAwB,CAC5B,KAAM,EACN,KAAM,KAAK,YACX,UAAW,KAAK,YACjB,CAGD,OADA,KAAK,OAAO,CACL,EAyBT,aAAuB,CAKrB,GAJI,KAAK,aAAe,KAAK,OAAO,cAIhC,KAAK,aAAe,KAAK,OAAO,cAClC,MAAO,GAGT,IAAM,EAAmB,KAAK,KAAK,CAAG,KAAK,YAK3C,OAJI,KAAK,OAAO,OAAS,GAAK,EAAmB,KAAK,OAAO,UA0B/D,eAKE,CACA,MAAO,CACL,KAAM,KAAK,YACX,WAAY,KAAK,OAAO,OACxB,eAAgB,KAAK,aAAa,CAClC,iBAAkB,KAAK,KAAK,CAAG,KAAK,YACrC,CAwBH,gBAA0B,CACxB,OAAO,KAAK,OAAO,OAAS,EAS9B,OAAc,CACZ,KAAK,OAAS,EAAE,CAChB,KAAK,YAAc,EACnB,KAAK,YAAc,EAQrB,iBAA0B,CACxB,OAAO,KAAK,OAAO,eClQV,EAAb,KAAkD,CAChD,YACE,EACA,EACA,CAFQ,KAAA,WAAA,EACA,KAAA,YAAA,EAOV,MAAM,QACJ,EACA,EAA8B,EAAE,CACT,CACvB,GAAI,CAEF,IAAM,EAAuB,MAAM,KAAK,sBACtC,EAAQ,SAAW,EAAE,CACrB,EACD,CAGD,OAAO,MAAM,KAAK,WAAW,QAAQ,EAAK,CACxC,GAAG,EACH,QAAS,EAET,YACE,KAAK,YAAY,SAAS,GAAK,WAC/B,KAAK,YAAY,SAAS,GAAK,mBAC3B,OACC,EAAQ,aAAe,UAC/B,CAAC,OACK,EAAO,CAOd,MALI,aAAiB,OAAS,EAAM,QAAQ,SAAS,OAAO,CACpD,GAYZ,MAAc,sBACZ,EACA,EACiC,CAEjC,GAAI,sBAAuB,KAAK,YAE9B,OAAO,MAAM,KAAK,YAAY,kBAAkB,EAAQ,CAG1D,GAAI,gBAAiB,KAAK,YAAa,CAErC,IAAM,EAAQ,KAAK,oBAAoB,EAAI,CAC3C,OAAO,MAAM,KAAK,YAAY,YAAY,EAAS,EAAM,CAI3D,OAAO,EAOT,oBAA4B,EAAiC,CAO3D,IAAM,EAAc,EAAI,MAAM,0BAA0B,CACxD,GAAI,EACF,OAAO,EAAY,GAGrB,IAAM,EAAY,EAAI,MAAM,wBAAwB,CACpD,GAAI,EACF,OAAO,EAAU,GAGnB,IAAM,EAAW,EAAI,MAAM,wBAAwB,CACnD,GAAI,EACF,OAAO,EAAS,GAUpB,YAAa,CACX,OAAO,KAAK,WAAW,YAAY,CAGrC,oBAAqB,CACnB,OAAO,KAAK,WAAW,oBAAoB,CAG7C,OAAQ,CACN,KAAK,WAAW,OAAO,CAGzB,MAAM,OAAQ,CACZ,MAAM,KAAK,WAAW,OAAO,CAG/B,MAAM,kBAAkB,EAAgB,CACtC,MAAM,KAAK,WAAW,kBAAkB,EAAK,CAM/C,gBAA8B,CAC5B,OAAO,KAAK,cCtJH,EAAb,KAA6B,CAC3B,YAAY,EAAyD,CAAjD,KAAA,KAAA,EAEpB,SAAU,CACR,OAAO,KAAK,OCSH,EAAb,cAAuC,CAAgB,CACrD,YACE,EACA,EACA,EACA,CACA,MAAM,SAAS,CAJP,KAAA,OAAA,EACA,KAAA,gBAAA,EACA,KAAA,OAAA,EAaV,MAAM,kBACJ,EAAkC,EAAE,CACH,CACjC,GAAI,CACF,GAAI,CAAC,KAAK,OAAO,eACf,OAAO,EAIT,IAAM,EAAc,MAAM,QAAQ,QAAQ,KAAK,OAAO,gBAAgB,CAAC,CAGvE,GAAI,CAAC,GAAe,OAAO,GAAgB,SACzC,MAAU,MACR,qEACD,CAIH,IAAM,EAAiB,CAAE,GAAG,EAAS,CAcrC,OAZI,EAAY,UACd,KAAK,gBAAgB,EAAY,QAAQ,CACzC,OAAO,OAAO,EAAgB,EAAY,QAAQ,EAMhD,EAAY,SACd,KAAK,cAAc,EAAgB,EAAY,QAAQ,CAGlD,QACA,EAAO,CAEd,IAAM,EAAU,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACtE,MAAU,MAAM,sCAAsC,IAAU,EAOpE,gBAAwB,EAAuC,CAC7D,GAAI,OAAO,GAAY,WAAY,EACjC,MAAU,MAAM,4BAA4B,CAG9C,IAAK,GAAM,CAAC,EAAK,KAAU,OAAO,QAAQ,EAAQ,CAChD,GAAI,OAAO,GAAQ,UAAY,OAAO,GAAU,SAC9C,MAAU,MACR,sDAAsD,EAAI,IAAI,OAAO,EAAM,GAC5E,CAUP,cACE,EACA,EACM,CAIN,GAFkB,KAAK,gBAAgB,WAAW,CAOhD,KAAK,OAAO,KACV,2IAED,KACI,CAEL,IAAM,EAAe,OAAO,QAAQ,EAAQ,CACzC,KAAK,CAAC,EAAK,KAAW,GAAG,EAAI,GAAG,IAAQ,CACxC,KAAK,KAAK,CAET,IACF,EAAQ,OAAS,MC5GZ,EAAb,cAAmC,CAAgB,CACjD,aAAc,CACZ,MAAM,UAAU,CASlB,MAAM,kBACJ,EAAkC,EAAE,CACH,CACjC,OAAO,EAMT,WAAW,EAAsB,EAOjC,gBAAuB,ICFZ,EAAb,cAAgD,CAAgB,CAO9D,YACE,EACA,EACA,CACA,MAAM,mBAAmB,CAHjB,KAAA,OAAA,EACA,KAAA,WAAA,kBAPW,IAAI,qBAGiB,KAe1C,MAAM,YAAqC,CACzC,GAAI,CAEF,IAAM,EAAW,MAAM,KAAK,WAAW,QACrC,GAAG,KAAK,OAAO,cAAc,GAAG,KAAK,OAAO,WAC5C,CACE,OAAQ,MACR,QAAS,CACP,eAAgB,mBACjB,CACF,CACF,CAGD,GAAI,CAAC,EAAS,GAAI,CAChB,IAAM,EAAY,MAAM,EAAS,MAAM,CACnC,EAAe,wBAAwB,EAAS,SAEpD,GAAI,CACF,IAAM,EAAY,KAAK,MAAM,EAAU,CACvC,EAAe,EAAU,OAAS,EAAU,SAAW,OACjD,CAEN,EAAe,GAAa,EAAS,YAAc,EAGrD,MAAU,
MAAM,EAAa,CAI/B,IAAM,EAAQ,MAAM,EAAS,MAAM,CAEnC,GAAI,CAAC,EAAK,OAAS,OAAO,EAAK,OAAU,SACvC,MAAU,MACR,sEACD,CAGH,OAAO,QACA,EAAO,CAKd,MAHI,aAAiB,MACT,MAAM,+BAA+B,EAAM,UAAU,CAEvD,MAAM,+BAA+B,OAAO,EAAM,GAAG,EAUnE,MAAc,gBAAgB,EAAiC,CAE7D,GAAI,EAAO,CACT,IAAM,EAAS,KAAK,WAAW,IAAI,EAAM,CACzC,GAAI,GAAU,CAAC,KAAK,eAAe,EAAO,CACxC,OAAO,EAAO,MAKlB,GAAI,CAAC,GAAS,KAAK,aAAe,CAAC,KAAK,eAAe,KAAK,YAAY,CACtE,OAAO,KAAK,YAAY,MAI1B,IAAM,EAAgB,MAAM,KAAK,YAAY,CAGvC,EAAY,EAAc,UAC5B,KAAK,KAAK,CAAG,EAAc,UAAY,IACvC,IAAA,GAEES,EAA2B,CAC/B,MAAO,EAAc,MACrB,YACD,CASD,OANI,EACF,KAAK,WAAW,IAAI,EAAO,EAAY,CAEvC,KAAK,YAAc,EAGd,EAAc,MAOvB,eAAuB,EAA8B,CAQnD,OAPK,EAAO,UAOL,KAAK,KAAK,CAAG,EAAO,UADV,GAAK,IAJb,GAgBX,MAAM,YACJ,EAAkC,EAAE,CACpC,EACiC,CACjC,GAAI,CAEF,IAAM,EAAQ,MAAM,KAAK,gBAAgB,EAAM,CAG/C,MAAO,CACL,GAAG,EACH,cAAe,UAAU,IAC1B,OACM,EAAO,CACd,IAAM,EAAU,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACtE,MAAU,MAAM,gCAAgC,IAAU,EAU9D,WAAW,EAAqB,CAC9B,KAAK,WAAW,OAAO,EAAM,CAO/B,gBAAuB,CACrB,KAAK,WAAW,OAAO,CACvB,KAAK,YAAc,KAMrB,eAGE,CACA,MAAO,CACL,eAAgB,KAAK,WAAW,KAChC,eAAgB,KAAK,cAAgB,KACtC,GCxKL,MAAMC,MAA6B,GAmCnC,SAAgB,EACd,EACA,EAAqB,EACb,CACR,MAAO,CACL,IAAM,GAAoB,CACpB,GACF,EAAM,EAAQ,EAGlB,KAAO,GAAoB,CACrB,GACF,EAAM,EAAQ,EAGlB,MAAQ,GAAoB,CACtB,GACF,EAAM,EAAQ,EAGnB,CC5FH,IAAa,EAAb,KAAiC,CAC/B,YAAY,EAA6C,CAArC,KAAA,aAAA,EAEpB,iBAAyC,CACvC,OAAO,KAAK,aAGd,uBAAuB,EAAmC,CACxD,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,KAAK,aAAa,wBAC3B,IAAK,SACH,MAAO,GACT,QACE,MAAO,MASf,MAAaE,EAAmD,CAC9D,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,qBAAsB,EACtB,aAAc,GAAK,KACnB,aAAc,IAAM,KAAO,KAC3B,SAAU,IACV,iBAAkB,EAAI,KAAO,KAC7B,sBAAuB,GACxB,CCuGD,IAAa,EAAb,KAA4B,CAU1B,YAAY,EAA+B,EAAE,CAAE,cATb,EAAE,CAUlC,KAAK,OAAS,CACZ,WAAY,EAAO,YAAc,IACjC,gBAAiB,EAAO,iBAAmB,GAC3C,uBAAwB,EAAO,wBAA0B,EACzD,cAAe,EAAO,eAAiB,GAAK,KAC5C,cAAe,EAAO,eAAiB,EAAI,KAAO,KAClD,kBAAmB,EAAO,mBAAqB,GAChD,CAED,KAAK,gBAAkB,KAAK,oBAAoB,CAWlD,UAAU,EAA4B,CACpC,KAAK,QAAQ,KAAK,EAAO,CAGrB,KAAK,QAAQ,OAAS,KAAK,OAAO,aACpC,KAAK,QAAU,KAAK,QAAQ,MAAM,CAAC,KAAK,OAAO,WAAW,EAG5D,KAAK,eAAe,CAiCtB,aACE,EACA,EACA,EACA,EACM,CACN,KAAK,UAAU,CACb,OACA,WACA,UACA,UAAW,KAAK,KAAK,CACrB,UACD,CAAC,CAmBJ,mBAAoC,CAClC,MAAO,CAAE,GAAG,KAAK,gBAAiB,CA+BpC,qBAAwC,CACtC,GAAI,KAAK,QAAQ,OAAS,KAAK,OAAO,uBACpC,MAAO,CAAE,KAAM,UAAW,WAAY,EAAG,CAG3C,IAAM,EAAgB,KAAK,4BAA4B,CACvD,GAAI,EAAc,OAAS,KAAK,OAAO,uBACrC,MAAO,CAAE,KAAM,UAAW,WAAY,GAAK,CAG7C,IAAM,EAAS,EAAc,IAC1B,GAAW,EAAO,MAAQ,EAAO,SAAW,KAC9C,CACK,EACJ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OAGnD,EACJ,EAAO,QAAQ,EAAK,IAAU,GAAO,EAAQ,IAAa,EAAG,EAAE,CAC/D,EAAO,OAEH,EADS,KAAK,KAAK,EAAS,CACM,EAGlC,EAAa,KAAK,IACtB,EACA,KAAK,QAAQ,QAAU,KAAK,OAAO,uBAAyB,GAC7D,CAeD,OAbI,EAAyB,KAAK,OAAO,kBAChC,CAAE,KAAM,WAAY,aAAY,CAGrC,EAAW,KAAK,OAAO,cAClB,CAAE,KAAM,OAAQ,aAAY,CAGjC,EAAW,KAAK,OAAO,cAClB,CAAE,KAAM,OAAQ,aAAY,CAI9B,CAAE,KAAM,OAAQ,WAAY,EAAa,GAAK,CAkBvD,sBAA+B,CAC7B,IAAM,EAAgB,KAAK,2BAA2B,GAAG,CACzD,GAAI,EAAc,SAAW,EAAG,MAAO,GAGvC,IAAI,EAAc,EACd,EAAc,EASlB,OAPA,EAAc,SAAS,EAAQ,IAAU,CACvC,IAAM,EAAS,EAAQ,EACjB,EAAa,EAAO,MAAQ,EAAO,SAAW,KACpD,GAAe,EAAa,EAC5B,GAAe,GACf,CAEK,EAAc,EAAI,EAAc,EAAc,EAmBvD,OAAc,CACZ,KAAK,QAAU,EAAE,CACjB,KAAK,gBAAkB,KAAK,oBAAoB,CAGlD,2BAAmC,EAAgC,CACjE,IAAM,EAAa,KAAK,QAAQ,OAAQ,GAAW,EAAO,QAAQ,CAClE,OAAO,EAAQ,EAAW,MAAM,CAAC,EAAM,CAAG,EAG5C,eAA8B,CAC5B,IAAM,EAAoB,KAAK,QAAQ,OAAQ,GAAW,EAAO,QAAQ,CACnE,EAAgB,KAAK,QAAQ,OAC7B,EAAkB,EAAkB,OAE1C,GAAI,IAAkB,EAAG,CACvB,KAAK,gBAAkB,KAAK,oBAAoB,CAChD,OAGF,IAAM,EAAa,EAAkB,QAClC,EAAK,IAAW,EAAM,EAAO,KAC9B,EACD,CACK,EAAY,EAAkB,QACjC,EAAK,IAAW,EAAM,EAAO,SAC9B,EACD,CAEK,EAAe,EAAY,EAAI,GAAc,EAAY,KAAQ,EACjE,EAAc,EAAkB,EAChC,EAAY,EAAI,EAGhB,EAAqB,KAA
K,QAAQ,OACrC,GAAW,EAAO,UAAY,IAAA,GAChC,CASD,KAAK,gBAAkB,CACrB,eACA,QATA,EAAmB,OAAS,EACxB,EAAmB,QAChB,EAAK,IAAW,GAAO,EAAO,SAAW,GAC1C,EACD,CAAG,EAAmB,OACvB,EAKJ,cACA,YACA,gBACA,aACA,YACD,CAGH,oBAA6C,CAC3C,MAAO,CACL,aAAc,EACd,QAAS,EACT,YAAa,EACb,UAAW,EACX,cAAe,EACf,WAAY,EACZ,UAAW,EACZ,GCpZL,MAAMI,EAAuD,CAC3D,aAAc,CACZ,KAAM,eACN,aAAc,GAAK,KACnB,aAAc,EAAI,KAAO,KACzB,iBAAkB,IAAM,KACxB,eAAgB,GACjB,CACD,SAAU,CACR,KAAM,WACN,aAAc,IAAM,KACpB,aAAc,EAAI,KAAO,KACzB,iBAAkB,IAAM,KACxB,eAAgB,GACjB,CACD,WAAY,CACV,KAAM,aACN,aAAc,IAAM,KACpB,aAAc,GAAK,KAAO,KAC1B,iBAAkB,KAAO,KACzB,eAAgB,GACjB,CACF,CAEKC,EAA4D,CAChE,aAAc,CACZ,KAAM,kBACN,aAAc,EAAI,KAAO,KACzB,aAAc,GAAK,KAAO,KAC1B,iBAAkB,EAAI,KAAO,KAC7B,eAAgB,GACjB,CACD,SAAU,CACR,KAAM,cACN,aAAc,EAAI,KAAO,KACzB,aAAc,IAAM,KAAO,KAC3B,iBAAkB,GAAK,KAAO,KAC9B,eAAgB,GACjB,CACD,WAAY,CACV,KAAM,gBACN,aAAc,EAAI,KAAO,KACzB,aAAc,IAAM,KAAO,KAC3B,iBAAkB,GAAK,KAAO,KAC9B,eAAgB,GACjB,CACF,CAED,IAAa,EAAb,KAA0B,CAWxB,YAAY,EAAgC,EAA6B,EAAE,CAAE,mBAL5B,8BACnB,4BACC,yBACuB,KAGpD,KAAK,eAAiB,EACtB,KAAK,OAAS,CACZ,QAAS,EAAO,SAAW,GAC3B,kBAAmB,EAAO,mBAAqB,KAAO,KACtD,aAAc,EAAO,cAAgB,GAAK,KAC1C,aAAc,EAAO,cAAgB,GAAK,KAAO,KACjD,iBAAkB,EAAO,kBAAoB,IAAM,KACnD,kBAAmB,EAAO,mBAAqB,IAC/C,eAAgB,EAAO,gBAAkB,GACzC,iBAAkB,EAAO,kBAAoB,GAC7C,uBAAwB,EAAO,wBAA0B,GACzD,qBAAsB,EAAO,qBAC9B,CAED,KAAK,iBAAmB,KAAK,8BAA8B,CAG7D,8BAA+C,CAO7C,OANI,KAAK,OAAO,qBACP,KAAK,IACV,KAAK,OAAO,iBACZ,KAAK,OAAO,qBAAqB,iBAClC,CAEI,KAAK,OAAO,iBAGrB,0BAAkC,EAAsB,CAOtD,OANI,KAAK,OAAO,qBACP,KAAK,IACV,KAAK,OAAO,qBAAqB,aACjC,KAAK,IAAI,KAAK,OAAO,qBAAqB,aAAc,EAAK,CAC9D,CAEI,EAGT,iBAAiB,EAA4C,CAC3D,GAAI,CAAC,KAAK,OAAO,QACf,MAAO,CACL,KAAM,KAAK,OAAO,kBAClB,SAAU,QACV,OAAQ,0BACR,iBAAkB,CAAE,KAAM,UAAW,WAAY,EAAG,CACrD,CAGH,IAAM,EAAmB,KAAK,eAAe,qBAAqB,CAE9D,EAAU,KAAK,iBACf,EAAW,WACX,EAAS,GAGb,GAAI,EAAiB,OAAS,UAC5B,EAAU,KAAK,OAAO,iBACtB,EAAW,UACX,EAAS,gCACJ,CACL,IAAM,EAAmB,KAAK,eAAe,EAAiB,CAC9D,EAAU,KAAK,0BACb,EACA,EACD,CACD,EAAW,EAAiB,KAC5B,EAAS,sBAAsB,EAAiB,KAAK,gBAAgB,KAAK,MAAM,EAAiB,WAAa,IAAI,CAAC,IA0BrH,OAtBI,GAAkB,EAAiB,IACrC,EAAU,EACV,GAAU,iCAAiC,EAAe,IAI5D,EAAU,KAAK,0BAA0B,EAAQ,CAGjD,EAAU,KAAK,IACb,KAAK,OAAO,aACZ,KAAK,IAAI,KAAK,OAAO,aAAc,EAAQ,CAC5C,CAED,KAAK,iBAAmB,EACxB,KAAK,aAAe,CAClB,KAAM,EACN,WACA,SACA,mBACD,CAEM,KAAK,aAGd,kBAAkB,EAAc,EAAkB,EAAwB,CAExE,KAAK,eAAe,aAAa,EAAM,EAAU,EAAQ,CAGrD,GACF,KAAK,uBACL,KAAK,oBAAsB,IAE3B,KAAK,sBACL,KAAK,qBAAuB,GAI9B,KAAK,eAAe,EAAS,EAAU,EAAK,CAG9C,qBAA8B,CAC5B,OAAO,KAAK,iBAGd,iBAA4C,CAC1C,OAAO,KAAK,aAGd,OAAc,CACZ,KAAK,iBAAmB,KAAK,OAAO,iBACpC,KAAK,oBAAsB,EAC3B,KAAK,qBAAuB,EAC5B,KAAK,aAAe,KACpB,KAAK,kBAAoB,KAM3B,wBAAwB,EAAkC,CACxD,KAAK,kBAAoB,EAM3B,8BAKE,CACA,GAAI,CAAC,KAAK,mBAAqB,CAAC,KAAK,OAAO,uBAC1C,MAAO,CACL,YAAa,GACb,UAAW,EACX,wBAAyB,KAAK,OAAO,aACrC,mBAAoB,EACrB,CAGH,IAAM,EAAY,KAAK,kBAAkB,UACnC,EAAoB,KAAK,kBAAkB,sBAG3C,GAAsB,EAAI,GAAa,EACvC,EAA0B,KAAK,IACnC,KAAK,OAAO,aACZ,KAAK,MAAM,EAAqB,IAAM,CACvC,CAED,MAAO,CACL,YAAa,EAAY,GACzB,YACA,0BACA,qBACD,CAGH,eAAuB,EAAsD,CAC3E,IAAMC,EAAqC,CACzC,KAAM,WACN,aAAc,IAAM,KACpB,aAAc,EAAI,KAAO,KACzB,iBAAkB,IAAM,KACxB,eAAgB,GACjB,CAKK,EADJ,KAAK,OAAO,sBAAsB,eAAiB,EAAI,KAAO,KAE5D,EACA,EAEJ,GAAI,KAAK,OAAO,iBACd,OAAO,EAAiB,cAAgB,EAI1C,IAAIC,EAEJ,OAAQ,EAAiB,KAAzB,CACE,IAAK,OACH,EACE,EAAiB,WAAa,GACzB,EAAiB,YAAc,EAC/B,EAAiB,UAAY,EACpC,MACF,IAAK,OACH,EAAe,EAAiB,cAAgB,EAChD,MACF,IAAK,WACH,EAAe,EAAiB,cAAgB,EAChD,MACF,QACE,EAAe,EAAiB,UAAY,EAQhD,OAJI,KAAK,OAAO,wBAA0B,KAAK,kBACtC,KAAK,qCAAqC,EAAa,CAGzD,EAMT,qCACE,EACkB,CAClB,GAAI,CAAC,KAAK,kBAAmB,OAAO,EAEpC,IAAM,EAAW,KAAK,8BAA8B,CAC9C,EAAY,EAAS,UAuB3B,OApBI,EAAY,GACP,CACL,GAAG,EACH,KAAM,GAAG,EAAS,KAAK,oBACvB,aAAc,KAAK,IAAI,EAAS,aAAe,GA
AK,GAAK,KAAK,CAC9D,eAAgB,KAAK,IAAI,EAAS,eAAiB,IAAK,GAAI,CAC7D,CAIC,EAAY,GACP,CACL,GAAG,EACH,KAAM,GAAG,EAAS,KAAK,kBACvB,aAAc,KAAK,IAAI,EAAS,aAAe,IAAM,GAAK,KAAK,CAC/D,eAAgB,KAAK,IAAI,EAAS,eAAiB,IAAK,GAAI,CAC7D,CAII,CACL,GAAG,EACH,KAAM,GAAG,EAAS,KAAK,sBACvB,aAAc,KAAK,IACjB,EAAS,aAAe,IACxB,EAAS,wBACV,CACD,eAAgB,EAAS,eAAiB,GAC3C,CAGH,0BACE,EACA,EACQ,CACR,IAAI,EAAa,KAAK,iBAGhB,EAAoB,KAAK,eAAe,sBAAsB,CAEpE,GAAI,EAAoB,EAAG,CAGzB,IAAM,EACJ,EAFqB,KAAK,uBAAuB,EAAiB,CAE7B,KAAK,OAAO,kBAG7C,EAAc,EAAS,eAC7B,EACE,KAAK,kBAAoB,EAAI,GAC7B,EAAkB,EAUtB,GANA,EAAa,KAAK,IAChB,EAAS,aACT,KAAK,IAAI,EAAS,aAAc,EAAW,CAC5C,CAGG,KAAK,oBAAsB,EAAG,CAEhC,IAAM,EAAkB,KAAK,IAAI,GAAK,KAAK,oBAAsB,GAAI,CACrE,GAAc,EAAI,UACT,KAAK,qBAAuB,EAAG,CAExC,IAAM,EAAiB,KAAK,IAAI,GAAK,KAAK,qBAAuB,IAAK,CACtE,GAAc,EAAI,EAGpB,OAAO,KAAK,MAAM,EAAW,CAG/B,uBAA+B,EAA4C,CACzE,OAAQ,EAAiB,KAAzB,CACE,IAAK,OACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAO,IAIb,eACE,EACA,EACA,EACM,CACN,GAAI,CAAC,EAAS,CAEZ,KAAK,iBAAmB,KAAK,IAC3B,KAAK,OAAO,aACZ,KAAK,iBAAmB,GACzB,CACD,OAIF,IAAM,EAAa,GAAQ,EAAW,KAChC,EAAU,KAAK,eAAe,mBAAmB,CAEvD,GAAI,EAAQ,aAAe,EAAG,CAC5B,IAAM,EAAmB,EAAa,EAAQ,aAE1C,EAAmB,KAAK,OAAO,kBAAoB,GAErD,KAAK,iBAAmB,KAAK,IAC3B,KAAK,OAAO,aACZ,KAAK,iBAAmB,IACzB,CACQ,EAAmB,KAAK,OAAO,kBAAoB,MAE5D,KAAK,iBAAmB,KAAK,IAC3B,KAAK,OAAO,aACZ,KAAK,iBAAmB,IACzB,KC9ZT,MAAM,EAAc,kBAuBpB,SAAgB,EAAgB,EAA0B,CAYxD,OAVI,EAAW,EAAK,CACX,OAIL,OAAO,GAAS,UAAY,EAAY,KAAK,EAAK,CAC7C,MAIF,OAqCT,SAAgB,EAAW,EAAiC,CAC1D,GAAI,OAAO,GAAS,WAAY,EAC9B,MAAO,GAIT,GAAI,OAAO,WAAe,MAEpB,WAAW,MAAQ,aAAgB,WAAW,MAI9C,WAAW,MAAQ,aAAgB,WAAW,MAChD,MAAO,GAKX,IAAM,EAAM,EACZ,OACG,SAAU,GAAO,SAAU,GAAO,SAAU,KAC5C,OAAO,EAAI,MAAS,UAAmB,EAAI,OAAS,QC5CzD,IAAa,EAAb,cAAqC,KAAM,CA8BzC,YAAY,CACV,OACA,UACA,QACA,UAMC,CACD,OAAO,CACP,KAAK,KAAO,EACZ,KAAK,MAAQ,EACb,KAAK,QAAU,EACf,KAAK,OAAS,EAmBhB,gBAA0B,CACxB,OACE,KAAK,OAAS,iBACd,KAAK,OAAS,gCC9IpB,SAAgB,EACd,EACQ,CACR,OAAO,OAAO,QAAQ,EAAS,CAC5B,KAAK,CAAC,EAAK,KAAW,GAAG,EAAI,GAAG,EAAO,OAAO,OAAO,EAAM,CAAC,GAAG,CAC/D,KAAK,IAAI,CAOd,SAAgB,EACd,EACA,EACS,CACT,OAAO,GAAU,GAAY,EAAS,EAAW,IAWnD,SAAgB,EACd,EACA,CAAE,uBAAsB,cACT,CAIf,GAAI,EACF,OAAO,KAGT,GAAI,GAAc,KAChB,OAAO,EAGT,IAAM,EAAO,EACb,GAAI,GAAQ,KACV,MAAM,IAAI,EAAgB,CACxB,KAAM,4BACN,QACE,mJACH,CAAC,CAGJ,OAAO,EAMT,SAAgB,EACd,EACA,EACA,EACgE,CAChE,GAAI,GAAmB,EACrB,MAAO,CAAC,CAAE,UAAW,EAAG,QAAS,EAAU,aAAc,EAAG,CAAC,CAI/D,IAAMC,EAIA,EAAE,CAER,GAAI,EAAmB,CAErB,IAAI,EAAc,EACd,EAAe,EAEnB,KAAO,EAAc,GAAU,CAC7B,IAAM,EAAU,KAAK,IAAI,EAAc,EAAmB,EAAS,CACnE,EAAS,KAAK,CACZ,UAAW,EACX,UACA,eACD,CAAC,CACF,EAAc,EACd,SAEG,CAEL,IAAM,EAAc,KAAK,KAAK,EAAW,EAAgB,CAEzD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAiB,IAAK,CACxC,IAAM,EAAY,EAAI,EAChB,EAAU,KAAK,IAAI,EAAY,EAAa,EAAS,CAEvD,EAAY,GACd,EAAS,KAAK,CACZ,YACA,UACA,aAAc,EACf,CAAC,EAKR,OAAO,ECvFT,eAAsB,EAAY,CAChC,WACA,SACA,SACA,uBACA,kBACA,aACA,eACA,gBACA,UAW0B,CAC1B,IAAM,EAAQ,GAAU,EAClB,EAAiB,EAAO,KAAO,EAAO,KAAO,EAAQ,IAAA,GACrD,EAAoB,EAAa,iBAAiB,EAAe,CAEnE,EAAM,EADe,EAAkB,KAOzC,EAAO,OACN,IAAQ,KAA4B,EAAM,EAAO,OAClD,CAAC,IAED,EAAM,EAAO,MAGf,GAAM,CAAE,QAAO,OAAM,QAAS,MAAM,EAAO,MAAM,EAAO,EAAI,CACtD,EAAc,GAAQ,EACtB,EAAiB,KAAK,KAAK,CAK7B,GAAwB,IAC1B,EAAO,KAAO,EAAS,GAQzB,IAAM,EAAU,EAAS,EACzB,GAAI,CAAC,GAAwB,GAAQ,IAAY,EAAO,KACtD,MAAM,IAAI,EAAgB,CACxB,KAAM,oBACN,QAAS,wCAAwC,EAAK,uCAAuC,EAAQ,QACtG,CAAC,CAGJ,IAAM,EAAS,MAAM,EAAc,YAAY,EAAU,EAAO,CAC9D,YAAa,EAAO,IAAU,CAC5B,IAAa,EAAU,EAAO,EAAM,EAEtC,kBACD,CAAC,CAGI,EAAgB,KAAK,KAAK,CAAG,EAC7B,EAAU,EAAO,QAAU,KAAO,EAAO,OAAS,IAQxD,OANA,EAAa,kBAAkB,EAAa,EAAe,EAAQ,CAEnE,EAAO,IACL,gBAAgB,EAAU,YAAc,SAAS,IAAI,EAAY,YAAY,EAAc,MAAM,EAAkB,SAAS,YAC7H,CAEM,EAUT,SAAgB,EACd,EACA,EACA,EACA,EACA,E
ACS,CAaT,OAXE,GAAe,MACf,GAAgB,EAAY,QAC5B,CAAC,EAAI,gBAAgB,CAEd,GAGL,EACK,EAAc,EAAK,EAAa,CAGlC,EAAqB,EAAiB,EAAI,CAQnD,SAAgB,EACd,EACA,EACS,CACT,IAAM,EAAS,EAAI,QAAU,EAC7B,OACG,CAAC,EAAiB,EAAQ,IAAI,EAAI,IAAW,KAAO,IAAW,MAChE,EAAgB,UAAU,CClI9B,eAAsB,EAAgB,CACpC,SACA,aACA,gBACA,SACA,kBACA,gBACA,iBACA,GAAG,GAWH,CACA,GAAM,CAAE,SAAQ,aAAc,EAGxB,CAAE,QAAS,MAAM,EAAc,QAAQ,EAAO,CAG9C,EAAY,EAAK,MAAM,KAAM,GAAS,EAAK,OAAS,QAAQ,CAElE,GAAI,CAAC,EAAW,CACd,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,oBACN,QAAS,QAAQ,EAAO,gHACzB,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAGR,IAAM,EAAc,EAAU,GAGxB,EAAW,CACf,aAAc,EAAO,MAAQ,UAC7B,SAAU,EAAO,MAAQ,2BACzB,KAAM,EAAO,MAAQ,EACrB,GAAG,EAAW,SACf,CAED,EAAO,IAAI,iCAAiC,EAAO,SAAS,IAAc,CAE1E,GAAM,CAAE,SAAQ,OAAQ,MAAM,EAAc,QAAQ,EAAQ,EAAW,EACpE,GAAc,CACb,UAAW,OACX,YACA,WACD,CACF,CAAC,CAEI,EAAQ,EAAI,GAElB,GAAI,CAAC,EAAiB,EAAQ,IAAI,EAAI,CAAC,EAAO,CAC5C,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,mBACN,QAAS,mCACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAGR,EAAU,aAAa,EAAM,CAE7B,EAAO,IAAI,YAAY,EAAM,6BAA6B,CAI1D,EAAc,EAAM,CAEpB,EAAO,IAAI,kCAAkC,CAI7C,IAEI,EAAW,EACX,EAAY,MAAM,EAAc,aAAa,EAAM,CAEvD,KAAO,EAAU,SAAW,UAAY,EAAW,IACjD,MAAM,IAAI,QAAe,GACvB,EAAgB,WAAW,EAAS,IAAa,CAClD,CACD,EAAY,MAAM,EAAc,aAAa,EAAM,CACnD,IAGF,GAAI,EAAU,SAAW,SAAU,CACjC,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,eACN,QAAS,0CAA0C,EAAU,OAAO,GACrE,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAOR,IAAM,EAHqB,EAAU,MAAM,KACxC,GAAS,EAAK,SAAW,EAC3B,EACsC,OAEvC,GAAI,CAAC,GAAY,GAAI,CACnB,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,oBACN,QAAS,2CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAUR,OAPA,EAAO,IAAI,uBAAuB,EAAW,KAAK,CAElD,EAAU,UAAU,CAClB,SAAU,EAAW,GACrB,KAAM,EAAO,MAAQ,KACtB,CAAC,CAEK,CAAE,QAAO,aAAY,cAAa,CAO3C,eAAsB,EAAkB,CACtC,QACA,aACA,cACA,SACA,SACA,eAAe,EACf,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,GAAG,GAiByB,CAC5B,IAAI,EAAoB,EACpB,EAAgB,EAEpB,GAAI,CAEF,IAAM,EAAiB,EAAO,KAAO,EAAO,KAAO,EAAS,IAAA,GAEtD,EADoB,EAAa,iBAAiB,EAAe,CACnC,KAC9B,EAAU,KAAK,IAAI,EAAS,EAAW,EAAO,MAAQ,EAAE,CACxD,EAAc,MAAM,EAAO,MAAM,EAAQ,EAAQ,CAEvD,GAAI,CAAC,GAAe,CAAC,EAAY,MAC/B,MAAM,IAAI,EAAgB,CACxB,KAAM,gBACN,QAAS,iCACV,CAAC,CAGJ,IAAM,EAAY,EAAY,MAGxB,EAAY,KAAK,KAAK,CAEtB,EAAM,MAAM,EAAc,YAAY,EAAW,GAAI,EAAW,CACpE,kBACD,CAAC,CAEI,EAAW,KAAK,KAAK,CAAG,EAE9B,GAAI,CAAC,EAAI,OACP,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,4CACV,CAAC,CAaJ,GAVA,EAAgB,EAAI,OAAO,OAE3B,EAAU,aAAa,EAAW,GAAI,EAAe,EAAO,MAAQ,EAAE,CACtE,EAAU,kBACR,EAAgB,EAChB,EACA,EAAO,MAAQ,EAChB,CAGG,GAAe,UAAY,GAAO,CACpC,IAAM,EAAa,KAAK,MAAM,EAAS,EAAU,CAEjD,EAAQ,YAAY,CAClB,aACA,KAAM,EACN,WACA,MAAO,GAAa,EAAW,KAC/B,QAAS,GACT,WAAY,EACZ,iBACE,EAAa,iBAAiB,EAAE,kBAAkB,KACpD,iBAAkB,EAAa,iBAAiB,EAAE,SACnD,CAAC,CAGF,IAAM,EAAoB,EAAc,sBAAsB,CAC9D,EAAa,wBAAwB,EAAkB,CAIzD,GAAI,IAAkB,EAAO,MAAQ,GAAI,CAIvC,GAHI,GAAQ,EAAO,OAAO,CAGtB,GAAe,UAAY,GAAO,CACpC,IAAM,EAAiB,EAAQ,YAAY,CACvC,GACF,EAAO,IACL,0BAA0B,EAAe,UAAU,YAAY,EAAe,cAAc,iBAAiB,KAAK,MAAM,EAAe,aAAe,KAAK,CAAC,MAC7J,CAKL,EAAO,IAAI,kCAAkC,IAAQ,CAErD,GAAI,CACF,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,WACX,SAAU,EAAW,GACtB,CACD,CAAE,YAAa,mBAAoB,CACpC,OACM,EAAK,CAEZ,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,uBACN,QAAS,0CAA0C,IACnD,MAAO,EACR,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAER,OAIF,MAAM,EAAkB,CACtB,QACA,aACA,cACA,OAAQ,EACR,SACA,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,UACA,kBACA,GAAG,EACJ,CAAC,OACK,EAAK,CAEZ,GAAI,GAAe,KAejB,GAbE,GAAU,MAAQ,EAAgB,IAElC,EAAe,GAYf,EACE,EAVgB,aAAe,EAM/B,EALA,IAAI,EAAgB,CAClB,KAAM,gBACN,QAAS,mCACT,MAAO,EACR,CAAC,CAOF,EACA,EACA,EAAU,cACX,CACD,CACA,IAAM,EAAQ,EAAY,GAC1B,EAAoB,EAEpB,IAAM,EAAU,EAAgB,WAAW,SAAY,CACrD,MAAM,EAAkB,CACtB,QACA,aACA,cACA,SACA,SACA,aAAc,EAAe,EAC7B,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,kBACA,GAAG,EACJ
,CAAC,EACD,EAAM,CACT,IAAU,EAAQ,MAElB,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,kCAAkC,EAAM,aAAa,IAC9D,MAAO,EACR,CAAC,EC5RV,eAAsB,EACpB,EAC8B,CAC9B,GAAM,CACJ,SACA,QACA,SACA,YACA,WAAW,EAAE,CACb,gBACA,SACA,kBACA,aACE,EAGE,EAAgB,CACpB,aAAc,EAAO,MAAQ,UAC7B,SAAU,EAAO,MAAQ,2BACzB,KAAM,EAAO,MAAQ,EACrB,GAAG,EACJ,CAED,EAAO,IAAI,2BAA2B,EAAO,WAAW,IAAQ,CAGhE,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,OACX,YACA,SAAU,EACX,CACD,CAAE,YAAa,mBAAoB,CACpC,CAED,EAAO,IAAI,mCAAmC,IAAS,CAGvD,IAEI,EAAW,EACX,EAAY,MAAM,EAAc,aAAa,EAAM,CAEvD,KAAO,EAAW,IAAa,CAE7B,IAAM,EAAW,EAAU,MAAM,KAAM,GAAS,EAAK,SAAW,EAAO,CAGvE,GACE,GAAU,SAAW,UACrB,EAAS,QACR,EAAS,OAAsB,GAChC,CACA,IAAM,EAAa,EAAS,OAQ5B,OAPA,EAAO,IAAI,+BAA+B,EAAO,IAAI,EAAW,KAAK,CAErE,GAAW,UAAU,CACnB,SAAU,EAAW,GACrB,KAAM,EAAO,MAAQ,KACtB,CAAC,CAEK,CAAE,aAAY,SAAQ,CAI/B,GAAI,GAAU,SAAW,SAAU,CACjC,IAAMC,EAAQ,IAAI,EAAgB,CAChC,KAAM,mBACN,QAAS,cAAc,EAAO,+BAC/B,CAAC,CAEF,MADA,GAAW,UAAUA,EAAM,CACrBA,EAGR,MAAM,IAAI,QAAe,GACvB,EAAgB,WAAW,EAAS,IAAa,CAClD,CACD,EAAY,MAAM,EAAc,aAAa,EAAM,CACnD,IAGF,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,eACN,QAAS,cAAc,EAAO,sCAC/B,CAAC,CAEF,MADA,GAAW,UAAU,EAAM,CACrB,EASR,eAAsB,EACpB,EACe,CACf,GAAM,CACJ,SACA,QACA,aACA,SACA,SAAS,EACT,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,aACE,EAEA,EAAe,EAAQ,cAAgB,EACvC,EAAoB,EACpB,EAAgB,EAEpB,GAAI,CAEF,IAAM,EAAiB,EAAO,KAAO,EAAO,KAAO,EAAS,IAAA,GAEtD,EADoB,EAAa,iBAAiB,EAAe,CACnC,KAC9B,EAAU,KAAK,IAAI,EAAS,EAAW,EAAO,MAAQ,EAAE,CACxD,EAAc,MAAM,EAAO,MAAM,EAAQ,EAAQ,CAEvD,GAAI,CAAC,GAAe,CAAC,EAAY,MAC/B,MAAM,IAAI,EAAgB,CACxB,KAAM,gBACN,QAAS,2CAA2C,IACrD,CAAC,CAGJ,IAAM,EAAY,EAAY,MAGxB,EAAY,KAAK,KAAK,CAEtB,EAAM,MAAM,EAAc,YAAY,EAAW,GAAI,EAAW,CACpE,kBACD,CAAC,CAEI,EAAW,KAAK,KAAK,CAAG,EAE9B,GAAI,CAAC,EAAI,OACP,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,sDAAsD,IAChE,CAAC,CAaJ,GAVA,EAAgB,EAAI,OAAO,OAE3B,GAAW,aAAa,EAAW,GAAI,EAAe,EAAO,MAAQ,EAAE,CACvE,GAAW,kBACT,EAAgB,EAChB,EACA,EAAO,MAAQ,EAChB,CAGG,GAAe,UAAY,GAAO,CACpC,IAAM,EAAa,KAAK,MAAM,EAAS,EAAU,CAEjD,EAAQ,YAAY,CAClB,aACA,KAAM,EACN,WACA,MAAO,GAAa,EAAW,KAC/B,QAAS,GACT,WAAY,EACZ,iBACE,EAAa,iBAAiB,EAAE,kBAAkB,KACpD,iBAAkB,EAAa,iBAAiB,EAAE,SACnD,CAAC,CAGF,IAAM,EAAoB,EAAc,sBAAsB,CAC9D,EAAa,wBAAwB,EAAkB,CAIzD,GAAI,IAAkB,EAAO,MAAQ,GAAI,CAIvC,GAHA,EAAO,OAAO,CAGV,GAAe,UAAY,GAAO,CACpC,IAAM,EAAiB,EAAQ,YAAY,CACvC,GACF,EAAO,IACL,6BAA6B,EAAO,IAAI,EAAe,UAAU,YAAY,EAAe,cAAc,iBAAiB,KAAK,MAAM,EAAe,aAAe,KAAK,CAAC,MAC3K,CAIL,OAIF,MAAM,EAAkB,CACtB,GAAG,EACH,OAAQ,EACR,aAAc,EACf,CAAC,OACK,EAAK,CAEZ,GAAI,GAAe,KAejB,GAd0B,EAAgB,IAGxC,EAAe,GAYf,EACE,EAVgB,aAAe,EAM/B,EALA,IAAI,EAAgB,CAClB,KAAM,gBACN,QAAS,wCAAwC,IACjD,MAAO,EACR,CAAC,CAOF,EACA,EACA,GAAW,cACZ,CACD,CACA,IAAM,EAAQ,EAAY,GAC1B,EAAoB,EAEpB,IAAM,EAAU,EAAgB,WAAW,SAAY,CACrD,MAAM,EAAkB,CACtB,GAAG,EACH,SACA,aAAc,EAAe,EAC9B,CAAC,EACD,EAAM,CACT,IAAU,EAAQ,MAElB,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,mCAAmC,EAAO,aAAa,IAChE,MAAO,EACR,CAAC,MAGJ,MAAM,GAWZ,eAAsB,GACpB,EACe,CACf,GAAM,CAAE,SAAQ,QAAO,WAAU,gBAAe,SAAQ,aAAc,EAEtE,EAAO,IAAI,yBAAyB,EAAO,WAAW,IAAQ,CAE9D,GAAI,CACF,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,WACX,WACD,CACD,CAAE,YAAa,mBAAoB,CACpC,CAED,EAAO,IAAI,cAAc,EAAO,yBAAyB,OAClD,EAAK,CACZ,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,uBACN,QAAS,iCAAiC,EAAO,WAAW,IAC5D,MAAO,EACR,CAAC,CAEF,MADA,GAAW,UAAU,EAAM,CACrB,GCzWV,eAAsB,GACpB,EACA,EAC2B,CAC3B,OAAO,EAAc,yBAAyB,EAAY,CAM5D,SAAgB,GAAyB,EAIvC,CACA,MAAO,CACL,SAAU,EAAe,UAAY,KACrC,mBAAoB,EAAe,mBACnC,iBAAkB,EAAe,iBAClC,CAMH,eAAsB,EAA0B,CAC9C,gBACA,cACA,OACA,WACA,mBACA,8BACA,cAS8B,CAK9B,GACE,CAAC,GACD,CAAC,GACD,GAAoB,KAEpB,OAGF,IAAMC,EAA+B,CACnC,OACA,WACA,aAAc,IAAI,MAAM,CAAC,UAAU,CACnC,iBAAkB,EACnB,CAQD,OAN4B,MAAM,EAAc,UAC9C,EACA,EACA,CAAE,aAAY,CACf,CAQ
H,eAAsB,EACpB,EACA,EACe,CACV,GACL,MAAM,EAAc,aAAa,EAAiB,CCnCpD,eAAsB,EAAc,CAClC,WACA,SACA,SACA,uBACA,eAAe,EACf,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,GAAG,GAgByB,CAC5B,IAAI,EAAoB,EACpB,EAAgB,EAEpB,GAAI,CACF,IAAM,EAAM,MAAM,EAAY,CAC5B,WACA,SACA,SACA,uBACA,WAAY,EAAU,WACtB,kBACA,eACA,gBACA,SACD,CAAC,CAEF,GAAI,CAAC,EAAiB,EAAI,OAAQ,IAAI,EAAI,EAAI,QAAU,KACtD,MAAM,IAAI,EAAgB,CACxB,KAAM,8BACN,QAAS,4CACV,CAAC,CAaJ,GAVA,EAAgB,EAAI,OAAO,OAE3B,EAAU,aAAa,EAAU,EAAe,EAAI,OAAO,MAAQ,EAAE,CACrE,EAAU,kBACR,EAAgB,EAChB,EACA,EAAI,QAAQ,MAAQ,EACrB,CAGG,GAAe,UAAY,GAAO,CACpC,IAAM,EAAa,KAAK,MAAM,GAAU,EAAgB,GAAU,GAAG,CAC/D,EAAY,EAAgB,EAC5B,EAAgB,KAAK,KAAK,EAAI,KAAK,KAAK,CAAG,KAC3C,EAAe,EAAa,iBAAiB,CAEnD,EAAQ,YAAY,CAClB,aACA,KAAM,EACN,SAAU,EACV,MAAO,GAAa,EAAgB,KACpC,QAAS,GACT,WAAY,EACZ,iBAAkB,GAAc,kBAAkB,KAClD,iBAAkB,GAAc,SACjC,CAAC,CAGF,IAAM,EAAoB,EAAc,sBAAsB,CAC9D,EAAa,wBAAwB,EAAkB,CAGzD,GAAI,IAAkB,EAAO,MAAQ,GAAI,CAIvC,GAHI,GAAQ,EAAO,OAAO,CAGtB,GAAe,UAAY,GAAO,CACpC,IAAM,EAAiB,EAAQ,YAAY,CACvC,GACF,EAAO,IACL,qBAAqB,EAAe,UAAU,YAAY,EAAe,cAAc,iBAAiB,KAAK,MAAM,EAAe,aAAe,KAAK,CAAC,MACxJ,CAIL,EAAU,YAAY,EAAI,OAAO,CACjC,OAGF,MAAM,EAAc,CAClB,WACA,OAAQ,EACR,SACA,uBACA,cACA,eACA,kBACA,gBACA,SACA,gBACA,UACA,UACA,kBACA,GAAG,EACJ,CAAC,OACK,EAAK,CAEZ,GAAI,GAAe,KAkBjB,GAbE,GAAU,MAAQ,EAAgB,IAElC,EAAe,GAYf,EACE,EAVgB,aAAe,EAM/B,EALA,IAAI,EAAgB,CAClB,KAAM,gBACN,QAAS,gBACT,MAAO,EACR,CAAC,CAOF,EACA,EACA,EAAU,cACX,CACD,CACA,IAAM,EAAQ,EAAY,GAE1B,EAAoB,EAEpB,IAAM,EAAU,EAAgB,WAAW,SAAY,CACrD,MAAM,EAAc,CAClB,WACA,SACA,SACA,aAAc,EAAe,EAC7B,uBACA,cACA,eACA,kBACA,gBACA,SACA,gBACA,UACA,UACA,kBACA,GAAG,EACJ,CAAC,EACD,EAAM,CACT,IAAU,EAAQ,MAElB,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,8BAA8B,EAAS,aAAa,IAC7D,MAAO,EACR,CAAC,EAWV,eAAsB,EAAa,CACjC,cACA,YACA,SACA,uBACA,WACA,gBACA,SACA,kBACA,gBACA,aACA,8BACA,gBACA,iBACA,kBAAkB,GAClB,oBAAoB,SACpB,kBACA,GAAG,GAkBmD,CACtD,GAAI,CAAC,GAAwB,EAAO,MAAQ,KAAM,CAChD,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,4BACN,QAAS,0BACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAIR,IAAIC,EACJ,GAAI,GAAmB,EAAgB,WAAW,EAAO,MAAM,CAC7D,GAAI,CACF,EAAO,IAAI,6BAA6B,CACxC,EAAW,MAAM,EAAgB,gBAC/B,IAAI,WAAW,EAAO,MAAa,CACpC,CACD,EAAO,IAAI,sBAAsB,IAAW,OACrC,EAAO,CACd,EAAO,IACL,wCAAwC,aAAiB,MAAQ,EAAM,QAAU,kBAClF,CAKL,IAAMC,EAA8B,CAClC,uBACA,YACA,KAAM,EAAO,MAAQ,EACrB,SAAU,EAAW,EAAe,EAAS,CAAG,IAAA,GAChD,SAAU,EAAO,MAAQ,IAAA,GACzB,KAAM,EAAO,MAAQ,GACrB,aAAc,EAAO,cAAgB,IAAA,GACrC,WACA,kBAAmB,EAAW,EAAoB,IAAA,GACnD,CAEK,CAAE,SAAQ,UAAW,MAAM,EAAc,aAAa,EAAiB,CAE7E,GAAI,CAAC,EAAiB,EAAQ,IAAI,EAAI,GAAU,KAAM,CACpD,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,8BACN,QAAS,4CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAOR,GAJA,EAAO,IAAI,kBAAkB,EAAO,KAAK,CAEzC,EAAc,EAAO,GAAG,CAEpB,EAAO,OAAS,EAAG,CAErB,EAAU,YAAY,EAAO,CACzB,GAAQ,EAAO,OAAO,CAC1B,EAAe,EAAO,GAAG,CACzB,OAGF,IAAM,EAAqB,MAAM,EAA0B,CACzD,gBACA,cACA,KAAM,EAAO,MAAQ,EACrB,SAAU,EAAO,UAAY,EAAE,CAC/B,iBAAkB,KAClB,8BACA,aACD,CAAC,CAOF,OALA,EAAU,UAAU,CAClB,SAAU,EAAO,GACjB,KAAM,EAAO,MAAQ,KACtB,CAAC,CAEK,CACL,qBACA,SAAU,EAAO,GACjB,OAAQ,EAAO,OAChB,CAQH,eAAsB,EAAa,CACjC,WACA,YACA,qBACA,cACA,SACA,uBACA,gBACA,SACA,kBACA,kBACA,gBACA,aACA,8BACA,gBACA,GAAG,GAgBmD,CACtD,IAAM,EAAM,MAAM,EAAc,UAAU,EAAS,CAC7C,EAAS,EAAI,OAEnB,GAAI,CAAC,EAAiB,EAAQ,IAAI,CAAE,CAMlC,GAAI,IAAW,IAAK,CAClB,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,gBACN,QAAS,0CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAUR,OAPI,EAAiB,EAAQ,IAAI,EAG/B,MAAM,EAAwB,EAAe,EAAmB,CAI3D,MAAM,EAAa,CACxB,kBACA,cACA,YACA,SACA,uBACA,SAAU,EAAE,CACZ,gBACA,SACA,kBACA,gBACA,aACA,8BACA,gBACA,mBAAsB,GACtB,GAAG,EACJ,CAAC,CAGJ,IAAM,EAAS,EAAI,OACnB,GAAI,GAAU,KAAM,CAClB,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,8BACN,QAAS,4CACV,CAAC,CAEF,MAD
A,EAAU,UAAU,EAAM,CACpB,EAGR,SAAM,EAA0B,CAC9B,gBACA,cACA,KAAM,EAAO,MAAQ,EACrB,SAAU,EAAO,UAAY,EAAE,CAC/B,iBAAkB,EAClB,8BACA,aACD,CAAC,CAIE,EAAO,SAAW,EAAO,KAM7B,OAFA,EAAc,EAAO,GAAG,CAEjB,CACL,WACA,qBACA,OAAQ,EAAO,OAChB,CAOH,eAAsB,GAAkB,CACtC,SACA,WACA,qBACA,YACA,cACA,kBACA,uBACA,gBACA,kBACA,SACA,gBACA,aACA,8BACA,gBACA,iBACA,GAAG,GAiBmD,CAyBtD,OAvBI,GAAY,MAAQ,GAAsB,MAC5C,EAAO,IAAI,qCAAqC,IAAW,CACpD,MAAM,EAAa,CACxB,WACA,qBACA,YACA,cACA,SACA,kBACA,uBACA,gBACA,SACA,kBACA,gBACA,aACA,8BACA,gBACA,GAAG,EACJ,CAAC,GAIJ,EAAO,IAAI,wBAAwB,CAC5B,MAAM,EAAa,CACxB,cACA,YACA,SACA,uBACA,SAAU,EAAE,CACZ,gBACA,SACA,kBACA,kBACA,gBACA,aACA,8BACA,gBACA,iBACA,GAAG,EACJ,CAAC,ECzfJ,eAAsB,GAAoB,CACxC,SACA,YACA,cACA,uBACA,kBACA,oBACA,cACA,eACA,gBACA,SACA,kBACA,gBACA,UACA,gBACA,aACA,8BACA,gBACA,iBACA,UAAA,EACA,yBACA,kBACA,GAAG,GAuBqD,CACxD,GAAI,CAAC,EAAO,MAAQ,EAAO,OAAS,EAAG,CACrC,EAAU,UACR,IAAI,EAAgB,CAClB,KAAM,4BACN,QAAS,6CACV,CAAC,CACH,CACD,OAIF,IAAM,EAAW,EACf,EAAO,KACP,EACA,EACD,CACD,EAAO,IAAI,iCAAiC,EAAS,OAAO,WAAW,CAGvE,IAAMC,EAAqC,CACzC,SAAU,EAAE,CACZ,cAAe,EACf,UAAW,GACX,OAAQ,GACT,CAGK,EAAkB,IAAI,IACtB,EAAgB,IAAI,IAEpB,MAA4B,CAChC,IAAM,EAAa,MAAM,KAAK,EAAc,QAAQ,CAAC,CAAC,QACnD,EAAK,IAAS,EAAM,EACrB,EACD,CACK,EAAgB,MAAM,KAAK,EAAgB,QAAQ,CAAC,CAAC,QACxD,EAAK,IAAa,EAAM,EACzB,EACD,CACD,EAAc,cACZ,EAAa,EAAI,EAAgB,EAAa,EAG5C,EAAU,YAAc,EAAa,GACvC,EAAU,WAAW,kBAAmB,EAAe,EAAW,EAItE,GAAI,CAEF,IAAM,EAAiB,MAAM,QAAQ,IACnC,EAAS,IAAI,KAAO,IAAY,CAE9B,IAAMC,EAA4B,CAChC,GAAG,EACH,KAAM,EAAQ,QAAU,EAAQ,UAChC,MAAM,MAAM,EAAO,EAAK,CAEtB,IAAM,EAAc,EAAQ,WAAa,GAAS,GAC5C,EAAY,KAAK,IACrB,EAAQ,WAAa,GAAO,EAAQ,QAAU,EAAQ,WACtD,EAAQ,QACT,CACD,OAAO,MAAM,EAAO,MAAM,EAAa,EAAU,EAEpD,CAEK,EAAe,MAAM,EAAa,CACtC,YAAa,GAAG,EAAY,WAAW,EAAQ,eAC/C,YACA,OAAQ,EACR,uBACA,kBACA,SAAU,CACR,eAAgB,OAChB,aAAc,EAAQ,aAAa,UAAU,CAC7C,cAAe,EAAS,OAAO,UAAU,CACzC,kBAAmB,EACpB,CACD,kBACA,gBACA,SACA,gBACA,aACA,8BACA,gBACA,iBACA,cAAiB,GACjB,QAAU,GACR,EAAO,IACL,WAAW,EAAQ,aAAa,mBAAmB,IACpD,CACH,QAAU,GAAS,CACjB,EAAc,IAAI,EAAQ,aAAc,EAAK,MAAQ,EAAE,CACvD,GAAqB,EAExB,CAAC,CAEF,GAAI,CAAC,EACH,MAAM,IAAI,EAAgB,CACxB,KAAM,mCACN,QAAS,mCAAmC,EAAQ,eACrD,CAAC,CAcJ,MAAO,CACL,QAZ6C,CAC7C,SAAU,EAAa,SACvB,mBAAoB,EAAa,mBACjC,aAAc,EAAQ,aACtB,UAAW,EAAQ,UACnB,QAAS,EAAQ,QACjB,OAAQ,EAAa,OACrB,gBAAiB,EAAuB,QAAQ,CAChD,aAAc,KACf,CAIC,OAAQ,EACT,EACD,CACH,CAGD,EAAc,SAAW,EAAe,IAAK,GAAW,EAAO,QAAQ,CAGvE,EAAU,UAAU,CAClB,SAAU,YAAY,EAAc,SAAS,IAAK,GAAM,EAAE,SAAS,CAAC,KAAK,IAAI,GAC7E,KAAM,EAAO,KACd,CAAC,CAGF,IAAM,EAAiB,EAAe,IACpC,MAAO,CAAE,UAAS,OAAQ,KAAoB,CAC5C,GAAI,CACF,MAAM,EAAc,CAClB,SAAU,EAAQ,SAClB,OAAQ,EAAQ,OAChB,OAAQ,EACR,uBACA,gBAAiB,EAAQ,gBACzB,cACA,eACA,gBACA,kBACA,SACA,gBACA,UACA,YAAa,EAAG,EAAO,IAAU,CAC/B,EAAgB,IAAI,EAAQ,aAAc,EAAM,CAC5C,GAAO,EAAc,IAAI,EAAQ,aAAc,EAAM,CACzD,GAAqB,EAEvB,iBAAkB,EAAW,EAAe,IAAe,CACrD,EAAU,iBACZ,EAAU,gBAAgB,EAAW,EAAe,EAAW,EAGnE,UAAY,GAAgB,CAC1B,EAAO,IACL,WAAW,EAAQ,aAAa,yBACjC,CAED,EAAgB,IACd,EAAQ,aACR,EAAc,IAAI,EAAQ,aAAa,EAAI,EAC5C,CACD,GAAqB,EAEvB,eAAgB,EAAO,KACrB,EAAO,IACL,WAAW,EAAQ,aAAa,iBAAiB,EAAa,IAAI,IACnE,CACM,GAAgB,GAAa,QAAU,IAEhD,QAAU,GAAY,CACpB,EAAQ,aAAe,GAEzB,QAAU,GAAU,CAElB,MADA,EAAO,IAAI,WAAW,EAAQ,aAAa,WAAW,IAAQ,CACxD,GAET,CAAC,OACK,EAAO,CAEd,MADA,EAAO,IAAI,WAAW,EAAQ,aAAa,kBAAkB,IAAQ,CAC/D,IAAI,EAAgB,CACxB,KAAM,iCACN,QAAS,WAAW,EAAQ,aAAa,gBACzC,MAAO,EACR,CAAC,GAGP,CAUD,GAPA,MAAM,QAAQ,IAAI,EAAe,CAGjC,EAAc,UAAY,GAC1B,EAAO,IAAI,sDAAsD,CAG7D,EAAU,UAAW,CACvB,IAAMC,EAA+B,CACnC,GAAI,YAAY,EAAc,SAAS,IAAK,GAAM,EAAE,SAAS,CAAC,KAAK,IAAI,GACvE,OAAQ,EAAO,KACf,KAAM,EAAO,KACb,QAAS,CACP,GAAI,EACJ,KAAM,kBACP,CACD,SAAU,CACR,eAAgB,OAChB,cAAe,EAAS,OAAO,UAAU,CACzC,cACD,CACF,CACD,EAAU,UAAU,EAAiB,CAIvC,
IAAK,IAAM,KAAU,EACnB,EAAO,OAAO,SAAS,CAGzB,MAAO,CACL,gBACA,MAAO,SAAY,CACjB,MAAM,EACJ,EACA,EACAC,EACA,EACA,EACD,EAEJ,OACM,EAAO,CAcd,KAbA,GAAc,OAAS,GACvB,EAAc,MAAQ,EAGtB,MAAM,EACJ,EACA,EACAA,EACA,EACA,EACD,CAED,EAAU,UAAU,EAAe,CAC7B,GAOV,eAAsB,EACpB,EACA,EACA,EACA,EACA,EACe,CACf,EAAO,IAAI,8BAA8B,CAGzC,IAAK,IAAM,KAAW,EAAM,SAAU,CACpC,EAAQ,gBAAgB,OAAO,CAE/B,AAEE,EAAQ,gBADR,EAAgB,aAAa,EAAQ,aAAa,CAC3B,MAIzB,GAAI,CACF,MAAMA,EAAU,EAAQ,SAAS,OAC1B,EAAO,CACd,EAAO,IACL,+BAA+B,EAAQ,aAAa,IAAI,IACzD,CAIH,EAAe,EAAQ,SAAS,CAGlC,EAAM,UAAY,GAClB,EAAM,OAAS,GACf,EAAO,IAAI,0BAA0B,CCxUvC,eAAsB,EACpB,EACA,EACe,CACf,OAAO,IAAI,QAAe,GACxB,EAAgB,WAAW,EAAS,EAAG,CACxC,CC7CH,eAAsB,EACpB,EACA,EACA,EACA,EACA,EAAe,EACA,CACf,GAAI,CAGF,IAFY,MAAM,EAAc,aAAa,EAAS,EAE9C,SAAW,IACjB,OAGF,MAAM,IAAI,EAAgB,CACxB,KAAM,8BACN,QAAS,+CACV,CAAC,OACK,EAAK,CAGZ,GAAI,CAAC,EAAY,EAFH,EAE2B,EAAc,EAAY,CACjE,MAAM,EAUR,OAFA,MAAM,EAAK,EAFG,IAAc,IAAiB,EAEX,CAE3B,MAAM,EACX,EACA,EACA,EACA,EACA,EAAe,EAChB,EAWL,eAAsB,EAAM,CAC1B,WACA,qBACA,eACA,kBACA,kBACA,gBACA,kBACA,cACA,iBAWgB,CAEhB,KAAgB,OAAO,CAGnB,GAAgB,MAClB,EAAgB,aAAa,EAAa,CAGxC,GAAC,GAAmB,GAAY,QAIpC,MAAM,EAAU,EAAU,EAAe,EAAiB,EAAY,CAElE,GAAsB,MACxB,OAAO,EAAwB,EAAe,EAAmB,CCzFrE,IAAa,EAAb,KAA2B,CAMzB,YAAY,EAA8B,EAAE,CAAE,mBAJP,EAAE,qBACe,EAAE,uBAC/B,EAGzB,KAAK,OAAS,CACZ,gBAAiB,EAAO,iBAAmB,IAC3C,sBAAuB,EAAO,uBAAyB,GACvD,sBAAuB,CACrB,UAAW,IAAM,KACjB,UAAW,EAAI,KAAO,KACtB,cAAe,GACf,GAAG,EAAO,sBACX,CACF,CAGH,aACE,EACA,EACA,EACM,CACN,KAAK,iBAAmB,KAAK,KAAK,CAClC,KAAK,eAAiB,CACpB,WACA,YACA,gBAAiB,EACjB,YAAa,KAAK,KAAK,GAAa,KAAO,MAAM,CACjD,cAAe,EACf,aAAc,EACd,0BACA,UAAW,KAAK,iBACjB,CACD,KAAK,aAAe,EAAE,CAGxB,YAAY,EAAgD,CAC1D,IAAMC,EAA6B,CACjC,GAAG,EACH,UAAW,KAAK,KAAK,CACtB,CAED,KAAK,aAAa,KAAK,EAAa,CAGhC,KAAK,aAAa,OAAS,KAAK,OAAO,kBACzC,KAAK,aAAe,KAAK,aAAa,MAAM,CAAC,KAAK,OAAO,gBAAgB,EAIvE,KAAK,gBAAkB,EAAa,UACtC,KAAK,eAAe,iBACjB,KAAK,eAAe,iBAAmB,GAAK,EAC/C,KAAK,eAAe,eACjB,KAAK,eAAe,eAAiB,GAAK,EAAa,SAC1D,KAAK,eAAe,cACjB,KAAK,eAAe,cAAgB,GAAK,EAAa,YAI7D,YAA0C,CACxC,GAAI,CAAC,KAAK,eAAe,SACvB,OAAO,KAGT,IAAM,EAAU,KAAK,KAAK,CACpB,EAAgB,EAAU,KAAK,iBAC/B,EAAmB,KAAK,aAAa,OAAQ,GAAU,EAAM,QAAQ,CAE3E,GAAI,EAAiB,SAAW,EAC9B,OAAO,KAGT,IAAM,EAAS,EAAiB,IAAK,GAAU,EAAM,MAAM,CACrD,EACJ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OACnD,EAAY,KAAK,IAAI,GAAG,EAAO,CAC/B,EAAW,KAAK,IAAI,GAAG,EAAO,CAC9B,EAAc,EAAiB,OAAS,KAAK,aAAa,OAE1DC,EAAuC,CAC3C,SAAU,KAAK,eAAe,UAAY,GAC1C,UAAW,KAAK,eAAe,WAAa,EAC5C,gBACA,gBAAiB,EAAiB,OAClC,YAAa,KAAK,aAAa,OAC/B,eACA,YACA,WACA,aAAc,KAAK,eAAe,cAAgB,EAClD,cACA,wBACE,KAAK,eAAe,yBAA2B,GACjD,UAAW,KAAK,eAAe,WAAa,EAC5C,UACD,CAKD,MAFA,MAAK,eAAiB,EAAE,CAEjB,EAGT,0BAA0D,CACxD,MAAO,CAAE,GAAG,KAAK,eAAgB,CAGnC,gBAAgB,EAAgC,CAC9C,IAAM,EAAU,KAAK,aAAa,OAAO,CACzC,OAAO,EAAQ,EAAQ,MAAM,CAAC,EAAM,CAAG,EAGzC,wBAA8C,CAC5C,GAAI,KAAK,aAAa,OAAS,EAC7B,MAAO,CACL,kBAAmB,EACnB,sBAAuB,EACvB,iBAAkB,EAClB,gBAAiB,CAAC,iCAAiC,CACnD,sBAAuB,CAAE,IAAK,IAAM,KAAM,IAAK,EAAI,KAAO,KAAM,CACjE,CAGH,IAAM,EAAmB,KAAK,aAAa,OAAQ,GAAU,EAAM,QAAQ,CACrE,EAAS,EAAiB,IAAK,GAAU,EAAM,MAAM,CAGrD,EACJ,EAAO,OAAS,EACZ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OACvD,EACA,EAAgB,KAAK,kBAAkB,EAAO,CAE9C,EADc,KAAK,KAAK,EAAc,CACC,EAGvC,EAAc,EAAiB,OAAS,KAAK,aAAa,OAK1D,EAJa,KAAK,IACtB,EACA,EAAe,KAAK,OAAO,sBAAsB,UAClD,CACsC,GAAM,EAAc,GAGrD,EAAmB,KAAK,IAC5B,EACA,EAAI,KAAK,IAAI,EAAG,EAAuB,CACxC,CAiBD,MAAO,CACL,oBACA,sBAfA,KAAK,+BAA+B,EAAiB,CAgBrD,mBACA,gBAdsB,KAAK,wBAC3B,EACA,EACA,EACD,CAWC,sBAPA,KAAK,+BAA+B,EAAiB,CAQtD,CAGH,eAIE,CACA,MAAO,CACL,QAAS,KAAK,0BAA0B,CACxC,OAAQ,KAAK,iBAAiB,CAC9B,SAAU,KAAK,wBAAwB,CACxC,CAGH,OAAc,CACZ,KAAK,aAAe,EAAE,CACtB,KAAK,eAAiB,EAAE,CACxB,KAAK,iBAAmB,EAG1B
,kBAA0B,EAA0B,CAClD,GAAI,EAAO,SAAW,EAAG,MAAO,GAEhC,IAAM,EAAO,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OAEpE,OAD2B,EAAO,IAAK,IAAW,EAAQ,IAAS,EAAE,CAEhD,QAAQ,EAAK,IAAS,EAAM,EAAM,EAAE,CAAG,EAAO,OAIrE,+BAAuC,EAAgC,CACrE,GAAI,EAAO,OAAS,EAAG,MAAO,IAI9B,IAAM,EAAa,KAAK,kBAAkB,EAAO,CAEjD,GAAI,OAAO,KAAK,EAAW,CAAC,OAAS,EAAG,MAAO,IAG/C,IAAM,EAAkB,OAAO,OAAO,EAAW,CAAC,IAAK,GAAU,CAC/D,IAAM,EAAS,EAAM,IAAK,GAAU,EAAM,MAAM,CAC1C,EACJ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OACnD,EAAW,KAAK,kBAAkB,EAAO,CAC/C,OAAO,KAAK,KAAK,EAAS,CAAG,GAC7B,CAGI,EACJ,EAAgB,QAAQ,EAAK,IAAO,EAAM,EAAI,EAAE,CAAG,EAAgB,OACrE,OAAO,KAAK,IAAI,EAAG,EAAI,KAAK,IAAI,EAAG,EAAiB,CAAC,CAGvD,kBACE,EACgC,CAChC,IAAMC,EAAyC,EAAE,CAmBjD,OAjBA,EAAO,QAAS,GAAU,CAExB,IAAIC,EACJ,AAOK,EAPD,EAAM,KAAO,IAAM,KAAkB,OAChC,EAAM,KAAO,IAAM,KAAkB,QACrC,EAAM,KAAO,IAAM,KAAkB,QACrC,EAAM,KAAO,KAAO,KAAkB,QACtC,EAAM,KAAO,EAAI,KAAO,KAAkB,MAC1C,EAAM,KAAO,EAAI,KAAO,KAAkB,MAC1C,EAAM,KAAO,EAAI,KAAO,KAAkB,MAClC,OAEZ,EAAO,KAAY,EAAO,GAAa,EAAE,EAC9C,IAAM,EAAQ,EAAO,GACjB,GAAO,EAAM,KAAK,EAAM,EAC5B,CAEK,EAGT,wBACE,EACA,EACA,EACU,CACV,IAAMC,EAA4B,EAAE,CAyCpC,OAvCI,EAAe,KAAK,OAAO,sBAAsB,WACnD,EAAgB,KACd,gFACD,CAGC,EAAe,KAAK,OAAO,sBAAsB,WACnD,EAAgB,KACd,8DACD,CAGC,EAAc,IAChB,EAAgB,KACd,4EACD,CAGC,EAAyB,IAC3B,EAAgB,KACd,gFACD,CAID,EAAyB,IACzB,EAAe,KAAK,OAAO,sBAAsB,WAEjD,EAAgB,KACd,iEACD,CAGC,EAAgB,SAAW,GAC7B,EAAgB,KACd,yDACD,CAGI,EAGT,+BAAuC,EAGrC,CACA,GAAI,EAAO,OAAS,EAClB,MAAO,CAAE,IAAK,IAAM,KAAM,IAAK,EAAI,KAAO,KAAM,CAUlD,IAAM,EANgB,EAAO,OAAO,CAAC,MAAM,EAAG,IAAM,EAAE,MAAQ,EAAE,MAAM,CAClC,MAClC,EACA,KAAK,KAAK,EAAO,OAAS,GAAI,CAC/B,CAE8B,IAAK,GAAU,EAAM,KAAK,CACnD,EAAa,KAAK,IAAI,GAAG,EAAS,CAClC,EAAa,KAAK,IAAI,GAAG,EAAS,CAExC,MAAO,CACL,IAAK,KAAK,IAAI,GAAK,KAAM,EAAW,CACpC,IAAK,KAAK,IAAI,GAAK,KAAO,KAAM,EAAW,CAC5C,GC3SL,SAAgB,EACd,EAC0B,CAC1B,OAAO,IAAI,EAAyB,EAAU,iBAAiB,CAAG,GAChE,EAAU,uBAAuB,EAAS,CAC3C,CAuCH,SAAgB,GAAwB,CACtC,eACA,WACA,YACA,kBACA,uBACA,iBACA,UASqB,CACrB,GAAI,GAAgB,8BAAgC,GAAO,CAGzD,IAAM,EAAa,EADG,IAAI,EAAoB,EAAa,CACK,CAE1DC,EAA4C,CAChD,SAAU,GAAY,EACtB,kBACE,GAAgB,oBAAsB,OAClC,IAAA,GACA,GAAgB,kBACtB,mBAAoB,EACpB,kBACA,wBACE,GAAgB,wBAA0B,GAAK,KAAO,KACzD,CAEK,EAAqB,EAAW,kBAAkB,EAAmB,CAG3E,EAAO,IAAI,+BAA+B,EAAmB,WAAW,CACxE,IAAK,IAAM,KAAU,EAAmB,UACtC,EAAO,IAAI,OAAO,IAAS,CAE7B,IAAK,IAAM,KAAW,EAAmB,SACvC,EAAO,IAAI,cAAc,IAAU,CAYrC,OARA,GAAgB,qBAAqB,CACnC,OAAQ,EAAmB,SAC3B,UAAW,EAAmB,UAC9B,gBAAiB,EAAmB,gBACpC,UAAW,EAAmB,UAC9B,SAAU,EAAmB,SAC9B,CAAC,CAEK,MACF,CAEL,IAAM,EACJ,EAAkB,GAClB,GACA,GAAY,GAAgB,wBAA0B,GAAK,KAAO,OAClE,CAAC,EAEH,MAAO,CACL,SAAU,EAA0B,WAAa,SACjD,YACA,gBAAiB,EAA0B,EAAkB,EAC7D,UAAW,CACT,8BAA8B,EAA0B,WAAa,WACtE,CACD,SAAU,EAAE,CACb,EAOL,SAAgB,EACd,EACA,EAAsC,EACtC,EAKA,CACA,IAAMC,EAAmB,EAAE,CACrBC,EAAqB,EAAE,CAMvB,EAFa,EADG,IAAI,EAAoB,EAAa,CACK,CAElC,sBAAsB,CAClD,SAAU,EACV,kBACE,EAAQ,gBAAgB,oBAAsB,OAC1C,IAAA,GACA,EAAQ,gBAAgB,kBAC9B,mBAAoB,EAAQ,UAC5B,gBAAiB,EAAQ,gBAC1B,CAAC,CAyBF,GAvBK,EAAW,OACd,EAAO,KAAK,GAAG,EAAW,OAAO,CAI/B,EAAQ,iBAAmB,EAAQ,gBAAkB,GACvD,EAAO,KAAK,qCAAqC,CAG/C,EAAQ,WAAa,EAAQ,UAAY,MAC3C,EAAS,KAAK,8CAA8C,CAI5D,EAAQ,gBAAgB,oBAAsB,YAC9C,CAAC,EAAQ,iBAET,EAAS,KACP,iEACD,CAIC,EAAO,OAAS,EAAG,CACrB,EAAO,IAAI,mCAAmC,CAC9C,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,YAAY,IAAQ,CAInC,GAAI,EAAS,OAAS,EAAG,CACvB,EAAO,IAAI,qCAAqC,CAChD,IAAK,IAAM,KAAW,EACpB,EAAO,IAAI,cAAc,IAAU,CAIvC,MAAO,CACL,MAAO,EAAO,SAAW,EACzB,SACA,WACD,CAkDH,SAAgB,GACd,EACA,EACM,CACN,IAAM,EAAmB,EACvB,EACA,EACA,EACD,CAED,GAAI,CAAC,EAAiB,MAAO,CAC3B,IAAM,EAAe,kDAAkD,EAAiB,OAAO,KAAK,KAAK,GAEzG,MADA,EAAO,IAAI,EAAa,CAClB,IAAI,EAAgB,CACxB,KAAM,4BACN,QAAS,EACV,CAAC,EClQN,MAAM,GACJ,EACA,IAEK,GAGiD,CACpD,eAAgB,mBAChB,
oBAAqB,mBACrB,mBAAoB,gBACpB,gBAAiB,kBACjB,qBAAsB,kBACtB,iBAAkB,kBAClB,4BAA6B,kBAC7B,6BAA8B,kBAC9B,iBAAkB,uBAClB,oBAAqB,kBACtB,CAEe,IAhBQ,EAwV1B,SAAgB,EACd,EACA,EACA,CACE,WAAY,EACZ,SACA,cACA,oBAOa,CAIf,IAAM,EAAa,EACf,IAAI,EAAe,EAAgB,EAAY,CAC/C,EAGE,EAAiB,GAAG,EAAQ,GAAG,EAAkB,aACjD,EAAe,GAAG,EAAQ,GAAG,EAAkB,WAC/C,EAAe,GAAG,EAAQ,GAAG,EAAkB,WAG/C,EAAY,EAAQ,QAAQ,OAAQ,KAAK,CACzC,EAAc,GAAG,EAAU,uBAC3B,EAAY,GAAG,EAAU,qBAMzB,EAA2B,MAC/B,EACA,IAC2B,CAI3B,GAHA,GAAQ,IAAI,4CAA4C,EAAM,GAAG,CAG7D,gBAAiB,EAAS,CAC5B,GAAQ,IAAI,2DAA2D,CAEvE,IAAM,GADU,MAAM,EAAQ,YAAY,EAAE,CAAE,EAAM,EACzB,cAC3B,GAAI,GAAY,WAAW,UAAU,CAInC,OAHA,GAAQ,IACN,sEACD,CACM,EAAW,UAAU,EAAE,CAEhC,GAAQ,IACN,kEAAkE,IACnE,CAIH,GAAI,sBAAuB,EAAS,CAClC,GAAQ,IAAI,wDAAwD,CAEpE,IAAM,GADU,MAAM,EAAQ,kBAAkB,EAAE,CAAC,EACxB,cAC3B,GAAI,EAKF,OAJA,GAAQ,IACN,qEACD,CAEM,EAAW,WAAW,UAAU,CACnC,EAAW,UAAU,EAAE,CACvB,EAEN,GAAQ,IAAI,iDAAiD,CAI/D,OADA,GAAQ,IAAI,qDAAqD,CAC1D,MAGT,MAAO,CAEL,UAAW,KAAO,IAAqB,CACrC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAe,GAAG,IAAW,CAErE,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,mBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,UAAU,EAAS,YAErB,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAC9B,MAAO,CAAE,OAAQ,EAAI,OAAQ,OAAQ,EAAM,EAG7C,aAAc,KAAO,IAAqB,CACxC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAe,GAAG,IAAY,CACpE,OAAQ,SACT,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,uBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,2BAA2B,IAE7B,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,MAAO,CAAE,OAAQ,EAAI,OAAQ,EAG/B,aAAc,KAAO,IAAoB,CACvC,GAAQ,IAAI,gBAAgB,KAAK,UAAU,EAAK,GAAG,CACnD,IAAM,EAAM,MAAM,EAAW,QAAQ,EAAgB,CACnD,OAAQ,OACR,QAAS,CACP,eAAgB,mBACjB,CACD,KAAM,KAAK,UAAU,EAAK,CAC3B,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,uBACD,CACK,EACJ,EAAU,OAAS,EAAU,SAAW,0BAE1C,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAgB,MAAM,EAAI,MAAM,CAEtC,OADA,GAAQ,IAAI,KAAK,UAAU,EAAa,CAAC,CAClC,CAAE,OAAQ,EAAI,OAAQ,OAAQ,EAAc,EAGrD,YAAa,MAAO,EAAU,EAAM,CAAE,qBAAsB,CAC1D,GAAI,CACF,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAe,GAAG,IAAY,CACpE,OAAQ,QACR,QAAS,CACP,eAAgB,2BACjB,CACD,KAAM,EACN,OAAQ,GAAiB,OAC1B,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EACtB,MAAM,CACN,WAAa,EAAE,EAAE,CACpB,MAAM,IAAI,EAAgB,CACxB,KAAM,gBACN,QACE,EAAU,OAAS,EAAU,SAAW,wBAC1C,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAgB,MAAM,EAAI,MAAM,CACtC,MAAO,CAAE,OAAQ,EAAI,OAAQ,OAAQ,EAAc,OAC5C,EAAK,CAIZ,MAHI,aAAe,EACX,EAEF,IAAI,EAAgB,CACxB,KAAM,gBACN,QAAS,gBACT,MAAO,EACR,CAAC,GAKN,QAAS,KAAO,IAAmB,CACjC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,IAAS,CAEjE,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,iBACD,CACK,EACJ,EAAU,OAAS,EAAU,SAAW,QAAQ,EAAO,YAEzD,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,YAAY,IAAS,CAC1B,CAAE,OAAQ,EAAI,OAAQ,KAAM,EAAM,EAG3C,QAAS,MACP,EACA,EACA,IACG,CACH,GAAQ,IAAI,YAAY,EAAO,iBAAiB,IAAY,CAC5D,IAAM,EAAM,MAAM,EAAW,QAC3B,GAAG,EAAa,GAAG,EAAO,GAAG,IAC7B,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,mBACjB,CACD,KAAM,KAAK,UAAU,CAAE,SAAQ,CAAC,CACjC,CACF,CAED,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,kBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,sBAAsB,IAExB,MAAM,IAAI,EAAgB,CACxB,KAAM
,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,qBAAqB,KAAK,UAAU,EAAK,GAAG,CACjD,CAAE,OAAQ,EAAI,OAAQ,IAAK,EAAM,EAG1C,WAAY,MACV,EACA,EACA,EACA,IAGG,CACH,IAAM,EAAc,GAAS,aAAe,mBAExCC,EACJ,AAKE,EALE,IAAgB,2BAEX,EAGA,KAAK,UAAU,CAAE,UAAS,CAAC,CAGpC,IAAM,EAAM,MAAM,EAAW,QAC3B,GAAG,EAAa,GAAG,EAAM,UAAU,IACnC,CACE,OAAQ,QACR,QAAS,CACP,eAAgB,EACjB,CACD,OACD,CACF,CAED,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,sBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,iCAAiC,IAEnC,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAIJ,OADc,MAAM,EAAI,MAAM,EAIhC,UAAW,KAAO,IAAkB,CAClC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,EAAM,QAAS,CACrE,OAAQ,OACT,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,oBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,gCAAgC,IAElC,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,gBAAgB,EAAM,YAAY,EAAK,SAAS,CACrD,GAGT,WAAY,KAAO,IAAkB,CACnC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,EAAM,SAAU,CACtE,OAAQ,OACT,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,qBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,iCAAiC,IAEnC,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,mBAAmB,EAAM,YAAY,EAAK,SAAS,CACxD,GAIT,aAAc,KAAO,IAAkB,CACrC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,EAAM,SAAS,CAEvE,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,gBACD,CACK,EACJ,EAAU,OAAS,EAAU,SAAW,OAAO,EAAM,YAEvD,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAIJ,OADc,MAAM,EAAI,MAAM,EAKhC,oBAAqB,KAAO,IAAqB,CAC/C,IAAI,EAAQ,GAAG,EAAY,GAAG,IAK9B,GAAI,EACF,GAAI,CACF,IAAM,EAAQ,MAAM,EAAyB,EAAa,EAAS,CAC/D,GACF,GAAS,UAAU,mBAAmB,EAAM,GAC5C,GAAQ,IAAI,wCAAwC,IAAW,EAG/D,GAAQ,IACN,4DAA4D,IAC7D,OAEI,EAAO,CACd,IAAM,EACJ,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACxD,GAAQ,IACN,kDAAkD,IACnD,CAED,GAAQ,IACN,qEAAqE,IACtE,CAIL,IAAM,EAAK,EAAiB,OAAO,EAAM,CAczC,MAZA,GAAG,WAAe,CAChB,GAAQ,IAAI,2CAA2C,IAAW,EAGpE,EAAG,YAAgB,CACjB,GAAQ,IAAI,2CAA2C,IAAW,EAGpE,EAAG,QAAW,GAAU,CACtB,GAAQ,IAAI,8BAA8B,EAAS,IAAI,IAAQ,EAG1D,GAGT,kBAAmB,KAAO,IAAkB,CAC1C,IAAI,EAAQ,GAAG,EAAU,GAAG,IAK5B,GAAI,EACF,GAAI,CACF,IAAM,EAAQ,MAAM,EAAyB,EAAa,EAAM,CAC5D,GACF,GAAS,UAAU,mBAAmB,EAAM,GAC5C,GAAQ,IAAI,0CAA0C,IAAQ,EAG9D,GAAQ,IACN,0DAA0D,IAC3D,OAEI,EAAO,CACd,IAAM,EACJ,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACxD,GAAQ,IACN,gDAAgD,IACjD,CAED,GAAQ,IACN,mEAAmE,IACpE,CAIL,IAAM,EAAK,EAAiB,OAAO,EAAM,CAczC,MAZA,GAAG,WAAe,CAChB,GAAQ,IAAI,6CAA6C,IAAQ,EAGnE,EAAG,YAAgB,CACjB,GAAQ,IAAI,6CAA6C,IAAQ,EAGnE,EAAG,QAAW,GAAU,CACtB,GAAQ,IAAI,gCAAgC,EAAM,IAAI,IAAQ,EAGzD,GAGT,eAAiB,GAAsB,CACrC,EAAG,OAAO,EAIZ,yBACS,EAAW,YAAY,CAGhC,iCACS,EAAW,oBAAoB,CAGxC,kBAAmB,KAAO,IACjB,EAAW,kBAAkB,EAAK,CAI3C,gBAAiB,KAAO,IAAsB,CAC5C,IAAM,EAAkB,GAAG,EAAe,0BAA0B,mBAAmB,EAAU,GAEjG,GAAI,CACF,IAAM,EAAW,MAAM,EAAW,QAAQ,EAAiB,CACzD,OAAQ,MACR,QAAS,CACP,eAAgB,mBACjB,CACF,CAAC,CAUF,OARK,EAAS,IAOD,MAAM,EAAS,MAAM,EACuB,cAPvD,GAAQ,IACN,iCAAiC,EAAS,OAAO,GAAG,EAAS,aAC9D,CACM,SAKF,EAAO,CAId,OAHA,GAAQ,IACN,wDAAwD,IACzD,CACM,IAGZ,CCl5BH,IAAa,EAAb,KAAwC,CAItC,YACE,EACA,EACA,EACA,CAHQ,KAAA,cAAA,EACA,KAAA,OAAA,EACA,KAAA,QAAA,wBANiB,IAAI,wBACN,IAAI,IAW7B,MAAM,oBAAoB,EAA0C,CAElE,KAAK,qBAAqB,EAAS,CAEnC
,IAAM,EAAK,MAAM,KAAK,cAAc,oBAAoB,EAAS,CAkCjE,OAjCA,KAAK,iBAAiB,IAAI,EAAU,EAAG,CAEvC,EAAG,UAAa,GAAU,CACxB,GAAI,CACF,IAAM,EAAc,EAAuB,UACzC,KAAK,MAAM,EAAM,KAAK,CACvB,CAEG,EAAY,QACV,EAAY,KAAK,OAAS,gBAC5B,KAAK,UAAU,EAAY,KAAK,QAAQ,CAG1C,KAAK,OAAO,MACV,+BAA+B,EAAY,MAAM,UAClD,OAEI,EAAO,CACd,KAAK,OAAO,MAAM,+BAA+B,IAAQ,GAI7D,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,MAAM,8BAA8B,EAAS,IAAI,IAAQ,EAGvE,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,IACV,+BAA+B,EAAS,aAAa,EAAM,KAAe,YAAY,EAAM,SAC7F,CACD,KAAK,iBAAiB,OAAO,EAAS,EAGjC,EAMT,MAAM,kBAAkB,EAAuC,CAE7D,KAAK,mBAAmB,EAAM,CAE9B,IAAM,EAAK,MAAM,KAAK,cAAc,kBAAkB,EAAM,CAgD5D,OA/CA,KAAK,eAAe,IAAI,EAAO,EAAG,CAElC,EAAG,UAAa,GAAU,CACxB,GAAI,CACF,IAAM,EAAU,KAAK,MAAM,EAAM,KAAK,CAEtC,OAAQ,EAAQ,KAAhB,CACE,IAAK,aACH,KAAK,OAAO,IAAI,qCAAqC,EAAQ,KAAK,CAClE,MACF,IAAK,aACH,KAAK,OAAO,IACV,sCAAsC,EAAQ,QAAQ,QACvD,CACD,MACF,IAAK,QACH,KAAK,OAAO,MACV,yBAAyB,EAAQ,QAAQ,WAAW,EAAM,aAAa,EAAQ,OAChF,CACD,MACF,IAAK,OACH,KAAK,OAAO,IAAI,yCAAyC,IAAQ,CACjE,MACF,IAAK,aACH,KAAK,UAAU,EAAQ,QAAQ,CAC/B,MACF,QACE,KAAK,OAAO,KACV,wCAAwC,EAAQ,OACjD,QAEE,EAAO,CACd,KAAK,OAAO,MAAM,wCAAwC,IAAQ,GAItE,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,MAAM,gCAAgC,EAAM,IAAI,IAAQ,EAGtE,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,IACV,iCAAiC,EAAM,aAAa,EAAM,KAAe,YAAY,EAAM,SAC5F,CACD,KAAK,eAAe,OAAO,EAAM,EAG5B,EAOT,MAAM,cAAc,EAAoC,CAMtD,OAHI,EAAG,WAAW,UAAU,EAAI,EAAG,SAAS,SAAS,CAC5C,MAAM,KAAK,oBAAoB,EAAG,CAEpC,MAAM,KAAK,kBAAkB,EAAG,CAMzC,qBAAqB,EAAwB,CAC3C,IAAM,EAAK,KAAK,iBAAiB,IAAI,EAAS,CAC1C,IACF,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,iBAAiB,OAAO,EAAS,EAO1C,mBAAmB,EAAqB,CACtC,IAAM,EAAK,KAAK,eAAe,IAAI,EAAM,CACrC,IACF,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,eAAe,OAAO,EAAM,EAOrC,eAAe,EAAkB,CAE/B,KAAK,qBAAqB,EAAG,CAC7B,KAAK,mBAAmB,EAAG,CAM7B,UAAiB,CAEf,IAAK,GAAM,CAAC,EAAU,KAAO,KAAK,iBAAiB,SAAS,CAC1D,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,iBAAiB,OAAO,EAAS,CAIxC,IAAK,GAAM,CAAC,EAAO,KAAO,KAAK,eAAe,SAAS,CACrD,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,eAAe,OAAO,EAAM,CAOrC,SAAS,EAAwB,CAC/B,IAAM,EAAK,KAAK,eAAe,IAAI,EAAM,CAUzC,OATI,GAAM,EAAG,aAAe,EAAG,MAC7B,EAAG,KACD,KAAK,UAAU,CACb,KAAM,OACN,UAAW,IAAI,MAAM,CAAC,aAAa,CACpC,CAAC,CACH,CACM,IAEF,GAMT,mBAAmB,EAA6C,CAC9D,OAAO,KAAK,iBAAiB,IAAI,EAAS,CAM5C,iBAAiB,EAA0C,CACzD,OAAO,KAAK,eAAe,IAAI,EAAM,CAMvC,kBAAkB,EAA2B,CAC3C,IAAM,EAAK,KAAK,iBAAiB,IAAI,EAAS,CAC9C,OAAO,GAAI,aAAe,GAAI,KAMhC,gBAAgB,EAAwB,CACtC,IAAM,EAAK,KAAK,eAAe,IAAI,EAAM,CACzC,OAAO,GAAI,aAAe,GAAI,KAMhC,YAAY,EAAqB,CAC/B,OAAO,KAAK,kBAAkB,EAAG,EAAI,KAAK,gBAAgB,EAAG,CAM/D,oBAA6B,CAC3B,OAAO,KAAK,iBAAiB,KAAO,KAAK,eAAe,KAM1D,0BAIE,CACA,MAAO,CACL,OAAQ,KAAK,iBAAiB,KAC9B,KAAM,KAAK,eAAe,KAC1B,MAAO,KAAK,iBAAiB,KAAO,KAAK,eAAe,KACzD,GCpDL,MAAaI,EAAuD,CAElE,sBAAuB,EAEvB,kBAAmB,IAEnB,iBAAkB,IAElB,YAAa,GAEb,uBAAwB,GACzB,CAuHD,SAAgB,EAAoC,CAClD,QAAS,EACT,qBAAqB,aACrB,YACA,cAAc,CAAC,IAAM,IAAM,IAAK,CAChC,YACA,kBAAkB,EAClB,oBACA,iBACA,gBACA,oBACA,gBACA,kBACA,UACA,aACA,aACA,SAAS,EAAa,GAAK,CAC3B,aACA,qBACA,gBACA,8BAA8B,GAC9B,mBACA,yBACA,kBACA,QACuC,CACvC,IAAM,EAAU,EAAS,QAAQ,MAAO,GAAG,CAGrCC,EAA2B,EAC7B,EAAK,OAAS,SACZ,IAAI,EAAkB,EAAM,EAAiB,EAAO,CACpD,IAAI,EAA2B,EAAM,EAAW,CAClD,IAAI,EAGJ,GACF,EAAO,IACL,6BAA6B,EAAK,KAAK,OAAO,EAAK,OAAS,mBAAqB,aAAa,EAAK,cAAc,GAAK,KACvH,CAIH,IAAM,EAAgB,EAAoB,EAAS,EAAoB,CACrE,SACA,aACA,cACA,mBACD,CAAC,CAGI,EAAiB,IAAI,EAAe,EAAkB,CACtD,EAAU,IAAI,EAAc,EAAc,CAG5CC,EAAmD,KAEjD,EAAkB,SAClB,IAGJ,EAAqB,MAAM,EAAc,gBAAgB,EAAU,CAC5D,GAILC,EACE,EAAyB,SAAY,CACzC,GAAI,EAAc,OAAO,EAEzB,IAAM,EAAe,MAAM,GAAiB,CAEtC,EACJ,EAAa,cACb,EAAa,cACb,EAAa,iBACT,CACE,aAAc,EAAa,aAC3B,aAAc,EAAa,aAC3B,iBAAkB,EAAa,iBAC/B,sBAAuB,EAAa,sBACrC,CACD,IAAA,GAaN,MAXA,GAAe,IAAI,EAAa,EAAgB,CAC9C,QAAS,GACT,GAAG,EACH,kBAAmB,EACnB,uBACD,CAAC,CAEF,EAAO,I
ACL,yDAAyD,KAAK,UAAU,EAAqB,GAC9F,CAEM,GAIH,EAAY,IAAI,EACpB,EACA,EACA,EACD,CAuiBD,OAlWA,GACE,CACE,UACA,YACA,YACA,kBACA,oBACA,iBACD,CACD,EACD,CAwVM,CAEL,OApiBa,MACb,EACA,CACE,uBAAuB,GACvB,aACA,aACA,kBACA,YACA,gBACA,WAC2B,EAAE,GACI,CACnC,IAAIC,EAA0B,KAC1BC,EAAoC,KAElC,EAAc,MAAM,EAAmB,mBAC3C,EACA,GAAG,EAAQ,GAAG,EAAmB,aAClC,CAGD,GADA,EAAO,IAAI,gBAAgB,IAAc,CACrC,CAAC,EACH,MAAU,MAAM,mDAAmD,CAGrE,IAAM,EAAkB,MAAM,GAC5B,EACA,EACD,CACD,GAAI,EAAgB,OAAS,GAAK,EAAgB,IAAM,KAAM,CAC5D,IAAM,EAAiB,GAAyB,EAAgB,GAAG,CACnE,EAAqB,EAAe,iBACpC,EAAW,EAAe,SAG5B,IAAM,EAAS,MAAM,EAAW,SAAS,EAAM,EAAU,CAEnD,EAAO,EAAkB,EAAO,KAAM,CAC1C,uBACA,aACD,CAAC,CACF,EAAO,KAAO,EAEd,IAAM,EAA0B,MAAM,GAAwB,CAE/B,GAAe,UAAY,IAExD,EAAQ,aAAa,EAAa,GAAQ,EAAG,GAAK,CAKpD,IAAM,EAAqB,GAAwB,CACjD,aAHmB,MAAM,GAAiB,CAI1C,SAAU,EACV,YACA,kBACA,uBACA,eAAgB,EAChB,SACD,CAAC,CAEF,GAAI,EAAmB,WAAa,WAAY,CAC9C,EAAO,IACL,8BAA8B,EAAmB,gBAAgB,UAClE,CAED,IAAM,EAAiB,MAAM,GAAoB,CAC/C,kBACA,SACA,YACA,cACA,uBACA,gBAAiB,EAAmB,gBACpC,oBACA,cACA,aAAc,EACd,gBACA,SACA,gBACA,UACA,gBACA,aACA,8BACA,yBACA,kBACA,cAAgB,IACd,EAAU,oBAAoB,EAAG,CAG1B,MAET,eAAiB,GAAO,EAAU,qBAAqB,EAAG,CAC1D,UAAY,GACV,EAAU,EAAI,EAAe,EAAiB,EAAY,CAC5D,aACA,kBACA,YACA,UACD,CAAC,CAEF,GAAI,EACF,MAAO,CACL,MAAO,SAAY,CACjB,MAAM,EAAe,OAAO,EAE/B,CAGH,EAAO,IAAI,wDAAwD,CAIrE,IAAM,EAAS,MAAM,GAAkB,CACrC,SACA,YACA,WACA,kBACA,qBACA,kBACA,cACA,uBACA,gBACA,SACA,gBACA,aACA,8BACA,cAAgB,IACd,EAAU,oBAAoB,EAAG,CAG1B,MAET,eAAiB,GAAO,EAAU,qBAAqB,EAAG,CAC1D,aACA,kBACA,YACA,UACD,CAAC,CAEF,GAAI,EAAQ,CACV,IAAM,EAAkB,EAAuB,QAAQ,CACjD,CAAE,SAAA,EAAU,mBAAA,EAAoB,UAAW,EAE7CC,EAA4B,KAyBhC,OAvBA,EAAc,CACZ,kBACA,SAAA,EACA,SACA,SACA,uBACA,cACA,aAAc,EACd,gBACA,SACA,gBACA,UACA,kBACA,aACA,kBACA,YACA,gBACA,QAAU,GAAY,CACpB,EAAY,GAEd,UACD,CAAC,CAEK,CACL,UAAa,CACX,EAAM,CACJ,kBACA,SAAA,EACA,mBAAA,EACA,aAAc,EACd,gBAAiB,GACjB,kBACA,gBACA,cACA,gBACD,CAAC,EAEL,CAGH,MAAO,CACL,UAAa,GACd,EAyWD,eAtVqB,MACrB,EACA,EACA,CACE,aACA,kBACA,YACA,gBACA,aACA,WAIE,EAAE,GAKF,CACJ,IAAM,EAAS,MAAM,EAAW,SAAS,EAAM,EAAU,CAEnD,EAA0B,MAAM,GAAwB,CAG9D,GAD+B,GAAe,UAAY,GAC9B,CAC1B,IAAM,EAAc,MAAM,EAAmB,mBAC3C,EACA,GAAG,EAAQ,GAAG,EAAmB,WAClC,CACD,EAAQ,aAAa,GAAe,UAAW,EAAO,MAAQ,EAAG,GAAK,CAGxE,IAAM,EAAS,MAAM,EAAgB,CACnC,SACA,aACA,gBACA,SACA,kBACA,cAAgB,GAAO,EAAU,kBAAkB,EAAG,CACtD,eAAiB,GAAO,EAAU,eAAe,EAAG,CACpD,aACA,kBACA,YACA,aACA,UACD,CAAC,CAEF,GAAI,CAAC,EACH,MAAO,CACL,MAAO,SAAY,GACnB,MAAO,SAAY,GACnB,MAAO,GACR,CAGH,GAAM,CAAE,QAAO,aAAY,eAAgB,EACrC,EAAkB,EAAuB,QAAQ,CAGvD,MAAM,EAAU,oBAAoB,EAAW,GAAG,CAElD,IAAIA,EAA4B,KA0BhC,OAxBA,EAAkB,CAChB,QACA,aACA,cACA,OAAQ,EAAW,OACnB,SACA,cACA,aAAc,EACd,gBACA,SACA,gBACA,UACA,kBACA,kBACA,aACA,kBACA,YACA,gBACA,QAAU,GAAY,CACpB,EAAY,GAEd,UACD,CAAC,CAEK,CACL,MAAO,SAAY,CAEjB,GAAI,CACF,MAAM,EAAc,WAAW,EAAM,CACrC,EAAO,IAAI,6BAA6B,IAAQ,OACzC,EAAK,CAEZ,EAAO,IAAI,oCAAoC,IAAM,CAIvD,EAAgB,OAAO,CACnB,GACF,EAAgB,aAAa,EAAU,CAGzC,EAAU,eAAe,EAAM,CAC/B,EAAU,qBAAqB,EAAW,GAAG,EAE/C,MAAO,SAAY,CACjB,MAAM,EAAc,UAAU,EAAM,EAEtC,QACD,EAwOD,qBA5N2B,MAC3B,EACA,EACA,CACE,aACA,kBACA,gBACA,aACA,UACA,kBACA,kBACA,gBAaE,EAAE,GAKF,CAEJ,GAAM,CAAE,OAAQ,MAAM,EAAc,QAClC,EAAW,OACX,EAAW,WAAa,EACxB,EAAE,CACH,CACK,EAAQ,EAAI,GAElB,EAAO,IAAI,6BAA6B,IAAQ,CAChD,IAAa,EAAM,CAGnB,MAAM,EAAU,kBAAkB,EAAM,CAExC,IAAMC,EAAkF,IAAI,IACtFC,EAAiC,IAAI,IACrCC,EAAwB,EAAE,CAEhC,GAAI,CAGF,IAAM,EADe,OAAO,QAAQ,EAAO,CACT,IAAI,MAAO,CAAC,EAAQ,KAAU,CAC9D,IAAM,EAAY,EAAgB,EAAK,CAEvC,GAAI,IAAc,OAAQ,CAExB,IAAM,EAAO,EACP,EAAS,MAAM,EAAW,SAAS,EAAM,EAAU,CAqBzD,MAAO,CAAE,SAAQ,YAnBE,MAAM,EAAoB,CAC3C,SACA,QACA,SACA,UAAW,EAAW,WAAa,EACnC,SAAU,EAAE,CACZ,gBACA,SACA,kBACA,UAAW,CACT,SAAU,CAAE,cAAe,CACzB,EAAU,IAAI,EAAQ,EAAS,CAE/B,EAAU
,oBAAoB,EAAS,EAEzC,UACD,CACF,CAAC,EAEsC,WAAY,SAAQ,YAAW,SAC9D,IAAc,MAavB,OAXA,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,MACX,IAAK,EACL,UAAW,EAAW,WAAa,EACpC,CACD,CAAE,YAAa,mBAAoB,CACpC,CAEM,CAAE,SAAQ,WAAY,KAAM,OAAQ,KAAM,YAAW,MAU5D,OAPA,MAAM,EAAc,WAClB,EACA,EACA,EACA,CAAE,YAAa,mBAAoB,CACpC,CAEM,CAAE,SAAQ,WAAY,KAAM,OAAQ,KAAM,YAAW,EAE9D,CAEI,EAAoB,MAAM,QAAQ,IAAI,EAAa,CAGnD,EAA0B,MAAM,GAAwB,CACxD,EAAiB,EACpB,OAAQ,GAAU,EAAM,YAAc,QAAU,EAAM,YAAc,EAAM,OAAO,CACjF,IAAI,MAAO,CAAE,SAAQ,aAAY,YAAa,CAC7C,IAAM,EAAkB,EAAuB,QAAQ,CACvD,EAAiB,IAAI,EAAQ,EAAgB,CAE7C,IAAMC,EAAU,IAAI,EAAc,CAChC,sBAAuB,GAAe,UAAY,GACnD,CAAC,CAEF,GAAI,CAAC,GAAc,CAAC,EAClB,MAAU,MAAM,yCAAyC,IAAS,CAGpE,GAAI,CACF,MAAM,EAAkB,CACtB,SACA,QACA,aACA,SACA,OAAQ,EAAW,OACnB,aAAc,EACd,kBACA,cACA,aAAc,EACd,gBACA,SACA,gBACA,QAAA,EACA,kBACA,QAAU,GAAY,CACpB,EAAW,KAAK,EAAQ,EAE1B,UAAW,CACT,YAAa,EAAU,EAAe,IAAe,CACnD,IAAa,EAAU,EAAe,EAAW,CAGjD,IAAM,EAAW,EAAa,KAAK,MAAO,EAAgB,EAAc,IAAI,CAAG,EAC/E,IAAkB,EAAQ,EAAU,EAAe,EAAW,EAEhE,kBACA,gBACD,CACF,CAAC,CAGF,MAAM,GAAkB,CACtB,SACA,QACA,SAAU,EAAW,GACrB,gBACA,SACA,UAAW,CAAE,UAAS,CACvB,CAAC,CAEF,IAAkB,EAAO,OAClB,EAAK,CACZ,IAAM,EAAQ,aAAe,MAAQ,EAAU,MAAM,OAAO,EAAI,CAAC,CAEjE,MADA,IAAe,EAAQ,EAAM,CACvB,IAER,CAEJ,MAAM,QAAQ,IAAI,EAAe,CAEjC,EAAO,IAAI,gCAAgC,IAAQ,OAC5C,EAAK,CACZ,IAAM,EAAQ,aAAe,MAAQ,EAAU,MAAM,OAAO,EAAI,CAAC,CAGjE,MAFA,EAAO,IAAI,mCAAmC,EAAM,UAAU,CAC9D,IAAU,EAAM,CACV,EAGR,MAAO,CACL,MAAO,SAAY,CACjB,GAAI,CACF,MAAM,EAAc,WAAW,EAAM,CACrC,EAAO,IAAI,6BAA6B,IAAQ,OACzC,EAAK,CACZ,EAAO,IAAI,oCAAoC,IAAM,CAIvD,IAAK,IAAM,KAAc,EAAiB,QAAQ,CAChD,EAAW,OAAO,CAIpB,IAAK,IAAM,KAAa,EACtB,EAAgB,aAAa,EAAU,CAIzC,EAAU,eAAe,EAAM,CAC/B,IAAK,IAAM,KAAY,EAAU,QAAQ,CACvC,EAAU,qBAAqB,EAAS,EAG5C,MAAO,SAAY,CACjB,MAAM,EAAc,UAAU,EAAM,EAEtC,QACD,EAQD,MAAQ,GAAwC,EAAM,EAAO,CAG7D,QAAS,KAAO,IAAmB,CACjC,GAAM,CAAE,SAAQ,QAAS,MAAM,EAAc,QAAQ,EAAO,CAC5D,MAAO,CAAE,SAAQ,OAAM,EAGzB,QAAS,MAAO,CACd,SACA,SACA,UAAW,KAKP,CACJ,GAAM,CAAE,SAAQ,OAAQ,MAAM,EAAc,QAC1C,EACA,GAAiB,EACjB,EACD,CACD,MAAO,CAAE,SAAQ,MAAK,EAGxB,WAAY,MAAO,CACjB,QACA,SACA,UACA,iBAOO,EAAc,WAAW,EAAO,EAAQ,EAAS,CACtD,cACD,CAAC,CAGJ,UAAW,KAAO,IACT,EAAc,UAAU,EAAM,CAGvC,WAAY,KAAO,IACV,EAAc,WAAW,EAAM,CAyBxC,cAAe,KAAO,IAAmB,CACvC,GAAM,CAAE,QAAS,MAAM,EAAc,QAAQ,EAAO,CAC9C,EAAa,EAAK,MACrB,OAAQ,GAAS,EAAK,OAAS,QAAQ,CACvC,IAAK,IAAU,CACd,GAAI,EAAK,GACT,KAAM,EAAK,KACX,KAAM,EAAK,KACZ,EAAE,CAEL,MAAO,CACL,aACA,OAAQ,EAAW,SAAW,EAC/B,EAmCH,sBAAuB,MACrB,EACA,EACA,IAIG,CAEH,GAAM,CAAE,SAAQ,OAAQ,MAAM,EAAc,QAC1C,EACA,GAAS,WAAa,EACtB,EACD,CAOD,OAJI,GAAK,IAAM,GAAS,YACtB,EAAQ,WAAW,EAAI,GAAG,CAGrB,CAAE,SAAQ,MAAK,EAIxB,aAAc,KAAO,IACZ,EAAc,aAAa,EAAM,CAI1C,oBAAsB,GACpB,EAAU,oBAAoB,EAAS,CACzC,kBAAoB,GAAkB,EAAU,kBAAkB,EAAM,CACxE,cAAgB,GAAe,EAAU,cAAc,EAAG,CAC1D,eAAiB,GAAe,EAAU,eAAe,EAAG,CAC5D,uBAA0B,EAAU,UAAU,CAC9C,SAAW,GAAkB,EAAU,SAAS,EAAM,CACtD,qBAAuB,GAAe,EAAU,YAAY,EAAG,CAC/D,gCAAmC,EAAU,oBAAoB,CACjE,sCACE,EAAU,0BAA0B,CAGtC,sBAAyB,EAAe,mBAAmB,CAC3D,wBAA2B,EAAe,qBAAqB,CAC/D,wBAA2B,EAAQ,wBAAwB,CAC3D,kBAAqB,EAAQ,eAAe,CAG5C,yBAA4B,EAAc,sBAAsB,CAChE,iCACE,EAAc,8BAA8B,CAC9C,kBAAoB,GAClB,EAAc,kBAAkB,EAAK,CAGvC,6BAA8B,UACZ,MAAM,GAAwB,EAC/B,8BAA8B,CAG/C,aAAc,SAAY,CACxB,EAAe,OAAO,EACN,MAAM,GAAwB,EACtC,OAAO,CACf,EAAQ,OAAO,EAIjB,sBAAwB,GACf,EAAsB,EAAS,EAA2B,EAAO,CAG1E,2BAA4B,KAC1B,IACG,CACH,IAAMC,EAAmB,EAAE,CACrBC,EAAqB,EAAE,CAGvB,EAAe,MAAM,EAAc,gBACvC,EAAQ,UACT,CAEK,EAAa,EAAsB,EAAS,EAAc,EAAO,CAIvE,OAHA,EAAO,KAAK,GAAG,EAAW,OAAO,CACjC,EAAS,KAAK,GAAG,EAAW,SAAS,CAE9B,CACL,MAAO,EAAO,SAAW,EACzB,SACA,WACA,eACD,EAGH,kBACD,CC7rCH,MAAa,EAAuB,EAAE,OAAO,CAC3C,KAAM,EAAE,QAAQ,CAAC,UAAU,CAC3B,SAAU,EAAE,OACV,EAAE,QAAQ,CACV,EAAE,MAAM,CAAC,EAAE,QAAQ,CAAE,EAAE,QAAQ,CAAE,EA
AE,SAAS,CAAC,CAAC,CAC/C,CACD,aAAc,EAAE,QAAQ,CACxB,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,mBAAoB,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC,UAAU,CAClD,iBAAkB,EAAE,QAAQ,CAC7B,CAAC,CCuIF,SAAgB,EACd,EACe,CACf,MAAO,CACL,eAAgB,SAAY,CAC1B,IAAM,EAAQ,MAAM,EAAe,KAAK,eAAe,CACvD,OAAO,OAAO,OAAO,EAAM,CAAC,IAAK,GAC/B,EAAqB,MAAM,KAAK,MAAM,EAAK,CAAC,CAC7C,EAEH,yBAA0B,KAAO,IAAwB,CACvD,IAAM,EAAQ,MAAM,EAAe,KAAK,eAAe,IAAc,CACrE,OAAO,OAAO,OAAO,EAAM,CAAC,IAAK,GAC/B,EAAqB,MAAM,KAAK,MAAM,EAAK,CAAC,CAC7C,EAEH,aAAe,GACb,EAAe,WAAW,EAAiB,CAC7C,UAAW,MACT,EACA,EACA,CAAE,gBACC,CAEH,IAAM,EAAmB,eAAe,EAAY,IADxC,EAAW,UAAU,GAGjC,OADA,MAAM,EAAe,QAAQ,EAAkB,KAAK,UAAU,EAAO,CAAC,CAC/D,GAEV,CClLH,SAAgB,GAA+C,CAC7D,IAAM,EAAU,IAAI,IAEpB,MAAO,CACL,MAAM,QAAQ,EAAqC,CACjD,OAAO,EAAQ,IAAI,EAAI,EAAI,MAG7B,MAAM,QAAQ,EAAa,EAA8B,CACvD,EAAQ,IAAI,EAAK,EAAM,EAGzB,MAAM,WAAW,EAA4B,CAC3C,EAAQ,OAAO,EAAI,EAGrB,MAAM,SAA2C,CAC/C,OAAO,OAAO,YAAY,EAAQ,SAAS,CAAC,EAG9C,MAAM,KAAK,EAAiD,CAC1D,OAAO,OAAO,YACZ,MAAM,KAAK,EAAQ,SAAS,CAAC,CAAC,QAAQ,CAAC,KAAS,EAAI,WAAW,EAAO,CAAC,CACxE,EAEJ,CC2EH,IAAa,GAAb,KAA6E,CAQ3E,YAAY,EAA8C,CAA7B,KAAA,YAAA,qBAPkB,EAAE,CA2BjD,UACE,EACA,EACqB,CAErB,IAAME,EAA+C,GAAa,CAC5D,KAAK,kBAAkB,EAAO,EAAO,EACvC,EAAQ,EAAM,EAKZ,EAAc,KAAK,YAAY,UAAU,EAAe,CAGxDC,EAAoC,CACxC,cACA,QAAS,EACT,SACD,CAKD,OAHA,KAAK,cAAc,KAAK,EAAa,KAGxB,CACX,IAAM,EAAQ,KAAK,cAAc,QAAQ,EAAa,CAClD,IAAU,IACZ,KAAK,cAAc,OAAO,EAAO,EAAE,CAErC,GAAa,EAWjB,kBAA0B,EAAU,EAAsC,CACxE,GAAI,CAAC,EACH,MAAO,GAIT,GAAI,EAAO,WAAa,EAAM,OAAS,EAAO,UAC5C,MAAO,GAIT,GAAI,EAAO,WAAa,IAAA,GAAW,CAEjC,IAAM,EADY,EAAM,MACG,GAI3B,GAAI,EAAO,WAAa,SAClB,IAAY,IAAA,GACd,MAAO,WAEA,IAAY,EAAO,SAC5B,MAAO,GAUX,OALI,EAAO,aAEF,EAAO,aAAa,EAAiC,CAGvD,GAQT,sBAA+B,CAC7B,OAAO,KAAK,cAAc,OAQ5B,kBAA4B,CAC1B,OAAO,KAAK,cAAc,OAAS,EAwBrC,SAAgB,CACd,IAAK,IAAM,KAAgB,KAAK,cAC9B,EAAa,aAAa,CAE5B,KAAK,cAAgB,EAAE,CAoBzB,qBAAqB,EAAkC,CACrD,IAAK,IAAM,KAAgB,KAAK,cAC1B,EAAa,QAAU,EAAa,OAAO,WAAa,IAAA,KAC1D,EAAa,OAAO,SAAW,KCSvC,MAAMC,GAAgC,CACpC,OAAQ,OACR,SAAU,EACV,cAAe,EACf,WAAY,KACZ,MAAO,KACP,MAAO,KACP,YAAa,GACb,gBAAiB,KACjB,gBAAiB,KACjB,YAAa,KACd,CA+BD,IAAa,GAAb,KAAmD,CAejD,YACE,EACA,EACA,EACA,EACA,CAJiB,KAAA,aAAA,EACA,KAAA,UAAA,EACA,KAAA,QAAA,EACA,KAAA,mBAAA,uBAjByC,sBACJ,IAAI,kCAEV,KAgBhD,KAAK,MAAQ,CAAE,GAAGK,GAAc,CAMlC,UAA4B,CAC1B,MAAO,CAAE,GAAG,KAAK,MAAO,CAM1B,aAAuB,CACrB,OACE,KAAK,MAAM,SAAW,aAAe,KAAK,MAAM,SAAW,aAO/D,iBAA2B,CACzB,OAAO,KAAK,MAAM,SAAW,YAM/B,cAAwB,CACtB,OAAO,KAAK,MAAM,SAAW,aAM/B,UAA0B,CACxB,OAAO,KAAK,MAAM,MAMpB,YAAoB,EAAwC,CAC1D,KAAK,MAAQ,CAAE,GAAG,KAAK,MAAO,GAAG,EAAQ,CACzC,KAAK,UAAU,cAAc,KAAK,MAAM,CAS1C,gBAAgB,EAAwB,CAGtC,GAAI,EAAM,YAAc,EAAU,WAAa,CAAC,KAAK,MAAM,MAAO,CAChE,KAAK,YAAY,CACf,MAAO,EAAM,MACb,YAAa,GACb,OAAQ,aACT,CAAC,CACF,OAIE,MAAC,KAAK,MAAM,OAAS,EAAM,QAAU,KAAK,MAAM,OAKpD,OAAQ,EAAM,UAAd,CACE,KAAK,EAAU,UACb,KAAK,YAAY,CACf,YAAa,GACb,OAAQ,aACT,CAAC,CACF,MAEF,KAAK,EAAU,UACb,KAAK,YAAY,CACf,OAAQ,aACR,gBAAiB,EAAM,SACvB,gBAAiB,EAAM,SACxB,CAAC,CACF,MAEF,KAAK,EAAU,UAEb,KAAK,YAAY,CACf,OAAQ,YACR,gBAAiB,EAAM,SAExB,CAAC,CACF,MAEF,KAAK,EAAU,WAEb,KAAK,YAAY,CACf,OAAQ,aACR,gBAAiB,EAAM,SACvB,gBAAiB,EAAM,SACxB,CAAC,CACF,MAEF,KAAK,EAAU,QACb,KAAK,YAAY,CACf,OACE,KAAK,MAAM,SAAW,YAClB,aACA,KAAK,MAAM,OACjB,gBAAiB,KACjB,gBAAiB,KAClB,CAAC,CACF,MAEF,KAAK,EAAU,QAAS,CAEtB,IAAM,EAAc,EAAM,SAAW,KAGjC,GAAe,KAAK,UAAU,gBAChC,KAAK,UAAU,eAAe,EAAY,CAIxC,GAAe,EAAY,OAAS,GAAK,KAAK,UAAU,WAC1D,KAAK,UAAU,UAAU,EAAY,CAGvC,KAAK,YAAY,CACf,OAAQ,UACR,gBAAiB,KACjB,gBAAiB,KACjB,cACD,CAAC,CAEF,KAAK,gBAAkB,KACvB,MAGF,KAAK,EAAU,UAAW,CACxB,IAAM,EAAY,MAAM,EAAM,MAAM,CACpC,KAAK,YAAY,CACf,OAAQ,QACR,QACD,CAAC,CACF,KAAK,UAAU,UAAU,EAAM,CAC/B,KAAK,gBAAkB,KACvB,MAGF,KAAK,EAAU,UAAW,CACxB,IAAM,EAAY,MAAM,EAA
M,MAAM,CACpC,KAAK,YAAY,CACf,OAAQ,QACR,QACD,CAAC,CACF,KAAK,UAAU,UAAU,EAAM,CAC/B,KAAK,gBAAkB,KACvB,MAGF,KAAK,EAAU,WACb,KAAK,YAAY,CACf,OAAQ,UACT,CAAC,CACF,KAAK,UAAU,WAAW,CAC1B,KAAK,gBAAkB,KACvB,OAYN,qBACE,EACA,EACA,EACM,CAEN,IAAM,EACJ,GAAc,EAAa,EACvB,KAAK,MAAO,EAAgB,EAAc,IAAI,CAC9C,EASN,GAPA,KAAK,YAAY,CACf,gBACA,aACA,WACD,CAAC,CAGE,KAAK,yBAA0B,CACjC,IAAM,EAAa,KAAK,YAAY,IAAI,KAAK,yBAAyB,CAClE,IACF,EAAW,OAAS,YACpB,EAAW,SAAW,EACtB,EAAW,cAAgB,EAC3B,EAAW,WAAa,GAI5B,KAAK,UAAU,aAAa,EAAU,EAAe,EAAW,CAsBlE,MAAM,OAAO,EAA8B,CAGzC,IAAIC,EAA4B,KAC5B,GAAS,OAAO,GAAU,UACxB,SAAU,GAAS,OAAO,EAAM,MAAS,WAC3C,EAAa,EAAM,MAKvB,KAAK,YAAY,CACf,OAAQ,YACR,SAAU,EACV,cAAe,EACf,aACA,MAAO,KACP,MAAO,KACP,YAAa,GACb,gBAAiB,KACjB,gBAAiB,KACjB,YAAa,KACd,CAAC,CAEF,GAAI,CAEF,IAAMC,EAA6C,CACjD,WAAa,GAAkB,CAC7B,KAAK,YAAY,CACf,QACD,CAAC,CACF,KAAK,SAAS,aAAa,EAAM,EAEnC,YACE,EACA,EACA,IACG,CACH,KAAK,qBAAqB,EAAU,EAAeC,EAAW,CAC9D,KAAK,SAAS,aAAa,EAAU,EAAeA,EAAW,EAEjE,iBACE,EACA,EACA,IACG,CACH,KAAK,UAAU,kBACb,EACA,EACA,EACD,CACD,KAAK,SAAS,kBAAkB,EAAW,EAAe,EAAW,EAEvE,UAAY,GAAwB,CAOlC,GAJA,KAAK,YAAY,CACf,SAAU,IACX,CAAC,CAEE,KAAK,yBAA0B,CACjC,IAAM,EAAa,KAAK,YAAY,IAClC,KAAK,yBACN,CACG,IACF,EAAW,OAAS,WACpB,EAAW,SAAW,KAExB,KAAK,yBAA2B,OAIpC,QAAU,GAAiB,CAMzB,GALA,KAAK,YAAY,CACf,OAAQ,QACR,QACD,CAAC,CAEE,KAAK,yBAA0B,CACjC,IAAM,EAAa,KAAK,YAAY,IAClC,KAAK,yBACN,CACG,IACF,EAAW,OAAS,QACpB,EAAW,MAAQ,GAErB,KAAK,yBAA2B,KAElC,KAAK,UAAU,UAAU,EAAM,CAC/B,KAAK,SAAS,UAAU,EAAM,CAC9B,KAAK,gBAAkB,MAEzB,YAAe,CAKb,GAJA,KAAK,YAAY,CACf,OAAQ,UACT,CAAC,CAEE,KAAK,yBAA0B,CACjC,IAAM,EAAa,KAAK,YAAY,IAClC,KAAK,yBACN,CACG,IACF,EAAW,OAAS,QACpB,EAAW,MAAY,MAAM,iBAAiB,EAEhD,KAAK,yBAA2B,KAElC,KAAK,UAAU,WAAW,CAC1B,KAAK,SAAS,WAAW,CACzB,KAAK,gBAAkB,MAEzB,cAAe,KAAK,SAAS,cAC9B,CAGD,KAAK,gBAAkB,MAAM,KAAK,aAChC,EACA,KAAK,QAAQ,WACb,EACD,OACM,EAAO,CAEd,IAAM,EACJ,aAAiB,MAAQ,EAAY,MAAM,OAAO,EAAM,CAAC,CAO3D,GANA,KAAK,YAAY,CACf,OAAQ,QACR,MAAO,EACR,CAAC,CAGE,KAAK,yBAA0B,CACjC,IAAM,EAAa,KAAK,YAAY,IAAI,KAAK,yBAAyB,CAClE,IACF,EAAW,OAAS,QACpB,EAAW,MAAQ,GAErB,KAAK,yBAA2B,KAGlC,KAAK,UAAU,UAAU,EAAY,CACrC,KAAK,SAAS,UAAU,EAAY,CACpC,KAAK,gBAAkB,MAO3B,OAAc,CACR,KAAK,iBACP,KAAK,gBAAgB,OAAO,CAQhC,OAAc,CACR,KAAK,iBACP,KAAK,gBAAgB,OAAO,CAOhC,OAAc,CACZ,AAEE,KAAK,mBADL,KAAK,gBAAgB,OAAO,CACL,MAIzB,IAAK,IAAM,KAAc,KAAK,YAAY,QAAQ,CAC5C,EAAW,iBACb,EAAW,gBAAgB,OAAO,CAGtC,KAAK,YAAY,OAAO,CACxB,KAAK,yBAA2B,KAEhC,KAAK,MAAQ,CAAE,GAAGH,GAAc,CAChC,KAAK,UAAU,cAAc,KAAK,MAAM,CAO1C,mBAAkC,CAChC,GAAI,KAAK,YAAY,OAAS,EAC5B,OAGF,IAAM,EAAS,MAAM,KAAK,KAAK,YAAY,QAAQ,CAAC,CAG9C,EAAgB,EAAO,QAC1B,EAAK,IAAU,EAAM,EAAM,SAC5B,EACD,CACK,EAAc,KAAK,MAAM,EAAgB,EAAO,OAAO,CAGvD,EAAa,EAAO,QACvB,EAAK,IAAU,GAAO,EAAM,YAAc,GAC3C,EACD,CACK,EAAgB,EAAO,QAC1B,EAAK,IAAU,EAAM,EAAM,cAC5B,EACD,CAED,KAAK,YAAY,CACf,SAAU,EACV,gBACA,WAAY,EAAa,EAAI,EAAa,KAC3C,CAAC,CAsBJ,MAAM,YAAY,EAAgD,CAChE,IAAM,EAAe,OAAO,QAAQ,EAAO,CAE3C,GAAI,EAAa,SAAW,EAC1B,MAAU,MAAM,oCAAoC,CAItD,KAAK,YAAY,OAAO,CACxB,IAAK,GAAM,CAAC,EAAQ,KAAS,OAAO,QAAQ,EAAO,CAAE,CACnD,IAAM,EAAY,EAAgB,EAAK,CACvC,KAAK,YAAY,IAAI,EAAQ,CAC3B,SACA,KAAM,EACN,OAAQ,UACR,SAAU,EACV,cAAe,EACf,WACE,IAAc,QACd,GACA,OAAO,GAAS,UAChB,SAAU,GACV,OAAO,EAAK,MAAS,SACjB,EAAK,KACL,KACN,MAAO,KACP,gBAAiB,KAClB,CAAC,CAIJ,GAAI,EAAa,SAAW,EAAG,CAC7B,IAAM,EAAa,EAAa,GAChC,GAAI,CAAC,EACH,MAAU,MAAM,oCAAoC,CAEtD,GAAM,CAAC,EAAQ,GAAa,EAE5B,KAAK,yBAA2B,EAChC,MAAM,KAAK,OAAO,EAAoB,CACtC,OAIF,GAAI,CAAC,KAAK,mBACR,MAAU,MACR,2JAED,CAIH,KAAK,YAAY,CACf,OAAQ,YACR,SAAU,EACV,cAAe,EACf,WAAY,KACZ,MAAO,KACP,MAAO,KACP,YAAa,GACb,gBAAiB,KACjB,gBAAiB,KACjB,YAAa,KACd,CAAC,CAEF,GAAI,CAEF,IAAME,EAA6C,CACjD,WAAa,GAAkB,CAC7B,KAAK,YAAY,CAAE,QAAO,CAAC,CAC3B,KAAK,SAAS,aAAa,EAAM,EAE
nC,YACE,EACA,EACA,IACG,CAEH,KAAK,SAAS,aAAa,EAAU,EAAe,EAAW,EAEjE,UAAY,GAAwB,CAElC,KAAK,YAAY,CAAE,SAAU,IAAK,CAAC,EAErC,QAAU,GAAiB,CACzB,KAAK,YAAY,CAAE,OAAQ,QAAS,QAAO,CAAC,CAC5C,KAAK,UAAU,UAAU,EAAM,CAC/B,KAAK,SAAS,UAAU,EAAM,CAC9B,KAAK,gBAAkB,MAEzB,YAAe,CACb,KAAK,YAAY,CAAE,OAAQ,UAAW,CAAC,CACvC,KAAK,UAAU,WAAW,CAC1B,KAAK,SAAS,WAAW,CACzB,KAAK,gBAAkB,MAEzB,cAAe,KAAK,SAAS,cAC9B,CAmCD,KAAK,gBAAkB,MAAM,KAAK,mBAChC,EACA,KAAK,QAAQ,WACb,EAnC+C,CAC/C,iBAAkB,EAAQ,EAAU,EAAe,IAAe,CAEhE,IAAM,EAAa,KAAK,YAAY,IAAI,EAAO,CAC3C,IACF,EAAW,OAAS,YACpB,EAAW,SAAW,EACtB,EAAW,cAAgB,EAC3B,EAAW,WAAa,GAI1B,KAAK,mBAAmB,EAE1B,gBAAkB,GAAW,CAC3B,IAAM,EAAa,KAAK,YAAY,IAAI,EAAO,CAC3C,IACF,EAAW,OAAS,WACpB,EAAW,SAAW,KAExB,KAAK,mBAAmB,EAE1B,cAAe,EAAQ,IAAU,CAC/B,IAAM,EAAa,KAAK,YAAY,IAAI,EAAO,CAC3C,IACF,EAAW,OAAS,QACpB,EAAW,MAAQ,IAGxB,CAQA,OACM,EAAO,CACd,IAAM,EACJ,aAAiB,MAAQ,EAAY,MAAM,OAAO,EAAM,CAAC,CAC3D,KAAK,YAAY,CACf,OAAQ,QACR,MAAO,EACR,CAAC,CACF,KAAK,UAAU,UAAU,EAAY,CACrC,KAAK,SAAS,UAAU,EAAY,CACpC,KAAK,gBAAkB,MAQ3B,gBAA2D,CACzD,OAAO,KAAK,YAMd,SAAgB,CACd,AAEE,KAAK,mBADL,KAAK,gBAAgB,OAAO,CACL,MAIzB,IAAK,IAAM,KAAc,KAAK,YAAY,QAAQ,CAC5C,EAAW,iBACb,EAAW,gBAAgB,OAAO,CAGtC,KAAK,YAAY,OAAO,CACxB,KAAK,yBAA2B,OCx3BpC,MAAME,GAA4B,CAChC,OAAQ,OACR,SAAU,EACV,cAAe,EACf,WAAY,KACZ,MAAO,KACP,OAAQ,KACT,CAsBD,IAAa,GAAb,KAGE,CAaA,YACE,EACA,EACA,EACA,CAHiB,KAAA,SAAA,EACA,KAAA,UAAA,EACA,KAAA,QAAA,uBAdqC,oBACrB,mBACD,KAchC,KAAK,MAAQ,CAAE,GAAG,GAAc,CAMlC,UAAwB,CACtB,MAAO,CAAE,GAAG,KAAK,MAAO,CAM1B,aAAuB,CACrB,OAAO,KAAK,MAAM,SAAW,YAM/B,UAAoB,CAClB,OACG,KAAK,MAAM,SAAW,SAAW,KAAK,MAAM,SAAW,YACxD,KAAK,YAAc,KAOvB,YAAoB,EAAoC,CACtD,KAAK,MAAQ,CAAE,GAAG,KAAK,MAAO,GAAG,EAAQ,CACzC,KAAK,UAAU,cAAc,KAAK,MAAM,CAQ1C,MAAM,OAAO,EAA8B,CAEzC,IAAII,EAA4B,KAC5B,GAAS,OAAO,GAAU,UACxB,SAAU,GAAS,OAAO,EAAM,MAAS,WAC3C,EAAa,EAAM,MAKvB,KAAK,YAAY,CACf,OAAQ,YACR,SAAU,EACV,cAAe,EACf,aACA,MAAO,KACP,OAAQ,KACT,CAAC,CAEF,KAAK,UAAY,EAEjB,GAAI,CAEF,IAAM,EAAgB,CACpB,GAAG,KAAK,QACR,YACE,EACA,EACA,IACG,CAEH,AACE,KAAK,WAAW,EAGlB,IAAM,EAAkB,EACpB,KAAK,MAAO,EAAgB,EAAS,IAAI,CACzC,EAEJ,KAAK,YAAY,CACf,SAAU,EACV,gBACA,WAAY,EACb,CAAC,CAEF,KAAK,UAAU,aAAa,EAAU,EAAe,EAAM,CAC3D,KAAK,SAAS,aAAa,EAAU,EAAe,EAAM,EAE5D,iBACE,EACA,EACA,IACG,CACH,KAAK,UAAU,kBACb,EACA,EACA,EACD,CACD,KAAK,SAAS,kBAAkB,EAAW,EAAe,EAAW,EAEvE,UAAY,GAAuB,CACjC,KAAK,YAAY,CACf,OAAQ,UACR,SACA,SAAU,IACV,cAAe,EAAO,MAAQ,EAC9B,WAAY,EAAO,MAAQ,KAC5B,CAAC,CAEF,KAAK,UAAU,YAAY,EAAO,CAClC,KAAK,SAAS,YAAY,EAAO,CACjC,KAAK,gBAAkB,MAEzB,QAAU,GAAiB,CACzB,KAAK,YAAY,CACf,OAAQ,QACR,QACD,CAAC,CAEF,KAAK,UAAU,UAAU,EAAM,CAC/B,KAAK,SAAS,UAAU,EAAM,CAC9B,KAAK,gBAAkB,MAEzB,YAAe,CACb,KAAK,YAAY,CACf,OAAQ,UACT,CAAC,CAEF,KAAK,UAAU,WAAW,CAC1B,KAAK,SAAS,WAAW,CACzB,KAAK,gBAAkB,MAEzB,cAAe,KAAK,SAAS,cAC9B,CAGD,KAAK,gBAAkB,MAAM,KAAK,SAAS,EAAO,EAAc,OACzD,EAAO,CAEd,IAAM,EACJ,aAAiB,MAAQ,EAAY,MAAM,OAAO,EAAM,CAAC,CAC3D,KAAK,YAAY,CACf,OAAQ,QACR,MAAO,EACR,CAAC,CAEF,KAAK,UAAU,UAAU,EAAY,CACrC,KAAK,SAAS,UAAU,EAAY,CACpC,KAAK,gBAAkB,MAO3B,OAAc,CACR,KAAK,iBACP,KAAK,gBAAgB,OAAO,CAQhC,OAAc,CACZ,AAEE,KAAK,mBADL,KAAK,gBAAgB,OAAO,CACL,MAGzB,KAAK,MAAQ,CAAE,GAAG,GAAc,CAChC,KAAK,UAAY,KACjB,KAAK,SAAW,KAChB,KAAK,UAAU,cAAc,KAAK,MAAM,CAM1C,OAAc,CACR,KAAK,UAAU,EAAI,KAAK,YAAc,MACxC,KAAK,OAAO,KAAK,UAAU,CAO/B,SAAgB,CACd,AAEE,KAAK,mBADL,KAAK,gBAAgB,OAAO,CACL,MAEzB,KAAK,SAAW"}
1
+ {"version":3,"file":"index.mjs","names":["result: BufferedChunk","httpClient: HttpClient","authManager: AuthManager","type: \"direct\" | \"uploadista-cloud\" | \"no-auth\"","config: DirectAuthConfig","platformService: PlatformService","logger: Logger","config: UploadistaCloudAuthConfig","httpClient: HttpClient","cachedToken: CachedToken","noopLog: LogFunction","capabilities: DataStoreCapabilities","defaultClientCapabilities: DataStoreCapabilities","s3LikeCapabilities: DataStoreCapabilities","gcsLikeCapabilities: DataStoreCapabilities","filesystemLikeCapabilities: DataStoreCapabilities","DEFAULT_STRATEGIES: Record<string, ChunkingStrategy>","S3_OPTIMIZED_STRATEGIES: Record<string, ChunkingStrategy>","fallbackStrategy: ChunkingStrategy","baseStrategy: ChunkingStrategy","segments: {\n startByte: number;\n endByte: number;\n segmentIndex: number;\n }[]","error","storedUpload: PreviousUpload","checksum: string | undefined","createUploadData: InputFile","parallelState: ParallelUploadState","segmentSource: FileSource","aggregatedResult: UploadFile","terminate","chunkMetrics: ChunkMetrics","sessionMetrics: UploadSessionMetrics","groups: Record<string, ChunkMetrics[]>","sizeGroup: string","recommendations: string[]","negotiationOptions: UploadStrategyOptions","errors: string[]","warnings: string[]","body: RequestBody","uploadistaApi: UploadistaApi","logger: Logger","onEvent?: UploadistaWebSocketEventHandler","defaultConnectionPoolingConfig: ConnectionPoolConfig","authManager: AuthManager","cachedCapabilities: DataStoreCapabilities | null","smartChunker: SmartChunker","uploadId: string | null","uploadIdStorageKey: string | null","timeoutId: Timeout | null","abortControllers: Map<\n string,\n ReturnType<typeof abortControllerFactory.create>\n >","uploadIds: Map<string, string>","timeoutIds: Timeout[]","metrics","errors: string[]","warnings: string[]","eventSource: EventSource<T>","wrappedHandler: SubscriptionEventHandler<T>","subscription: SubscriptionInfo<T>","initialState: FlowUploadState","flowUploadFn: FlowUploadFunction<TInput>","callbacks: FlowManagerCallbacks","options: FlowUploadOptions","multiInputUploadFn?: MultiInputFlowUploadFunction","initialState","totalBytes: number | null","internalOptions: InternalFlowUploadOptions","totalBytes","initialState: UploadState","uploadFn: UploadFunction<TInput, TOptions>","callbacks: UploadManagerCallbacks","options?: TOptions","totalBytes: number | null"],"sources":["../src/chunk-buffer.ts","../src/auth/auth-http-client.ts","../src/auth/types.ts","../src/auth/direct-auth.ts","../src/auth/no-auth.ts","../src/auth/uploadista-cloud-auth.ts","../src/logger.ts","../src/mock-data-store.ts","../src/network-monitor.ts","../src/smart-chunker.ts","../src/error.ts","../src/upload/upload-utils.ts","../src/upload/chunk-upload.ts","../src/upload/flow-upload.ts","../src/upload/flow-upload-orchestrator.ts","../src/upload/upload-storage.ts","../src/upload/single-upload.ts","../src/upload/parallel-upload.ts","../src/services/platform-service.ts","../src/upload/upload-manager.ts","../src/upload/upload-metrics.ts","../src/upload/upload-strategy.ts","../src/utils/input-detection.ts","../src/client/uploadista-api.ts","../src/client/uploadista-websocket-manager.ts","../src/client/create-uploadista-client.ts","../src/managers/event-subscription-manager.ts","../src/managers/flow-manager.ts","../src/managers/upload-manager.ts","../src/types/previous-upload.ts","../src/storage/client-storage.ts","../src/storage/in-memory-storage-service.ts"],"sourcesContent":["import type { 
BufferedChunk } from \"./types/buffered-chunk\";\n\n/**\n * Configuration options for ChunkBuffer.\n *\n * Controls how the buffer accumulates chunks before flushing them to the datastore.\n * This is essential for datastores with minimum chunk size requirements (e.g., AWS S3's 5MB minimum).\n */\nexport interface ChunkBufferConfig {\n /**\n * Minimum chunk size required by the datastore before flushing (in bytes).\n * For example, AWS S3 requires a minimum of 5MB per multipart upload part.\n */\n minThreshold: number;\n\n /**\n * Maximum buffer size before forcing a flush (in bytes).\n * Defaults to 2x minThreshold. Prevents memory issues with very slow uploads.\n */\n maxBufferSize?: number;\n\n /**\n * Maximum time to wait before flushing pending data (in milliseconds).\n * Defaults to 30000ms (30 seconds). Ensures timely uploads even with slow data arrival.\n */\n timeoutMs?: number;\n}\n\n/**\n * ChunkBuffer accumulates small chunks until they meet the minimum threshold\n * required by the datastore (e.g., S3's 5MB minimum part size).\n *\n * This prevents inefficient upload/download cycles of incomplete parts by buffering\n * small chunks in memory until they reach the datastore's minimum size requirement.\n * The buffer automatically flushes when the threshold is met, the maximum buffer\n * size is exceeded, or a timeout occurs.\n *\n * @example Basic usage with S3's 5MB minimum\n * ```typescript\n * const buffer = new ChunkBuffer({\n * minThreshold: 5 * 1024 * 1024, // 5MB\n * maxBufferSize: 10 * 1024 * 1024, // 10MB\n * timeoutMs: 30000, // 30 seconds\n * });\n *\n * // Add chunks as they arrive\n * const chunk1 = new Uint8Array(2 * 1024 * 1024); // 2MB\n * buffer.add(chunk1); // Returns null (below threshold)\n *\n * const chunk2 = new Uint8Array(3 * 1024 * 1024); // 3MB\n * const buffered = buffer.add(chunk2); // Returns combined 5MB chunk\n * ```\n *\n * @example Handling incomplete uploads\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 5 * 1024 * 1024 });\n *\n * // After adding several small chunks\n * buffer.add(smallChunk1);\n * buffer.add(smallChunk2);\n *\n * // Force flush remaining data at end of upload\n * if (buffer.hasPendingData()) {\n * const finalChunk = buffer.flush();\n * await uploadFinalChunk(finalChunk);\n * }\n * ```\n */\nexport class ChunkBuffer {\n private buffer: Uint8Array[] = [];\n private currentSize = 0;\n private config: Required<ChunkBufferConfig>;\n private lastAddTime = 0;\n\n /**\n * Creates a new ChunkBuffer instance.\n *\n * @param config - Buffer configuration including thresholds and timeout\n */\n constructor(config: ChunkBufferConfig) {\n this.config = {\n minThreshold: config.minThreshold,\n maxBufferSize: config.maxBufferSize ?? config.minThreshold * 2,\n timeoutMs: config.timeoutMs ?? 
30000, // 30 seconds\n };\n }\n\n /**\n * Adds a chunk to the buffer and returns the accumulated chunk if the flush threshold is met.\n *\n * The buffer will automatically flush (return the combined chunk) when:\n * - The total buffered size meets or exceeds minThreshold\n * - The total buffered size exceeds maxBufferSize\n * - The time since the last chunk exceeds timeoutMs\n *\n * @param chunk - The chunk data to add to the buffer\n * @returns The combined buffered chunk if flush conditions are met, null otherwise\n *\n * @example Progressive buffering\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 1024 * 1024 }); // 1MB\n *\n * // First chunk doesn't meet threshold\n * const result1 = buffer.add(new Uint8Array(512 * 1024)); // 512KB\n * console.log(result1); // null\n *\n * // Second chunk triggers flush\n * const result2 = buffer.add(new Uint8Array(512 * 1024)); // 512KB\n * console.log(result2?.size); // 1048576 (1MB total)\n * ```\n */\n add(chunk: Uint8Array): BufferedChunk | null {\n this.buffer.push(chunk);\n this.currentSize += chunk.length;\n this.lastAddTime = Date.now();\n\n if (this.shouldFlush()) {\n return this.flush();\n }\n\n return null;\n }\n\n /**\n * Forces the buffer to flush immediately, returning all accumulated data.\n *\n * This is typically called at the end of an upload to ensure any remaining\n * buffered data is sent, even if it hasn't reached the minimum threshold.\n *\n * @returns The combined buffered chunk, or null if the buffer is empty\n *\n * @example Flushing at upload completion\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 5 * 1024 * 1024 });\n *\n * // Upload file in chunks\n * for (const chunk of fileChunks) {\n * const buffered = buffer.add(chunk);\n * if (buffered) await uploadChunk(buffered);\n * }\n *\n * // Upload any remaining data\n * const final = buffer.flush();\n * if (final) await uploadChunk(final);\n * ```\n */\n flush(): BufferedChunk | null {\n if (this.buffer.length === 0) {\n return null;\n }\n\n const combined = new Uint8Array(this.currentSize);\n let offset = 0;\n\n for (const chunk of this.buffer) {\n combined.set(chunk, offset);\n offset += chunk.length;\n }\n\n const result: BufferedChunk = {\n data: combined,\n size: this.currentSize,\n timestamp: this.lastAddTime,\n };\n\n this.reset();\n return result;\n }\n\n /**\n * Checks if the buffer should be flushed based on size, max buffer, or timeout conditions.\n *\n * Returns true if any of these conditions are met:\n * - Current size >= minThreshold\n * - Current size >= maxBufferSize\n * - Time since last add > timeoutMs\n *\n * @returns True if the buffer should be flushed\n *\n * @example Manual flush control\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 1024 * 1024 });\n *\n * buffer.add(smallChunk);\n *\n * if (buffer.shouldFlush()) {\n * const data = buffer.flush();\n * await upload(data);\n * }\n * ```\n */\n shouldFlush(): boolean {\n if (this.currentSize >= this.config.minThreshold) {\n return true;\n }\n\n if (this.currentSize >= this.config.maxBufferSize) {\n return true;\n }\n\n const timeSinceLastAdd = Date.now() - this.lastAddTime;\n if (this.buffer.length > 0 && timeSinceLastAdd > this.config.timeoutMs) {\n return true;\n }\n\n return false;\n }\n\n /**\n * Returns the current buffer state without flushing.\n *\n * Useful for monitoring buffer status and making informed decisions\n * about when to manually flush or adjust upload strategies.\n *\n * @returns Object containing buffer metrics\n *\n 
* @example Monitoring buffer state\n * ```typescript\n * const buffer = new ChunkBuffer({ minThreshold: 1024 * 1024 });\n * buffer.add(chunk);\n *\n * const info = buffer.getBufferInfo();\n * console.log(`Buffered: ${info.size} bytes in ${info.chunkCount} chunks`);\n * console.log(`Ready to flush: ${info.isReadyToFlush}`);\n * console.log(`Time since last add: ${info.timeSinceLastAdd}ms`);\n * ```\n */\n getBufferInfo(): {\n size: number;\n chunkCount: number;\n isReadyToFlush: boolean;\n timeSinceLastAdd: number;\n } {\n return {\n size: this.currentSize,\n chunkCount: this.buffer.length,\n isReadyToFlush: this.shouldFlush(),\n timeSinceLastAdd: Date.now() - this.lastAddTime,\n };\n }\n\n /**\n * Checks if the buffer has any pending data that hasn't been flushed.\n *\n * Useful for determining if a final flush is needed at upload completion.\n *\n * @returns True if there are chunks waiting in the buffer\n *\n * @example Ensuring complete upload\n * ```typescript\n * // Upload all chunks\n * for (const chunk of chunks) {\n * const buffered = buffer.add(chunk);\n * if (buffered) await upload(buffered);\n * }\n *\n * // Don't forget the last partial chunk!\n * if (buffer.hasPendingData()) {\n * await upload(buffer.flush());\n * }\n * ```\n */\n hasPendingData(): boolean {\n return this.buffer.length > 0;\n }\n\n /**\n * Clears the buffer without returning data.\n *\n * This discards all buffered chunks and resets the buffer state.\n * Use with caution as this will lose any pending data.\n */\n reset(): void {\n this.buffer = [];\n this.currentSize = 0;\n this.lastAddTime = 0;\n }\n\n /**\n * Returns the minimum threshold this buffer is configured for.\n *\n * @returns Minimum chunk size in bytes before flushing\n */\n getMinThreshold(): number {\n return this.config.minThreshold;\n }\n}\n","import type {\n HttpClient,\n HttpRequestOptions,\n HttpResponse,\n} from \"../services/http-client\";\nimport type { DirectAuthManager } from \"./direct-auth\";\nimport type { NoAuthManager } from \"./no-auth\";\nimport type { UploadistaCloudAuthManager } from \"./uploadista-cloud-auth\";\n\n/**\n * Union type of all auth managers\n */\nexport type AuthManager =\n | DirectAuthManager\n | UploadistaCloudAuthManager\n | NoAuthManager;\n\n/**\n * Auth-aware HTTP client wrapper.\n *\n * Wraps a standard HttpClient and automatically attaches authentication\n * credentials/tokens to all HTTP requests based on the configured auth manager.\n *\n * The wrapper delegates all non-auth concerns (connection pooling, metrics, etc.)\n * to the underlying HttpClient and only adds the auth layer on top.\n */\nexport class AuthHttpClient implements HttpClient {\n constructor(\n private httpClient: HttpClient,\n private authManager: AuthManager,\n ) {}\n\n /**\n * Make an HTTP request with authentication credentials attached.\n * Calls the auth manager to attach credentials before delegating to the underlying client.\n */\n async request(\n url: string,\n options: HttpRequestOptions = {},\n ): Promise<HttpResponse> {\n try {\n // Attach auth credentials to request headers\n const authenticatedHeaders = await this.attachAuthCredentials(\n options.headers || {},\n url,\n );\n\n // Delegate to underlying HTTP client with authenticated headers\n return await this.httpClient.request(url, {\n ...options,\n headers: authenticatedHeaders,\n // include credentials for cors if needed\n credentials:\n this.authManager.getType() === \"no-auth\" ||\n this.authManager.getType() === \"uploadista-cloud\"\n ? 
\"omit\"\n : (options.credentials ?? \"include\"),\n });\n } catch (error) {\n // If auth fails, wrap error with context\n if (error instanceof Error && error.message.includes(\"auth\")) {\n throw error; // Re-throw auth errors as-is\n }\n\n // For other errors, let them propagate\n throw error;\n }\n }\n\n /**\n * Attach authentication credentials to request headers.\n * Delegates to the appropriate auth manager method.\n */\n private async attachAuthCredentials(\n headers: Record<string, string>,\n url: string,\n ): Promise<Record<string, string>> {\n // Check if this is a DirectAuthManager or UploadistaCloudAuthManager\n if (\"attachCredentials\" in this.authManager) {\n // DirectAuthManager or NoAuthManager\n return await this.authManager.attachCredentials(headers);\n }\n\n if (\"attachToken\" in this.authManager) {\n // UploadistaCloudAuthManager - extract job ID from URL if present\n const jobId = this.extractJobIdFromUrl(url);\n return await this.authManager.attachToken(headers, jobId);\n }\n\n // Fallback - return headers unchanged\n return headers;\n }\n\n /**\n * Extract job ID from URL for SaaS mode token caching.\n * Looks for patterns like /upload/{id} or /jobs/{id} in the URL.\n */\n private extractJobIdFromUrl(url: string): string | undefined {\n // Match patterns like:\n // - /api/upload/{uploadId}\n // - /api/flow/{flowId}/{storageId}\n // - /api/jobs/{jobId}/status\n // - /api/jobs/{jobId}/resume/{nodeId}\n\n const uploadMatch = url.match(/\\/api\\/upload\\/([^/?]+)/);\n if (uploadMatch) {\n return uploadMatch[1];\n }\n\n const flowMatch = url.match(/\\/api\\/flow\\/([^/?]+)/);\n if (flowMatch) {\n return flowMatch[1];\n }\n\n const jobMatch = url.match(/\\/api\\/jobs\\/([^/?]+)/);\n if (jobMatch) {\n return jobMatch[1];\n }\n\n // No job ID found - UploadistaCloud mode will use global token\n return undefined;\n }\n\n /**\n * Delegate metrics methods to underlying HTTP client\n */\n getMetrics() {\n return this.httpClient.getMetrics();\n }\n\n getDetailedMetrics() {\n return this.httpClient.getDetailedMetrics();\n }\n\n reset() {\n this.httpClient.reset();\n }\n\n async close() {\n await this.httpClient.close();\n }\n\n async warmupConnections(urls: string[]) {\n await this.httpClient.warmupConnections(urls);\n }\n\n /**\n * Get the underlying auth manager for advanced use cases\n */\n getAuthManager(): AuthManager {\n return this.authManager;\n }\n}\n","export class BaseAuthManager {\n constructor(private type: \"direct\" | \"uploadista-cloud\" | \"no-auth\") {}\n\n getType() {\n return this.type;\n }\n}\n/**\n * Credentials that can be attached to HTTP requests.\n * Supports headers and cookies for maximum flexibility.\n */\nexport type RequestCredentials = {\n /** HTTP headers to attach (e.g., Authorization, X-API-Key) */\n headers?: Record<string, string>;\n /** Cookies to attach (primarily for browser environments) */\n cookies?: Record<string, string>;\n};\n\n/**\n * Direct auth mode configuration.\n * Users provide a function that returns credentials to attach to every request.\n * This mode supports any authentication protocol (OAuth, JWT, sessions, API keys, etc.)\n *\n * @example Bearer token\n * ```typescript\n * {\n * mode: 'direct',\n * getCredentials: async () => ({\n * headers: {\n * 'Authorization': `Bearer ${await getAccessToken()}`\n * }\n * })\n * }\n * ```\n *\n * @example API key\n * ```typescript\n * {\n * mode: 'direct',\n * getCredentials: () => ({\n * headers: {\n * 'X-API-Key': process.env.API_KEY\n * }\n * })\n * }\n * ```\n */\nexport type 
DirectAuthConfig = {\n mode: \"direct\";\n /**\n * Function called before each HTTP request to obtain credentials.\n * Can be async to support token refresh or other async operations.\n * Should not throw - return empty object if credentials unavailable.\n */\n getCredentials?: () => RequestCredentials | Promise<RequestCredentials>;\n};\n\n/**\n * UploadistaCloud auth mode configuration.\n * Client requests JWT tokens from a user-controlled auth server,\n * which validates credentials and issues tokens using a secure API key.\n *\n * Token exchange flow:\n * 1. Client calls getCredentials() to get user credentials\n * 2. Client sends credentials to authServerUrl\n * 3. Auth server validates and returns JWT token\n * 4. Client attaches token to uploadista engine requests\n *\n * @example\n * ```typescript\n * {\n * mode: 'uploadista-cloud',\n * authServerUrl: 'https://auth.myapp.com/token',\n * clientId: 'my-client-id'\n * }\n * ```\n */\nexport type UploadistaCloudAuthConfig = {\n mode: \"uploadista-cloud\";\n /**\n * URL of the user's auth server that issues JWT tokens.\n * Should be a GET endpoint that accepts client id and returns { token, expiresIn }.\n */\n authServerUrl: string;\n /**\n * Client ID to use for authentication. It will be used to compare the API Key with the client id on the auth server.\n */\n clientId: string;\n};\n\n/**\n * Authentication configuration for the uploadista client.\n * Supports two modes:\n * - Direct: Bring your own auth (any protocol)\n * - UploadistaCloud: Standard JWT token exchange with auth server\n *\n * Use a discriminated union to ensure type safety - TypeScript will\n * enforce that the correct fields are present for each mode.\n */\nexport type AuthConfig = DirectAuthConfig | UploadistaCloudAuthConfig;\n","import type { Logger } from \"../logger\";\nimport type { PlatformService } from \"../services/platform-service\";\nimport type { DirectAuthConfig } from \"./types\";\nimport { BaseAuthManager } from \"./types\";\n\n/**\n * Direct auth manager - handles credential attachment for \"bring your own auth\" mode.\n *\n * This manager calls the user-provided getCredentials() function before each request\n * and attaches the returned credentials (headers, cookies) to the HTTP request.\n *\n * Supports any authentication protocol: OAuth, JWT, API keys, session cookies, etc.\n */\nexport class DirectAuthManager extends BaseAuthManager {\n constructor(\n private config: DirectAuthConfig,\n private platformService: PlatformService,\n private logger: Logger,\n ) {\n super(\"direct\");\n }\n\n /**\n * Attach credentials to an HTTP request by calling getCredentials() and\n * merging the returned headers/cookies with the request.\n *\n * @param headers - Existing request headers\n * @returns Updated headers with credentials attached\n * @throws Error if getCredentials() throws or returns invalid data\n */\n async attachCredentials(\n headers: Record<string, string> = {},\n ): Promise<Record<string, string>> {\n try {\n if (!this.config.getCredentials) {\n return headers;\n }\n\n // Call user's credential provider (may be async)\n const credentials = await Promise.resolve(this.config.getCredentials());\n\n // Validate credentials\n if (!credentials || typeof credentials !== \"object\") {\n throw new Error(\n \"getCredentials() must return an object with headers and/or cookies\",\n );\n }\n\n // Merge credential headers with existing headers\n const updatedHeaders = { ...headers };\n\n if (credentials.headers) {\n 
this.validateHeaders(credentials.headers);\n Object.assign(updatedHeaders, credentials.headers);\n }\n\n // Note: Cookie handling would be browser-specific\n // For now, we only support headers as cookies are automatically\n // handled by the browser when using fetch()\n if (credentials.cookies) {\n this.attachCookies(updatedHeaders, credentials.cookies);\n }\n\n return updatedHeaders;\n } catch (error) {\n // Wrap errors with context\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to attach auth credentials: ${message}`);\n }\n }\n\n /**\n * Validate that headers is a valid object with string keys and values\n */\n private validateHeaders(headers: Record<string, string>): void {\n if (typeof headers !== \"object\" || headers === null) {\n throw new Error(\"headers must be an object\");\n }\n\n for (const [key, value] of Object.entries(headers)) {\n if (typeof key !== \"string\" || typeof value !== \"string\") {\n throw new Error(\n `Invalid header: key and value must be strings (got ${key}: ${typeof value})`,\n );\n }\n }\n }\n\n /**\n * Attach cookies to request headers.\n * In browser environments, cookies are automatically handled by fetch().\n * In Node.js, we need to manually add them to the Cookie header.\n */\n private attachCookies(\n headers: Record<string, string>,\n cookies: Record<string, string>,\n ): void {\n // Check if we're in a browser environment\n const isBrowser = this.platformService.isBrowser();\n\n if (isBrowser) {\n // In browsers, fetch() automatically sends cookies for same-origin requests\n // For cross-origin, the server needs to set CORS headers and credentials: 'include'\n // We can't manually set cookies in headers for security reasons\n // So we just warn if cookies are provided in direct mode\n this.logger.warn(\n \"DirectAuth: Cookies are automatically handled by the browser. 
\" +\n \"Ensure your server has proper CORS configuration with credentials support.\",\n );\n } else {\n // In Node.js, we can manually build the Cookie header\n const cookieString = Object.entries(cookies)\n .map(([key, value]) => `${key}=${value}`)\n .join(\"; \");\n\n if (cookieString) {\n headers.Cookie = cookieString;\n }\n }\n }\n}\n","import { BaseAuthManager } from \"./types\";\n\n/**\n * No-auth manager - pass-through implementation for backward compatibility.\n *\n * When no auth configuration is provided, this manager is used to maintain\n * a consistent interface without adding any authentication to requests.\n */\nexport class NoAuthManager extends BaseAuthManager {\n constructor() {\n super(\"no-auth\");\n }\n\n /**\n * Pass through headers without modification.\n *\n * @param headers - Existing request headers\n * @returns Same headers unchanged\n */\n async attachCredentials(\n headers: Record<string, string> = {},\n ): Promise<Record<string, string>> {\n return headers;\n }\n\n /**\n * No-op for clearing tokens (NoAuthManager doesn't cache anything)\n */\n clearToken(_jobId: string): void {\n // No-op\n }\n\n /**\n * No-op for clearing all tokens\n */\n clearAllTokens(): void {\n // No-op\n }\n}\n","import type { HttpClient } from \"../services/http-client\";\nimport { BaseAuthManager, type UploadistaCloudAuthConfig } from \"./types\";\n\n/**\n * Token response from the auth server\n */\nexport type TokenResponse = {\n /** JWT token to use for authentication */\n token: string;\n /** Token expiration time in seconds (optional) */\n expiresIn?: number;\n};\n\n/**\n * Cached token information\n */\ntype CachedToken = {\n token: string;\n expiresAt?: number; // Unix timestamp in milliseconds\n};\n\n/**\n * UploadistaCloud auth manager - handles JWT token exchange with an auth server.\n *\n * Token exchange flow:\n * 1. Client calls getCredentials() to get user credentials\n * 2. Manager sends credentials to authServerUrl\n * 3. Auth server validates credentials and returns JWT token\n * 4. Manager caches token and attaches it to uploadista requests\n * 5. 
Token is cached per job to minimize auth overhead\n *\n * Security: API keys are kept server-side in the auth server, never exposed to clients.\n */\nexport class UploadistaCloudAuthManager extends BaseAuthManager {\n /** Token cache: maps job ID to cached token */\n private tokenCache = new Map<string, CachedToken>();\n\n /** Global token for requests without a specific job ID */\n private globalToken: CachedToken | null = null;\n\n constructor(\n private config: UploadistaCloudAuthConfig,\n private httpClient: HttpClient,\n ) {\n super(\"uploadista-cloud\");\n }\n\n /**\n * Fetch a JWT token from the auth server for the configured client ID.\n *\n * @returns Token response with JWT and optional expiry\n * @throws Error if auth server is unreachable or returns an error\n */\n async fetchToken(): Promise<TokenResponse> {\n try {\n // Make GET request to the auth server's token endpoint\n const response = await this.httpClient.request(\n `${this.config.authServerUrl}/${this.config.clientId}`,\n {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n },\n );\n\n // Handle error responses\n if (!response.ok) {\n const errorText = await response.text();\n let errorMessage = `Auth server returned ${response.status}`;\n\n try {\n const errorJson = JSON.parse(errorText);\n errorMessage = errorJson.error || errorJson.message || errorMessage;\n } catch {\n // If response is not JSON, use status text\n errorMessage = errorText || response.statusText || errorMessage;\n }\n\n throw new Error(errorMessage);\n }\n\n // Parse token response\n const data = (await response.json()) as TokenResponse;\n\n if (!data.token || typeof data.token !== \"string\") {\n throw new Error(\n \"Auth server response missing 'token' field or token is not a string\",\n );\n }\n\n return data;\n } catch (error) {\n // Wrap errors with context\n if (error instanceof Error) {\n throw new Error(`Failed to fetch auth token: ${error.message}`);\n }\n throw new Error(`Failed to fetch auth token: ${String(error)}`);\n }\n }\n\n /**\n * Get a cached token for a specific job, or fetch a new one if not cached.\n *\n * @param jobId - Optional job ID to cache token for specific job\n * @returns Cached or newly fetched token\n */\n private async getOrFetchToken(jobId?: string): Promise<string> {\n // Check if we have a cached token for this job\n if (jobId) {\n const cached = this.tokenCache.get(jobId);\n if (cached && !this.isTokenExpired(cached)) {\n return cached.token;\n }\n }\n\n // Check global token cache\n if (!jobId && this.globalToken && !this.isTokenExpired(this.globalToken)) {\n return this.globalToken.token;\n }\n\n // No valid cached token - fetch a new one\n const tokenResponse = await this.fetchToken();\n\n // Calculate expiration time if provided\n const expiresAt = tokenResponse.expiresIn\n ? 
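/*\n * Hedged usage sketch for the token cache (illustrative; `auth` is a\n * hypothetical UploadistaCloudAuthManager instance):\n * const headers = await auth.attachToken({}, \"job-123\"); // fetches + caches\n * await auth.attachToken({}, \"job-123\"); // cache hit until ~60s before expiry\n * auth.clearToken(\"job-123\"); // free the entry when the job completes\n */ 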
Date.now() + tokenResponse.expiresIn * 1000\n : undefined;\n\n const cachedToken: CachedToken = {\n token: tokenResponse.token,\n expiresAt,\n };\n\n // Cache the token\n if (jobId) {\n this.tokenCache.set(jobId, cachedToken);\n } else {\n this.globalToken = cachedToken;\n }\n\n return tokenResponse.token;\n }\n\n /**\n * Check if a cached token is expired.\n * Adds a 60-second buffer to avoid using tokens that are about to expire.\n */\n private isTokenExpired(cached: CachedToken): boolean {\n if (!cached.expiresAt) {\n // No expiry set - assume token is valid\n return false;\n }\n\n // Add 60-second buffer before actual expiry\n const bufferMs = 60 * 1000;\n return Date.now() > cached.expiresAt - bufferMs;\n }\n\n /**\n * Attach JWT token to an HTTP request as Authorization Bearer header.\n *\n * @param headers - Existing request headers\n * @param jobId - Optional job ID to use cached token for specific job\n * @returns Updated headers with Authorization header\n * @throws Error if token fetch fails\n */\n async attachToken(\n headers: Record<string, string> = {},\n jobId?: string,\n ): Promise<Record<string, string>> {\n try {\n // Get token (from cache or fetch new)\n const token = await this.getOrFetchToken(jobId);\n\n // Attach as Bearer token\n return {\n ...headers,\n Authorization: `Bearer ${token}`,\n };\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to attach auth token: ${message}`);\n }\n }\n\n /**\n * Clear cached token for a specific job.\n * Should be called when a job completes to free memory.\n *\n * @param jobId - Job ID to clear token for\n */\n clearToken(jobId: string): void {\n this.tokenCache.delete(jobId);\n }\n\n /**\n * Clear all cached tokens.\n * Useful for logout or when switching users.\n */\n clearAllTokens(): void {\n this.tokenCache.clear();\n this.globalToken = null;\n }\n\n /**\n * Get cache statistics for debugging and monitoring.\n */\n getCacheStats(): {\n cachedJobCount: number;\n hasGlobalToken: boolean;\n } {\n return {\n cachedJobCount: this.tokenCache.size,\n hasGlobalToken: this.globalToken !== null,\n };\n }\n}\n","/**\n * Logger interface for Uploadista client operations.\n *\n * Provides structured logging capabilities for debugging upload progress,\n * flow execution, and client operations. 
Platform implementations should\n * provide their own logging functions (e.g., console.log, custom loggers).\n *\n * @example Using with console\n * ```typescript\n * const logger = createLogger(true, console.log);\n * logger.log('Upload started');\n * logger.warn('Retrying failed chunk');\n * logger.error('Upload failed');\n * ```\n */\nexport type Logger = {\n /**\n * Log informational messages (e.g., upload progress, state changes)\n */\n log: (message: string) => void;\n\n /**\n * Log warning messages (e.g., retry attempts, degraded performance)\n */\n warn: (message: string) => void;\n\n /**\n * Log error messages (e.g., upload failures, network errors)\n */\n error: (message: string) => void;\n};\n\n/**\n * Platform-specific logging function type.\n *\n * Accepts a message string and outputs it to the appropriate logging destination.\n * This abstraction allows the client to work across different platforms\n * (browser, Node.js, React Native) with their own logging mechanisms.\n */\nexport type LogFunction = (message: string) => void;\n\n/**\n * Default no-op logger function.\n *\n * Used when no custom logging function is provided.\n * Platform implementations should provide their own (e.g., console.log).\n */\nconst noopLog: LogFunction = () => {\n // No-op by default - platforms will override\n};\n\n/**\n * Creates a Logger instance with configurable output.\n *\n * This factory function creates a logger that can be enabled/disabled\n * and customized with a platform-specific logging function.\n *\n * @param enabled - Whether logging is enabled. When false, all log calls are no-ops\n * @param logFn - Optional custom logging function. Defaults to no-op. Pass console.log for browser/Node.js\n * @returns A Logger instance with log, warn, and error methods\n *\n * @example Basic usage with console\n * ```typescript\n * const logger = createLogger(true, console.log);\n * logger.log('Upload started');\n * ```\n *\n * @example Disabled logger (no output)\n * ```typescript\n * const logger = createLogger(false);\n * logger.log('This will not be logged');\n * ```\n *\n * @example Custom logging function\n * ```typescript\n * const customLog = (msg: string) => {\n * // Send to analytics service\n * analytics.track('upload_log', { message: msg });\n * };\n * const logger = createLogger(true, customLog);\n * ```\n */\nexport function createLogger(\n enabled: boolean,\n logFn: LogFunction = noopLog,\n): Logger {\n return {\n log: (message: string) => {\n if (enabled) {\n logFn(message);\n }\n },\n warn: (message: string) => {\n if (enabled) {\n logFn(message);\n }\n },\n error: (message: string) => {\n if (enabled) {\n logFn(message);\n }\n },\n };\n}\n","import type {\n DataStoreCapabilities,\n UploadStrategy,\n} from \"@uploadista/core/types\";\n\n/**\n * Mock data store implementation for client-side capability negotiation.\n * This doesn't perform actual data store operations but provides capability information\n * for upload strategy decisions.\n */\nexport class MockClientDataStore {\n constructor(private capabilities: DataStoreCapabilities) {}\n\n getCapabilities(): DataStoreCapabilities {\n return this.capabilities;\n }\n\n validateUploadStrategy(strategy: UploadStrategy): boolean {\n switch (strategy) {\n case \"parallel\":\n return this.capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n }\n}\n\n/**\n * Default capabilities that assume basic parallel upload support\n * (conservative defaults that work with most backends)\n 
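*\n * A hedged usage sketch (illustrative only), pairing these defaults with the\n * MockClientDataStore defined above:\n * ```typescript\n * const store = new MockClientDataStore(defaultClientCapabilities);\n * store.validateUploadStrategy(\"parallel\"); // true\n * store.validateUploadStrategy(\"single\"); // true\n * ```\n 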
*/\nexport const defaultClientCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: true,\n supportsConcatenation: true,\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 6, // Browser-safe default\n minChunkSize: 64 * 1024, // 64KB\n maxChunkSize: 100 * 1024 * 1024, // 100MB\n maxParts: 10000,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB\n requiresOrderedChunks: false,\n};\n\n/**\n * Capabilities for S3-compatible backends\n */\nexport const s3LikeCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: true,\n supportsConcatenation: true,\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: true,\n maxConcurrentUploads: 60,\n minChunkSize: 5 * 1024 * 1024, // 5MiB S3 minimum\n maxChunkSize: 5 * 1024 * 1024 * 1024, // 5GiB S3 maximum\n maxParts: 10000,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB\n requiresOrderedChunks: false,\n};\n\n/**\n * Capabilities for GCS-compatible backends\n */\nexport const gcsLikeCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: false, // GCS doesn't have native multipart\n supportsConcatenation: true, // Can combine files\n supportsDeferredLength: true,\n supportsResumableUploads: true,\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1,\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB\n requiresOrderedChunks: true,\n};\n\n/**\n * Capabilities for filesystem-based backends\n */\nexport const filesystemLikeCapabilities: DataStoreCapabilities = {\n supportsParallelUploads: false, // Sequential operations\n supportsConcatenation: false,\n supportsDeferredLength: false,\n supportsResumableUploads: true,\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1,\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 1024 * 1024, // 1MB\n requiresOrderedChunks: true,\n};\n","/**\n * Assessment of current network conditions based on upload performance.\n *\n * Used by smart chunking algorithms to adapt chunk sizes based on network quality.\n */\nexport interface NetworkCondition {\n /**\n * Classification of network speed and stability:\n * - \"slow\": Average speed below slowThreshold (default 50 KB/s)\n * - \"fast\": Average speed above fastThreshold (default 5 MB/s)\n * - \"unstable\": High variance in upload speeds\n * - \"unknown\": Insufficient data to determine condition\n */\n type: \"slow\" | \"fast\" | \"unstable\" | \"unknown\";\n\n /**\n * Confidence level in the assessment (0-1).\n * Higher values indicate more samples and more reliable assessment.\n */\n confidence: number;\n}\n\n/**\n * Aggregated network performance metrics.\n *\n * Provides a comprehensive view of upload performance over time,\n * useful for debugging connectivity issues and optimizing upload strategies.\n */\nexport interface NetworkMetrics {\n /** Average upload speed in bytes per second */\n averageSpeed: number;\n\n /** Average network latency in milliseconds */\n latency: number;\n\n /** Ratio of successful uploads (0-1) */\n successRate: number;\n\n /** Ratio of failed uploads (0-1) */\n errorRate: number;\n\n /** Total number of upload requests made */\n totalRequests: number;\n\n /** Total bytes uploaded successfully */\n totalBytes: number;\n\n /** Total time spent uploading in milliseconds */\n totalTime: number;\n}\n\n/**\n * Individual upload sample for network analysis.\n *\n * 
Each successful or failed upload is recorded as a sample,\n * which is used to calculate network metrics and conditions.\n */\nexport interface UploadSample {\n /** Size of the uploaded chunk in bytes */\n size: number;\n\n /** Time taken to upload in milliseconds */\n duration: number;\n\n /** Whether the upload succeeded */\n success: boolean;\n\n /** Unix timestamp when the upload occurred */\n timestamp: number;\n\n /** Optional network latency measurement in milliseconds */\n latency?: number;\n}\n\n/**\n * Configuration options for NetworkMonitor.\n *\n * Controls how network conditions are assessed and how upload samples\n * are analyzed to determine optimal chunking strategies.\n */\nexport interface NetworkMonitorConfig {\n /** Maximum number of samples to keep in memory. Defaults to 100. */\n maxSamples?: number;\n\n /** Smoothing factor for exponential moving average (0-1). Defaults to 0.1. */\n smoothingFactor?: number;\n\n /** Minimum samples required before assessing network condition. Defaults to 5. */\n minSamplesForCondition?: number;\n\n /** Upload speed threshold for \"slow\" classification in bytes/second. Defaults to 50 KB/s. */\n slowThreshold?: number;\n\n /** Upload speed threshold for \"fast\" classification in bytes/second. Defaults to 5 MB/s. */\n fastThreshold?: number;\n\n /** Coefficient of variation threshold for \"unstable\" classification. Defaults to 0.5. */\n unstableThreshold?: number;\n}\n\n/**\n * Monitors network performance during uploads to enable adaptive chunking.\n *\n * Tracks upload samples over time and analyzes them to determine network conditions\n * (slow, fast, unstable). This information is used by smart chunking algorithms to\n * dynamically adjust chunk sizes for optimal upload performance.\n *\n * The monitor maintains a rolling window of recent samples and calculates various\n * metrics including average speed, latency, success rate, and throughput stability.\n *\n * @example Basic usage with smart chunking\n * ```typescript\n * const monitor = new NetworkMonitor({\n * maxSamples: 100,\n * slowThreshold: 50 * 1024, // 50 KB/s\n * fastThreshold: 5 * 1024 * 1024, // 5 MB/s\n * });\n *\n * // Record each upload\n * monitor.recordUpload(\n * chunkSize, // bytes\n * duration, // milliseconds\n * true, // success\n * latency // optional latency\n * );\n *\n * // Get current network condition\n * const condition = monitor.getNetworkCondition();\n * if (condition.type === 'slow') {\n * // Use smaller chunks\n * chunkSize = 256 * 1024;\n * } else if (condition.type === 'fast') {\n * // Use larger chunks\n * chunkSize = 5 * 1024 * 1024;\n * }\n * ```\n *\n * @example Monitoring network metrics\n * ```typescript\n * const monitor = new NetworkMonitor();\n *\n * // After some uploads\n * const metrics = monitor.getCurrentMetrics();\n * console.log(`Average speed: ${metrics.averageSpeed / 1024} KB/s`);\n * console.log(`Success rate: ${metrics.successRate * 100}%`);\n * console.log(`Average latency: ${metrics.latency}ms`);\n * ```\n */\nexport class NetworkMonitor {\n private samples: UploadSample[] = [];\n private config: Required<NetworkMonitorConfig>;\n private _currentMetrics: NetworkMetrics;\n\n /**\n * Creates a new NetworkMonitor instance.\n *\n * @param config - Optional configuration for thresholds and sample management\n */\n constructor(config: NetworkMonitorConfig = {}) {\n this.config = {\n maxSamples: config.maxSamples ?? 100,\n smoothingFactor: config.smoothingFactor ?? 0.1,\n minSamplesForCondition: config.minSamplesForCondition ?? 
5,\n slowThreshold: config.slowThreshold ?? 50 * 1024, // 50 KB/s\n fastThreshold: config.fastThreshold ?? 5 * 1024 * 1024, // 5 MB/s\n unstableThreshold: config.unstableThreshold ?? 0.5, // 50% coefficient of variation\n };\n\n this._currentMetrics = this.createEmptyMetrics();\n }\n\n /**\n * Adds a raw upload sample to the monitor.\n *\n * This is called internally by recordUpload but can also be used\n * to add pre-constructed samples for testing or custom tracking.\n *\n * @param sample - The upload sample to add\n */\n addSample(sample: UploadSample): void {\n this.samples.push(sample);\n\n // Keep only the most recent samples\n if (this.samples.length > this.config.maxSamples) {\n this.samples = this.samples.slice(-this.config.maxSamples);\n }\n\n this.updateMetrics();\n }\n\n /**\n * Records an upload operation for network analysis.\n *\n * This is the primary method for tracking upload performance. Each chunk upload\n * should be recorded to build an accurate picture of network conditions.\n *\n * @param size - Size of the uploaded chunk in bytes\n * @param duration - Time taken to upload in milliseconds\n * @param success - Whether the upload succeeded\n * @param latency - Optional network latency measurement in milliseconds\n *\n * @example Recording successful upload\n * ```typescript\n * const startTime = Date.now();\n * await uploadChunk(data);\n * const duration = Date.now() - startTime;\n * monitor.recordUpload(data.length, duration, true);\n * ```\n *\n * @example Recording failed upload\n * ```typescript\n * try {\n * const startTime = Date.now();\n * await uploadChunk(data);\n * monitor.recordUpload(data.length, Date.now() - startTime, true);\n * } catch (error) {\n * monitor.recordUpload(data.length, Date.now() - startTime, false);\n * }\n * ```\n */\n recordUpload(\n size: number,\n duration: number,\n success: boolean,\n latency?: number,\n ): void {\n this.addSample({\n size,\n duration,\n success,\n timestamp: Date.now(),\n latency,\n });\n }\n\n /**\n * Returns the current network metrics.\n *\n * Provides aggregated statistics about all recorded uploads including\n * average speed, latency, success rate, and totals.\n *\n * @returns A snapshot of current network performance metrics\n *\n * @example\n * ```typescript\n * const metrics = monitor.getCurrentMetrics();\n * console.log(`Speed: ${(metrics.averageSpeed / 1024).toFixed(2)} KB/s`);\n * console.log(`Success: ${(metrics.successRate * 100).toFixed(1)}%`);\n * console.log(`Latency: ${metrics.latency.toFixed(0)}ms`);\n * ```\n */\n getCurrentMetrics(): NetworkMetrics {\n return { ...this._currentMetrics };\n }\n\n /**\n * Analyzes recent upload samples to determine current network condition.\n *\n * Uses statistical analysis (coefficient of variation, average speed) to classify\n * the network as slow, fast, unstable, or unknown. 
The confidence level indicates\n * how reliable the assessment is based on the number of samples collected.\n *\n * @returns Current network condition with confidence level\n *\n * @example Adaptive chunking based on network condition\n * ```typescript\n * const condition = monitor.getNetworkCondition();\n *\n * if (condition.confidence > 0.7) {\n * switch (condition.type) {\n * case 'fast':\n * chunkSize = 10 * 1024 * 1024; // 10MB\n * break;\n * case 'slow':\n * chunkSize = 256 * 1024; // 256KB\n * break;\n * case 'unstable':\n * chunkSize = 1 * 1024 * 1024; // 1MB, conservative\n * break;\n * }\n * }\n * ```\n */\n getNetworkCondition(): NetworkCondition {\n if (this.samples.length < this.config.minSamplesForCondition) {\n return { type: \"unknown\", confidence: 0 };\n }\n\n const recentSamples = this.getRecentSuccessfulSamples();\n if (recentSamples.length < this.config.minSamplesForCondition) {\n return { type: \"unknown\", confidence: 0.3 };\n }\n\n const speeds = recentSamples.map(\n (sample) => sample.size / (sample.duration / 1000),\n );\n const avgSpeed =\n speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;\n\n // Calculate coefficient of variation for stability assessment\n const variance =\n speeds.reduce((sum, speed) => sum + (speed - avgSpeed) ** 2, 0) /\n speeds.length;\n const stdDev = Math.sqrt(variance);\n const coefficientOfVariation = stdDev / avgSpeed;\n\n // Determine network condition\n const confidence = Math.min(\n 1,\n this.samples.length / (this.config.minSamplesForCondition * 2),\n );\n\n if (coefficientOfVariation > this.config.unstableThreshold) {\n return { type: \"unstable\", confidence };\n }\n\n if (avgSpeed < this.config.slowThreshold) {\n return { type: \"slow\", confidence };\n }\n\n if (avgSpeed > this.config.fastThreshold) {\n return { type: \"fast\", confidence };\n }\n\n // Default to slow for conservative chunking\n return { type: \"slow\", confidence: confidence * 0.7 };\n }\n\n /**\n * Calculates the optimal upload throughput based on recent successful uploads.\n *\n * Uses a weighted average that gives more weight to recent samples,\n * providing a responsive measure of current network capacity.\n *\n * @returns Optimal throughput in bytes per second, or 0 if no successful samples\n *\n * @example Using for chunk size calculation\n * ```typescript\n * const throughput = monitor.getOptimalThroughput();\n * // Target 1 second per chunk\n * const optimalChunkSize = Math.min(throughput, MAX_CHUNK_SIZE);\n * ```\n */\n getOptimalThroughput(): number {\n const recentSamples = this.getRecentSuccessfulSamples(10);\n if (recentSamples.length === 0) return 0;\n\n // Calculate weighted average with recent samples having higher weight\n let totalWeight = 0;\n let weightedSum = 0;\n\n recentSamples.forEach((sample, index) => {\n const weight = index + 1; // More recent samples get higher weight\n const throughput = sample.size / (sample.duration / 1000);\n weightedSum += throughput * weight;\n totalWeight += weight;\n });\n\n return totalWeight > 0 ? 
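/* Worked example of the weighting (illustrative): with three recent\n * throughput samples t1, t2, t3 (oldest to newest) the weights are 1, 2, 3,\n * so the result is (1*t1 + 2*t2 + 3*t3) / (1 + 2 + 3) - newer samples\n * dominate the estimate.\n */ 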
weightedSum / totalWeight : 0;\n }\n\n /**\n * Resets all samples and metrics to initial state.\n *\n * Useful when network conditions change significantly or when\n * starting a new upload session.\n *\n * @example Resetting between uploads\n * ```typescript\n * // Complete first upload\n * await uploadFile1();\n *\n * // Reset metrics before starting a new upload\n * monitor.reset();\n * await uploadFile2();\n * ```\n */\n reset(): void {\n this.samples = [];\n this._currentMetrics = this.createEmptyMetrics();\n }\n\n private getRecentSuccessfulSamples(count?: number): UploadSample[] {\n const successful = this.samples.filter((sample) => sample.success);\n return count ? successful.slice(-count) : successful;\n }\n\n private updateMetrics(): void {\n const successfulSamples = this.samples.filter((sample) => sample.success);\n const totalRequests = this.samples.length;\n const totalSuccessful = successfulSamples.length;\n\n if (totalRequests === 0) {\n this._currentMetrics = this.createEmptyMetrics();\n return;\n }\n\n const totalBytes = successfulSamples.reduce(\n (sum, sample) => sum + sample.size,\n 0,\n );\n const totalTime = successfulSamples.reduce(\n (sum, sample) => sum + sample.duration,\n 0,\n );\n\n const averageSpeed = totalTime > 0 ? totalBytes / (totalTime / 1000) : 0;\n const successRate = totalSuccessful / totalRequests;\n const errorRate = 1 - successRate;\n\n // Calculate average latency from samples that have latency data\n const samplesWithLatency = this.samples.filter(\n (sample) => sample.latency !== undefined,\n );\n const averageLatency =\n samplesWithLatency.length > 0\n ? samplesWithLatency.reduce(\n (sum, sample) => sum + (sample.latency || 0),\n 0,\n ) / samplesWithLatency.length\n : 0;\n\n this._currentMetrics = {\n averageSpeed,\n latency: averageLatency,\n successRate,\n errorRate,\n totalRequests,\n totalBytes,\n totalTime,\n };\n }\n\n private createEmptyMetrics(): NetworkMetrics {\n return {\n averageSpeed: 0,\n latency: 0,\n successRate: 0,\n errorRate: 0,\n totalRequests: 0,\n totalBytes: 0,\n totalTime: 0,\n };\n }\n}\n","import type { NetworkCondition, NetworkMonitor } from \"./network-monitor\";\nimport type { ConnectionMetrics } from \"./services/http-client\";\n\nexport interface ChunkingStrategy {\n name: string;\n minChunkSize: number;\n maxChunkSize: number;\n initialChunkSize: number;\n adaptationRate: number; // how quickly to adapt (0-1)\n}\n\nexport interface DatastoreConstraints {\n minChunkSize: number;\n maxChunkSize: number;\n optimalChunkSize: number;\n requiresOrderedChunks?: boolean;\n}\n\nexport interface SmartChunkerConfig {\n enabled?: boolean;\n fallbackChunkSize?: number;\n minChunkSize?: number;\n maxChunkSize?: number;\n initialChunkSize?: number;\n targetUtilization?: number; // target bandwidth utilization (0-1)\n adaptationRate?: number;\n conservativeMode?: boolean;\n connectionPoolingAware?: boolean; // enable connection pooling optimizations\n datastoreConstraints?: DatastoreConstraints;\n}\n\nexport interface ChunkSizeDecision {\n size: number;\n strategy: string;\n reason: string;\n networkCondition: NetworkCondition;\n}\n\nconst DEFAULT_STRATEGIES: Record<string, ChunkingStrategy> = {\n conservative: {\n name: \"conservative\",\n minChunkSize: 64 * 1024, // 64 KB\n maxChunkSize: 2 * 1024 * 1024, // 2 MB\n initialChunkSize: 256 * 1024, // 256 KB\n adaptationRate: 0.1,\n },\n balanced: {\n name: \"balanced\",\n minChunkSize: 128 * 1024, // 128 KB\n maxChunkSize: 8 * 1024 * 1024, // 8 MB\n initialChunkSize: 512 * 1024, // 
512 KB\n adaptationRate: 0.2,\n },\n aggressive: {\n name: \"aggressive\",\n minChunkSize: 256 * 1024, // 256 KB\n maxChunkSize: 32 * 1024 * 1024, // 32 MB\n initialChunkSize: 1024 * 1024, // 1 MB\n adaptationRate: 0.3,\n },\n};\n\nconst S3_OPTIMIZED_STRATEGIES: Record<string, ChunkingStrategy> = {\n conservative: {\n name: \"s3-conservative\",\n minChunkSize: 5 * 1024 * 1024, // 5MB - S3 minimum\n maxChunkSize: 64 * 1024 * 1024, // 64MB\n initialChunkSize: 8 * 1024 * 1024, // 8MB\n adaptationRate: 0.1,\n },\n balanced: {\n name: \"s3-balanced\",\n minChunkSize: 5 * 1024 * 1024, // 5MB - S3 minimum\n maxChunkSize: 128 * 1024 * 1024, // 128MB\n initialChunkSize: 16 * 1024 * 1024, // 16MB\n adaptationRate: 0.2,\n },\n aggressive: {\n name: \"s3-aggressive\",\n minChunkSize: 5 * 1024 * 1024, // 5MB - S3 minimum\n maxChunkSize: 256 * 1024 * 1024, // 256MB\n initialChunkSize: 32 * 1024 * 1024, // 32MB\n adaptationRate: 0.3,\n },\n};\n\nexport class SmartChunker {\n private config: Required<Omit<SmartChunkerConfig, \"datastoreConstraints\">> & {\n datastoreConstraints?: DatastoreConstraints;\n };\n private networkMonitor: NetworkMonitor;\n private currentChunkSize: number;\n private lastDecision: ChunkSizeDecision | null = null;\n private consecutiveFailures = 0;\n private consecutiveSuccesses = 0;\n private connectionMetrics: ConnectionMetrics | null = null;\n\n constructor(networkMonitor: NetworkMonitor, config: SmartChunkerConfig = {}) {\n this.networkMonitor = networkMonitor;\n this.config = {\n enabled: config.enabled ?? true,\n fallbackChunkSize: config.fallbackChunkSize ?? 1024 * 1024, // 1 MB\n minChunkSize: config.minChunkSize ?? 64 * 1024, // 64 KB\n maxChunkSize: config.maxChunkSize ?? 32 * 1024 * 1024, // 32 MB\n initialChunkSize: config.initialChunkSize ?? 512 * 1024, // 512 KB\n targetUtilization: config.targetUtilization ?? 0.85, // 85%\n adaptationRate: config.adaptationRate ?? 0.2,\n conservativeMode: config.conservativeMode ?? false,\n connectionPoolingAware: config.connectionPoolingAware ?? 
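/* A hedged construction sketch (illustrative): S3-like datastore constraints\n * (5MB part minimum) steer selectStrategy below toward S3_OPTIMIZED_STRATEGIES.\n * const chunker = new SmartChunker(new NetworkMonitor(), {\n * datastoreConstraints: {\n * minChunkSize: 5 * 1024 * 1024,\n * maxChunkSize: 5 * 1024 * 1024 * 1024,\n * optimalChunkSize: 8 * 1024 * 1024,\n * },\n * });\n */ 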
true, // Enable by default\n datastoreConstraints: config.datastoreConstraints,\n };\n\n this.currentChunkSize = this.getEffectiveInitialChunkSize();\n }\n\n private getEffectiveInitialChunkSize(): number {\n if (this.config.datastoreConstraints) {\n return Math.max(\n this.config.initialChunkSize,\n this.config.datastoreConstraints.optimalChunkSize,\n );\n }\n return this.config.initialChunkSize;\n }\n\n private applyDatastoreConstraints(size: number): number {\n if (this.config.datastoreConstraints) {\n return Math.max(\n this.config.datastoreConstraints.minChunkSize,\n Math.min(this.config.datastoreConstraints.maxChunkSize, size),\n );\n }\n return size;\n }\n\n getNextChunkSize(remainingBytes?: number): ChunkSizeDecision {\n if (!this.config.enabled) {\n return {\n size: this.config.fallbackChunkSize,\n strategy: \"fixed\",\n reason: \"Smart chunking disabled\",\n networkCondition: { type: \"unknown\", confidence: 0 },\n };\n }\n\n const networkCondition = this.networkMonitor.getNetworkCondition();\n\n let newSize = this.currentChunkSize;\n let strategy = \"adaptive\";\n let reason = \"\";\n\n // If we don't have enough data, use initial strategy\n if (networkCondition.type === \"unknown\") {\n newSize = this.config.initialChunkSize;\n strategy = \"initial\";\n reason = \"Insufficient network data\";\n } else {\n const chunkingStrategy = this.selectStrategy(networkCondition);\n newSize = this.calculateOptimalChunkSize(\n networkCondition,\n chunkingStrategy,\n );\n strategy = chunkingStrategy.name;\n reason = `Network condition: ${networkCondition.type} (confidence: ${Math.round(networkCondition.confidence * 100)}%)`;\n }\n\n // Apply remaining bytes limit\n if (remainingBytes && remainingBytes < newSize) {\n newSize = remainingBytes;\n reason += `, limited by remaining bytes (${remainingBytes})`;\n }\n\n // Apply datastore constraints first\n newSize = this.applyDatastoreConstraints(newSize);\n\n // Ensure bounds\n newSize = Math.max(\n this.config.minChunkSize,\n Math.min(this.config.maxChunkSize, newSize),\n );\n\n this.currentChunkSize = newSize;\n this.lastDecision = {\n size: newSize,\n strategy,\n reason,\n networkCondition,\n };\n\n return this.lastDecision;\n }\n\n recordChunkResult(size: number, duration: number, success: boolean): void {\n // Record the result in network monitor\n this.networkMonitor.recordUpload(size, duration, success);\n\n // Update our internal state\n if (success) {\n this.consecutiveSuccesses++;\n this.consecutiveFailures = 0;\n } else {\n this.consecutiveFailures++;\n this.consecutiveSuccesses = 0;\n }\n\n // Adjust chunk size based on recent performance\n this.adaptChunkSize(success, duration, size);\n }\n\n getCurrentChunkSize(): number {\n return this.currentChunkSize;\n }\n\n getLastDecision(): ChunkSizeDecision | null {\n return this.lastDecision;\n }\n\n reset(): void {\n this.currentChunkSize = this.config.initialChunkSize;\n this.consecutiveFailures = 0;\n this.consecutiveSuccesses = 0;\n this.lastDecision = null;\n this.connectionMetrics = null;\n }\n\n /**\n * Update connection metrics for connection pooling aware optimizations\n */\n updateConnectionMetrics(metrics: ConnectionMetrics): void {\n this.connectionMetrics = metrics;\n }\n\n /**\n * Get insights about connection pooling impact on chunking\n */\n getConnectionPoolingInsights(): {\n isOptimized: boolean;\n reuseRate: number;\n recommendedMinChunkSize: number;\n connectionOverhead: number;\n } {\n if (!this.connectionMetrics || !this.config.connectionPoolingAware) {\n return {\n 
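/* Worked example for applyDatastoreConstraints above (illustrative): with a\n * datastore minChunkSize of 5MB and a computed size of 1MB, the chunk is\n * raised to 5MB before the global min/max bounds in getNextChunkSize apply.\n */\n 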
isOptimized: false,\n reuseRate: 0,\n recommendedMinChunkSize: this.config.minChunkSize,\n connectionOverhead: 0,\n };\n }\n\n const reuseRate = this.connectionMetrics.reuseRate;\n const avgConnectionTime = this.connectionMetrics.averageConnectionTime;\n\n // With good connection reuse, we can afford smaller chunks\n const connectionOverhead = (1 - reuseRate) * avgConnectionTime;\n const recommendedMinChunkSize = Math.max(\n this.config.minChunkSize,\n Math.floor(connectionOverhead * 10000), // 10KB per ms of overhead\n );\n\n return {\n isOptimized: reuseRate > 0.7,\n reuseRate,\n recommendedMinChunkSize,\n connectionOverhead,\n };\n }\n\n private selectStrategy(networkCondition: NetworkCondition): ChunkingStrategy {\n const fallbackStrategy: ChunkingStrategy = {\n name: \"fallback\",\n minChunkSize: 128 * 1024,\n maxChunkSize: 4 * 1024 * 1024,\n initialChunkSize: 512 * 1024,\n adaptationRate: 0.2,\n };\n\n // Use S3-optimized strategies if datastore constraints indicate S3 (5MB minimum)\n const isS3Like =\n this.config.datastoreConstraints?.minChunkSize === 5 * 1024 * 1024;\n const strategiesSource = isS3Like\n ? S3_OPTIMIZED_STRATEGIES\n : DEFAULT_STRATEGIES;\n\n if (this.config.conservativeMode) {\n return strategiesSource.conservative ?? fallbackStrategy;\n }\n\n // Enhanced strategy selection with connection pooling awareness\n let baseStrategy: ChunkingStrategy;\n\n switch (networkCondition.type) {\n case \"fast\":\n baseStrategy =\n networkCondition.confidence > 0.7\n ? (strategiesSource.aggressive ?? fallbackStrategy)\n : (strategiesSource.balanced ?? fallbackStrategy);\n break;\n case \"slow\":\n baseStrategy = strategiesSource.conservative ?? fallbackStrategy;\n break;\n case \"unstable\":\n baseStrategy = strategiesSource.conservative ?? fallbackStrategy;\n break;\n default:\n baseStrategy = strategiesSource.balanced ?? 
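/* Worked numbers for getConnectionPoolingInsights above (illustrative): with\n * reuseRate = 0.6 and averageConnectionTime = 50ms, connectionOverhead =\n * (1 - 0.6) * 50 = 20ms and recommendedMinChunkSize = max(minChunkSize,\n * 20 * 10000) = roughly 200KB - higher per-request overhead favors larger\n * chunks.\n */ 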
fallbackStrategy;\n }\n\n // Apply connection pooling optimizations\n if (this.config.connectionPoolingAware && this.connectionMetrics) {\n return this.optimizeStrategyForConnectionPooling(baseStrategy);\n }\n\n return baseStrategy;\n }\n\n /**\n * Optimize chunking strategy based on connection pooling performance\n */\n private optimizeStrategyForConnectionPooling(\n strategy: ChunkingStrategy,\n ): ChunkingStrategy {\n if (!this.connectionMetrics) return strategy;\n\n const insights = this.getConnectionPoolingInsights();\n const reuseRate = insights.reuseRate;\n\n // High connection reuse allows for more aggressive chunking\n if (reuseRate > 0.8) {\n return {\n ...strategy,\n name: `${strategy.name}-pooled-aggressive`,\n minChunkSize: Math.max(strategy.minChunkSize * 0.5, 32 * 1024), // Smaller min chunks\n adaptationRate: Math.min(strategy.adaptationRate * 1.3, 0.5), // Faster adaptation\n };\n }\n\n // Good connection reuse allows moderate optimization\n if (reuseRate > 0.5) {\n return {\n ...strategy,\n name: `${strategy.name}-pooled-moderate`,\n minChunkSize: Math.max(strategy.minChunkSize * 0.75, 64 * 1024),\n adaptationRate: Math.min(strategy.adaptationRate * 1.1, 0.4),\n };\n }\n\n // Poor connection reuse requires conservative approach\n return {\n ...strategy,\n name: `${strategy.name}-pooled-conservative`,\n minChunkSize: Math.max(\n strategy.minChunkSize * 1.5,\n insights.recommendedMinChunkSize,\n ),\n adaptationRate: strategy.adaptationRate * 0.8,\n };\n }\n\n private calculateOptimalChunkSize(\n networkCondition: NetworkCondition,\n strategy: ChunkingStrategy,\n ): number {\n let targetSize = this.currentChunkSize;\n\n // Base calculation on current throughput\n const optimalThroughput = this.networkMonitor.getOptimalThroughput();\n\n if (optimalThroughput > 0) {\n // Calculate target chunk duration (aim for 2-5 seconds per chunk)\n const targetDuration = this.getTargetChunkDuration(networkCondition);\n const theoreticalSize =\n optimalThroughput * targetDuration * this.config.targetUtilization;\n\n // Blend current size with theoretical optimal size\n const blendFactor = strategy.adaptationRate;\n targetSize =\n this.currentChunkSize * (1 - blendFactor) +\n theoreticalSize * blendFactor;\n }\n\n // Apply strategy constraints\n targetSize = Math.max(\n strategy.minChunkSize,\n Math.min(strategy.maxChunkSize, targetSize),\n );\n\n // Apply failure-based adjustments\n if (this.consecutiveFailures > 0) {\n // Reduce size on failures\n const reductionFactor = Math.min(0.5, this.consecutiveFailures * 0.2);\n targetSize *= 1 - reductionFactor;\n } else if (this.consecutiveSuccesses > 2) {\n // Gradually increase size on consistent success\n const increaseFactor = Math.min(0.3, this.consecutiveSuccesses * 0.05);\n targetSize *= 1 + increaseFactor;\n }\n\n return Math.round(targetSize);\n }\n\n private getTargetChunkDuration(networkCondition: NetworkCondition): number {\n switch (networkCondition.type) {\n case \"fast\":\n return 3; // 3 seconds for fast connections\n case \"slow\":\n return 5; // 5 seconds for slow connections to reduce overhead\n case \"unstable\":\n return 2; // 2 seconds for unstable connections for quick recovery\n default:\n return 3; // Default to 3 seconds\n }\n }\n\n private adaptChunkSize(\n success: boolean,\n duration: number,\n size: number,\n ): void {\n if (!success) {\n // On failure, be more conservative\n this.currentChunkSize = Math.max(\n this.config.minChunkSize,\n this.currentChunkSize * 0.8,\n );\n return;\n }\n\n // On success, check if we 
should adjust based on performance\n const throughput = size / (duration / 1000); // bytes per second\n const metrics = this.networkMonitor.getCurrentMetrics();\n\n if (metrics.averageSpeed > 0) {\n const utilizationRatio = throughput / metrics.averageSpeed;\n\n if (utilizationRatio < this.config.targetUtilization * 0.8) {\n // We're not utilizing bandwidth well, try larger chunks\n this.currentChunkSize = Math.min(\n this.config.maxChunkSize,\n this.currentChunkSize * 1.1,\n );\n } else if (utilizationRatio > this.config.targetUtilization * 1.2) {\n // We might be overloading, try smaller chunks\n this.currentChunkSize = Math.max(\n this.config.minChunkSize,\n this.currentChunkSize * 0.95,\n );\n }\n }\n }\n}\n","/**\n * Specific error types that can occur during upload and flow operations.\n *\n * These error names provide fine-grained categorization of failures,\n * allowing applications to implement targeted error handling and recovery strategies.\n *\n * @example Error handling by type\n * ```typescript\n * try {\n * await client.upload(file);\n * } catch (error) {\n * if (error instanceof UploadistaError) {\n * if (error.isNetworkError()) {\n * // Retry network-related failures\n * console.log('Network issue, retrying...');\n * } else if (error.name === 'UPLOAD_NOT_FOUND') {\n * // Handle missing upload\n * console.log('Upload not found, starting fresh');\n * }\n * }\n * }\n * ```\n */\nexport type UploadistaErrorName =\n | \"UPLOAD_SIZE_NOT_SPECIFIED\"\n | \"NETWORK_ERROR\"\n | \"NETWORK_UNEXPECTED_RESPONSE\"\n | \"UPLOAD_CHUNK_FAILED\"\n | \"WRONG_UPLOAD_SIZE\"\n | \"UPLOAD_LOCKED\"\n | \"UPLOAD_NOT_FOUND\"\n | \"CREATE_UPLOAD_FAILED\"\n | \"DELETE_UPLOAD_FAILED\"\n | \"PARALLEL_SEGMENT_CREATION_FAILED\"\n | \"PARALLEL_SEGMENT_UPLOAD_FAILED\"\n | \"FLOW_NOT_FOUND\"\n | \"FLOW_INIT_FAILED\"\n | \"FLOW_RUN_FAILED\"\n | \"FLOW_RESUMED_FAILED\"\n | \"FLOW_PAUSE_FAILED\"\n | \"FLOW_CANCEL_FAILED\"\n | \"FLOW_UNEXPECTED_STATE\"\n | \"FLOW_INCOMPATIBLE\"\n | \"FLOW_NO_UPLOAD_ID\"\n | \"FLOW_TIMEOUT\"\n | \"FLOW_FINALIZE_FAILED\"\n | \"VALIDATION_ERROR\"\n | \"JOB_NOT_FOUND\"\n | \"WEBSOCKET_AUTH_FAILED\";\n\n/**\n * Custom error class for all Uploadista client operations.\n *\n * Extends the standard Error class with additional context including\n * typed error names, HTTP status codes, and underlying error causes.\n * This allows for precise error handling and debugging.\n *\n * @example Basic error handling\n * ```typescript\n * try {\n * await client.upload(file);\n * } catch (error) {\n * if (error instanceof UploadistaError) {\n * console.log(`Error: ${error.name} - ${error.message}`);\n * console.log(`HTTP Status: ${error.status}`);\n * }\n * }\n * ```\n *\n * @example Network error detection\n * ```typescript\n * try {\n * await client.upload(file);\n * } catch (error) {\n * if (error instanceof UploadistaError && error.isNetworkError()) {\n * // Implement retry logic for network failures\n * await retryWithBackoff(() => client.upload(file));\n * }\n * }\n * ```\n */\nexport class UploadistaError extends Error {\n /**\n * Typed error name indicating the specific type of failure\n */\n name: UploadistaErrorName;\n\n /**\n * Human-readable error message describing what went wrong\n */\n message: string;\n\n /**\n * The underlying error that caused this failure, if any\n */\n cause: Error | undefined;\n\n /**\n * HTTP status code from the server response, if applicable\n */\n status: number | undefined;\n\n /**\n * Creates a new UploadistaError instance.\n *\n * @param options - Error 
configuration\n * @param options.name - Typed error name for categorization\n * @param options.message - Descriptive error message\n * @param options.cause - Optional underlying error that caused this failure\n * @param options.status - Optional HTTP status code from server response\n */\n constructor({\n name,\n message,\n cause,\n status,\n }: {\n name: UploadistaErrorName;\n message: string;\n cause?: Error;\n status?: number;\n }) {\n super();\n this.name = name;\n this.cause = cause;\n this.message = message;\n this.status = status;\n }\n\n /**\n * Checks if this error is related to network connectivity issues.\n *\n * Network errors are typically transient and may succeed on retry,\n * making them good candidates for automatic retry logic.\n *\n * @returns True if this is a network-related error\n *\n * @example\n * ```typescript\n * if (error.isNetworkError()) {\n * // Safe to retry\n * await retry(() => uploadChunk());\n * }\n * ```\n */\n isNetworkError(): boolean {\n return (\n this.name === \"NETWORK_ERROR\" ||\n this.name === \"NETWORK_UNEXPECTED_RESPONSE\"\n );\n }\n}\n","import { Base64 } from \"js-base64\";\nimport { UploadistaError } from \"../error\";\n\n/**\n * Encodes metadata for upload headers\n */\nexport function encodeMetadata(\n metadata: Record<string, string | null>,\n): string {\n return Object.entries(metadata)\n .map(([key, value]) => `${key} ${Base64.encode(String(value))}`)\n .join(\",\");\n}\n\n/**\n * Checks whether a given status is in the range of the expected category.\n * For example, only a status between 200 and 299 will satisfy the category 200.\n */\nexport function inStatusCategory(\n status: number,\n category: 100 | 200 | 300 | 400 | 500,\n): boolean {\n return status >= category && status < category + 100;\n}\n\nexport type CalculateFileSizeOptions = {\n uploadLengthDeferred?: boolean;\n uploadSize?: number;\n};\n\n/**\n * Calculate the final file size for upload based on options\n */\nexport function calculateFileSize(\n originalSize: number | null,\n { uploadLengthDeferred, uploadSize }: CalculateFileSizeOptions,\n): number | null {\n // First, we look at the uploadLengthDeferred option.\n // Next, we check if the caller has supplied a manual upload size.\n // Finally, we try to use the calculated size from the source object.\n if (uploadLengthDeferred) {\n return null;\n }\n\n if (uploadSize != null) {\n return uploadSize;\n }\n\n const size = originalSize;\n if (size == null) {\n throw new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\",\n message:\n \"cannot automatically derive upload's size from input. 
Specify it manually using the `uploadSize` option or use the `uploadLengthDeferred` option",\n });\n }\n\n return size;\n}\n\n/**\n * Calculate segments for parallel upload\n */\nexport function calculateSegments(\n fileSize: number,\n parallelUploads: number,\n parallelChunkSize?: number,\n): { startByte: number; endByte: number; segmentIndex: number }[] {\n if (parallelUploads <= 1) {\n return [{ startByte: 0, endByte: fileSize, segmentIndex: 0 }];\n }\n\n // Use parallelChunkSize if provided, otherwise divide file equally\n const segments: {\n startByte: number;\n endByte: number;\n segmentIndex: number;\n }[] = [];\n\n if (parallelChunkSize) {\n // Fixed segment size approach\n let currentByte = 0;\n let segmentIndex = 0;\n\n while (currentByte < fileSize) {\n const endByte = Math.min(currentByte + parallelChunkSize, fileSize);\n segments.push({\n startByte: currentByte,\n endByte,\n segmentIndex,\n });\n currentByte = endByte;\n segmentIndex++;\n }\n } else {\n // Equal division approach\n const segmentSize = Math.ceil(fileSize / parallelUploads);\n\n for (let i = 0; i < parallelUploads; i++) {\n const startByte = i * segmentSize;\n const endByte = Math.min(startByte + segmentSize, fileSize);\n\n if (startByte < fileSize) {\n segments.push({\n startByte,\n endByte,\n segmentIndex: i,\n });\n }\n }\n }\n\n return segments;\n}\n","import type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { PlatformService } from \"../services/platform-service\";\nimport type { SmartChunker } from \"../smart-chunker\";\nimport type { UploadResponse } from \"../types/upload-response\";\nimport { inStatusCategory } from \"./upload-utils\";\n\nexport type OnProgress = (\n uploadId: string,\n bytesSent: number,\n bytesTotal: number | null,\n) => void;\n\nexport type OnShouldRetry = (\n error: UploadistaError,\n retryAttempt: number,\n) => boolean;\n\n/**\n * uploadChunk reads a chunk from the source and sends it through the\n * supplied UploadistaApi client. The raw response is returned for the\n * caller to handle.\n */\nexport async function uploadChunk({\n uploadId,\n source,\n offset,\n uploadLengthDeferred,\n abortController,\n onProgress,\n smartChunker,\n uploadistaApi,\n logger,\n}: {\n uploadId: string;\n source: FileSource;\n offset: number;\n uploadLengthDeferred: boolean | undefined;\n abortController: AbortControllerLike;\n onProgress?: OnProgress;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n}): Promise<UploadResponse> {\n const start = offset ?? 0;\n const remainingBytes = source.size ? source.size - start : undefined;\n const chunkSizeDecision = smartChunker.getNextChunkSize(remainingBytes);\n const currentChunkSize = chunkSizeDecision.size;\n let end = start + currentChunkSize;\n\n // The specified chunkSize may be Infinity or the calculated end position\n // may exceed the file's size. In both cases, we limit the end position to\n // the input's total size for simpler calculations and correctness.\n if (\n source.size &&\n (end === Number.POSITIVE_INFINITY || end > source.size) &&\n !uploadLengthDeferred\n ) {\n end = source.size;\n }\n\n const { value, size, done } = await source.slice(start, end);\n const sizeOfValue = size ?? 
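/* Worked example for calculateSegments above (illustrative): fileSize = 10,\n * parallelUploads = 3 gives segmentSize = ceil(10 / 3) = 4 and segments\n * [0,4), [4,8), [8,10) - the final segment is clamped to fileSize.\n */ 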
0;\n const chunkStartTime = Date.now();\n\n // If the upload length is deferred, the upload size was not specified during\n // upload creation. So, if the file reader is done reading, we know the total\n // upload size and can tell the server.\n if (uploadLengthDeferred && done) {\n source.size = offset + sizeOfValue;\n }\n\n // The specified uploadSize might not match the actual amount of data that a source\n // provides. In these cases, we cannot successfully complete the upload, so we\n // error out instead and let the user know. Otherwise, the client would be stuck\n // in a loop of repeating empty PATCH requests.\n // See https://community.transloadit.com/t/how-to-abort-hanging-companion-uploads/16488/13\n const newSize = offset + sizeOfValue;\n if (!uploadLengthDeferred && done && newSize !== source.size) {\n throw new UploadistaError({\n name: \"WRONG_UPLOAD_SIZE\",\n message: `upload was configured with a size of ${source.size} bytes, but the source is done after ${newSize} bytes`,\n });\n }\n\n const result = await uploadistaApi.uploadChunk(uploadId, value, {\n onProgress: (bytes, total) => {\n onProgress?.(uploadId, bytes, total);\n },\n abortController,\n });\n\n // Record performance metrics\n const chunkDuration = Date.now() - chunkStartTime;\n const success = result.status >= 200 && result.status < 300;\n\n smartChunker.recordChunkResult(sizeOfValue, chunkDuration, success);\n\n logger.log(\n `Chunk upload ${success ? \"succeeded\" : \"failed\"}: ${sizeOfValue} bytes in ${chunkDuration}ms (${chunkSizeDecision.strategy} strategy)`,\n );\n\n return result;\n}\n\n/**\n * Checks whether it is OK to retry a request.\n * @param {PlatformService} platformService platform service used to check connectivity\n * @param {UploadistaError} err the error returned from the last request\n * @param {number} retryAttempt the number of times the request has already been retried\n * @param {number[]} retryDelays configured retry delays\n * @param {OnShouldRetry} onShouldRetry optional custom retry logic\n */\nexport function shouldRetry(\n platformService: PlatformService,\n err: UploadistaError,\n retryAttempt: number,\n retryDelays?: number[],\n onShouldRetry?: OnShouldRetry,\n): boolean {\n if (\n retryDelays == null ||\n retryAttempt >= retryDelays.length ||\n !err.isNetworkError()\n ) {\n return false;\n }\n\n if (onShouldRetry) {\n return onShouldRetry(err, retryAttempt);\n }\n\n return defaultOnShouldRetry(platformService, err);\n}\n\n/**\n * Determines whether the request should be retried. Retries only when the\n * status is not a 4xx (with 409 and 423 as exceptions) and the platform\n * reports being online.\n * @param {PlatformService} platformService\n * @param {UploadistaError} err\n * @returns {boolean}\n */\nexport function defaultOnShouldRetry(\n platformService: PlatformService,\n err: UploadistaError,\n): boolean {\n const status = err.status ?? 
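/* Hedged examples of this predicate: a 500 retries (not a 4xx); a 429 does\n * not (a 4xx other than 409/423); 409 and 423 retry despite being 4xx; and\n * every retry additionally requires platformService.isOnline() to be true.\n */ 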
0;\n return (\n (!inStatusCategory(status, 400) || status === 409 || status === 423) &&\n platformService.isOnline()\n );\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\nimport type { FlowUploadConfig } from \"../types/flow-upload-config\";\n\nimport { shouldRetry } from \"./chunk-upload\";\nimport type { Callbacks } from \"./single-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport { inStatusCategory } from \"./upload-utils\";\n\n/**\n * Start a flow-based upload by initializing the streaming input node\n */\nexport async function startFlowUpload({\n source,\n flowConfig,\n uploadistaApi,\n logger,\n platformService,\n openWebSocket,\n closeWebSocket,\n ...callbacks\n}: {\n source: FileSource;\n flowConfig: FlowUploadConfig;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n platformService: PlatformService;\n openWebSocket: (jobId: string) => void;\n closeWebSocket: (jobId: string) => void;\n} & Callbacks): Promise<\n { jobId: string; uploadFile: UploadFile; inputNodeId: string } | undefined\n> {\n const { flowId, storageId } = flowConfig;\n\n // Get the flow to find the streaming input node\n const { flow } = await uploadistaApi.getFlow(flowId);\n\n // Find the streaming-input-node in the flow\n const inputNode = flow.nodes.find((node) => node.type === \"input\");\n\n if (!inputNode) {\n const error = new UploadistaError({\n name: \"FLOW_INCOMPATIBLE\",\n message: `Flow ${flowId} does not have a streaming input node. The flow must contain a node with type \"input\" to support flow uploads.`,\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n const inputNodeId = inputNode.id;\n\n // Step 1: Initialize the flow with init operation\n const metadata = {\n originalName: source.name ?? \"unknown\",\n mimeType: source.type ?? \"application/octet-stream\",\n size: source.size ?? 
0,\n ...flowConfig.metadata,\n };\n\n logger.log(`Starting flow upload for flow ${flowId}, node ${inputNodeId}`);\n\n const { status, job } = await uploadistaApi.runFlow(flowId, storageId, {\n [inputNodeId]: {\n operation: \"init\",\n storageId,\n metadata,\n },\n });\n\n const jobId = job.id;\n\n if (!inStatusCategory(status, 200) || !jobId) {\n const error = new UploadistaError({\n name: \"FLOW_INIT_FAILED\",\n message: \"Failed to initialize flow upload\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n callbacks.onJobStart?.(jobId);\n\n logger.log(`Flow job ${jobId} created, opening WebSocket`);\n\n // Open WebSocket to listen for flow events\n // Events are buffered in the Durable Object until connection is established\n openWebSocket(jobId);\n\n logger.log(`Waiting for upload ID from node`);\n\n // Step 2: Wait for the streaming-input-node to pause and return the upload file\n // Poll job status until paused (with timeout)\n const maxAttempts = 60; // 30 seconds total\n const pollInterval = 500; // 0.5 second\n let attempts = 0;\n let jobStatus = await uploadistaApi.getJobStatus(jobId);\n\n while (jobStatus.status !== \"paused\" && attempts < maxAttempts) {\n await new Promise<void>((resolve) =>\n platformService.setTimeout(resolve, pollInterval),\n );\n jobStatus = await uploadistaApi.getJobStatus(jobId);\n attempts++;\n }\n\n if (jobStatus.status !== \"paused\") {\n const error = new UploadistaError({\n name: \"FLOW_TIMEOUT\",\n message: `Flow did not pause after init (status: ${jobStatus.status})`,\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n // Get the upload file from streaming input node task result\n const streamingInputTask = jobStatus.tasks.find(\n (task) => task.nodeId === inputNodeId,\n );\n const uploadFile = streamingInputTask?.result as UploadFile;\n\n if (!uploadFile?.id) {\n const error = new UploadistaError({\n name: \"FLOW_NO_UPLOAD_ID\",\n message: \"Flow did not return upload ID after init\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n logger.log(`Upload ID received: ${uploadFile.id}`);\n\n callbacks.onStart?.({\n uploadId: uploadFile.id,\n size: source.size ?? null,\n });\n\n return { jobId, uploadFile, inputNodeId };\n}\n\n/**\n * Upload chunks directly to the upload API (not through resumeFlow)\n * This is more efficient and reuses the existing upload infrastructure\n */\nexport async function performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset,\n source,\n retryAttempt = 0,\n abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n ...callbacks\n}: {\n jobId: string;\n uploadFile: UploadFile;\n inputNodeId: string;\n offset: number;\n retryAttempt?: number;\n source: FileSource;\n abortController: AbortControllerLike;\n retryDelays: number[] | undefined;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n platformService: PlatformService;\n onRetry?: (timeout: Timeout) => void;\n} & Callbacks): Promise<void> {\n let offsetBeforeRetry = offset;\n let currentOffset = offset;\n\n try {\n // Get optimal chunk size\n const remainingBytes = source.size ? source.size - offset : undefined;\n const chunkSizeDecision = smartChunker.getNextChunkSize(remainingBytes);\n const chunkSize = chunkSizeDecision.size;\n const endByte = Math.min(offset + chunkSize, source.size ?? 
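/* Protocol recap (a hedged summary of the steps implemented above): runFlow\n * with an \"init\" operation -> poll getJobStatus until \"paused\" -> read the\n * upload ID from the input node's task result -> stream chunks with\n * uploadChunk -> resumeFlow with a \"finalize\" operation once the offset\n * reaches the file size.\n */ 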
0);\n const sliceResult = await source.slice(offset, endByte);\n\n if (!sliceResult || !sliceResult.value) {\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Failed to read chunk from file\",\n });\n }\n\n const chunkData = sliceResult.value;\n\n // Upload chunk directly to upload API (bypassing flow)\n const startTime = Date.now();\n\n const res = await uploadistaApi.uploadChunk(uploadFile.id, chunkData, {\n abortController,\n });\n\n const duration = Date.now() - startTime;\n\n if (!res.upload) {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: \"Upload chunk response missing upload data\",\n });\n }\n\n currentOffset = res.upload.offset;\n\n callbacks.onProgress?.(uploadFile.id, currentOffset, source.size ?? 0);\n callbacks.onChunkComplete?.(\n currentOffset - offset,\n offset,\n source.size ?? 0,\n );\n\n // Record detailed chunk metrics\n if (smartChunking?.enabled !== false) {\n const chunkIndex = Math.floor(offset / chunkSize);\n\n metrics.recordChunk({\n chunkIndex,\n size: chunkSize,\n duration,\n speed: chunkSize / (duration / 1000),\n success: true,\n retryCount: retryAttempt,\n networkCondition:\n smartChunker.getLastDecision()?.networkCondition?.type,\n chunkingStrategy: smartChunker.getLastDecision()?.strategy,\n });\n\n // Update smart chunker with connection metrics\n const connectionMetrics = uploadistaApi.getConnectionMetrics();\n smartChunker.updateConnectionMetrics(connectionMetrics);\n }\n\n // Check if upload is complete after uploading the chunk\n if (currentOffset >= (source.size ?? 0)) {\n if (source) source.close();\n\n // Complete metrics session\n if (smartChunking?.enabled !== false) {\n const sessionMetrics = metrics.endSession();\n if (sessionMetrics) {\n logger.log(\n `Flow upload completed: ${sessionMetrics.totalSize} bytes in ${sessionMetrics.totalDuration}ms, avg speed: ${Math.round(sessionMetrics.averageSpeed / 1024)}KB/s`,\n );\n }\n }\n\n // Upload is complete - finalize the flow\n logger.log(`Finalizing flow upload for job ${jobId}`);\n\n try {\n await uploadistaApi.resumeFlow(\n jobId,\n inputNodeId,\n {\n operation: \"finalize\",\n uploadId: uploadFile.id,\n },\n { contentType: \"application/json\" },\n );\n } catch (err) {\n // Finalization errors should not trigger chunk retry logic\n const error = new UploadistaError({\n name: \"FLOW_FINALIZE_FAILED\",\n message: `Failed to finalize flow upload for job ${jobId}`,\n cause: err as Error,\n });\n callbacks.onError?.(error);\n throw error;\n }\n return;\n }\n\n // Continue uploading next chunk\n await performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset: currentOffset,\n source,\n platformService,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n onRetry,\n abortController,\n ...callbacks,\n });\n } catch (err) {\n // Retry logic similar to single-upload\n if (retryDelays != null) {\n const shouldResetDelays =\n offset != null && currentOffset > offsetBeforeRetry;\n if (shouldResetDelays) {\n retryAttempt = 0;\n }\n\n const castedErr = !(err instanceof UploadistaError)\n ? 
new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Network error during flow upload\",\n cause: err as Error,\n })\n : err;\n\n if (\n shouldRetry(\n platformService,\n castedErr,\n retryAttempt,\n retryDelays,\n callbacks.onShouldRetry,\n )\n ) {\n const delay = retryDelays[retryAttempt];\n offsetBeforeRetry = offset;\n\n const timeout = platformService.setTimeout(async () => {\n await performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset,\n source,\n retryAttempt: retryAttempt + 1,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n abortController,\n ...callbacks,\n });\n }, delay);\n onRetry?.(timeout);\n } else {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `Failed to upload chunk for job ${jobId} at offset ${offset}`,\n cause: err as Error,\n });\n }\n }\n }\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\n\nimport { shouldRetry } from \"./chunk-upload\";\nimport type { Callbacks } from \"./single-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport { inStatusCategory } from \"./upload-utils\";\n\n/**\n * Result from initializing a flow input node\n */\nexport interface FlowInputInitResult {\n uploadFile: UploadFile;\n nodeId: string;\n}\n\n/**\n * Options for initializing a flow input node\n */\nexport interface InitializeFlowInputOptions {\n nodeId: string;\n jobId: string;\n source: FileSource;\n storageId: string;\n metadata?: Record<string, unknown>;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n platformService: PlatformService;\n callbacks?: Pick<Callbacks, \"onStart\" | \"onError\">;\n}\n\n/**\n * Options for uploading chunks for a flow input\n */\nexport interface UploadInputChunksOptions {\n nodeId: string;\n jobId: string;\n uploadFile: UploadFile;\n source: FileSource;\n offset?: number;\n retryAttempt?: number;\n abortController: AbortControllerLike;\n retryDelays: number[] | undefined;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n platformService: PlatformService;\n onRetry?: (timeout: Timeout) => void;\n callbacks?: Callbacks;\n}\n\n/**\n * Options for finalizing a flow input\n */\nexport interface FinalizeFlowInputOptions {\n nodeId: string;\n jobId: string;\n uploadId: string;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n callbacks?: Pick<Callbacks, \"onError\">;\n}\n\n/**\n * Initialize a single flow input node with the init operation.\n * This starts the input processing and waits for the upload ID.\n *\n * @param options - Configuration for initializing the input\n * @returns Upload file metadata and node ID\n */\nexport async function initializeFlowInput(\n options: InitializeFlowInputOptions,\n): Promise<FlowInputInitResult> {\n const {\n nodeId,\n jobId,\n source,\n storageId,\n metadata = {},\n uploadistaApi,\n logger,\n platformService,\n callbacks,\n } = options;\n\n // Build metadata for the input\n const inputMetadata = {\n originalName: source.name ?? 
\"unknown\",\n mimeType: source.type ?? \"application/octet-stream\",\n size: source.size ?? 0,\n ...metadata,\n };\n\n logger.log(`Initializing input node ${nodeId} for job ${jobId}`);\n\n // Resume the job with init operation for this specific node\n await uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n {\n operation: \"init\",\n storageId,\n metadata: inputMetadata,\n },\n { contentType: \"application/json\" },\n );\n\n logger.log(`Waiting for upload ID from node ${nodeId}`);\n\n // Poll job status until this node's task is paused with upload file\n const maxAttempts = 60; // 30 seconds total\n const pollInterval = 500; // 0.5 second\n let attempts = 0;\n let jobStatus = await uploadistaApi.getJobStatus(jobId);\n\n while (attempts < maxAttempts) {\n // Find this specific node's task\n const nodeTask = jobStatus.tasks.find((task) => task.nodeId === nodeId);\n\n // Check if this node is paused and has a result\n if (\n nodeTask?.status === \"paused\" &&\n nodeTask.result &&\n (nodeTask.result as UploadFile).id\n ) {\n const uploadFile = nodeTask.result as UploadFile;\n logger.log(`Upload ID received for node ${nodeId}: ${uploadFile.id}`);\n\n callbacks?.onStart?.({\n uploadId: uploadFile.id,\n size: source.size ?? null,\n });\n\n return { uploadFile, nodeId };\n }\n\n // If task failed, throw error\n if (nodeTask?.status === \"failed\") {\n const error = new UploadistaError({\n name: \"FLOW_INIT_FAILED\",\n message: `Input node ${nodeId} failed during initialization`,\n });\n callbacks?.onError?.(error);\n throw error;\n }\n\n await new Promise<void>((resolve) =>\n platformService.setTimeout(resolve, pollInterval),\n );\n jobStatus = await uploadistaApi.getJobStatus(jobId);\n attempts++;\n }\n\n const error = new UploadistaError({\n name: \"FLOW_TIMEOUT\",\n message: `Input node ${nodeId} did not return upload ID after init`,\n });\n callbacks?.onError?.(error);\n throw error;\n}\n\n/**\n * Upload chunks for a single flow input.\n * This uploads file data directly to the upload API with smart chunking and retry logic.\n *\n * @param options - Configuration for uploading chunks\n */\nexport async function uploadInputChunks(\n options: UploadInputChunksOptions,\n): Promise<void> {\n const {\n nodeId,\n jobId,\n uploadFile,\n source,\n offset = 0,\n abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n callbacks,\n } = options;\n\n let retryAttempt = options.retryAttempt ?? 0;\n let offsetBeforeRetry = offset;\n let currentOffset = offset;\n\n try {\n // Get optimal chunk size\n const remainingBytes = source.size ? source.size - offset : undefined;\n const chunkSizeDecision = smartChunker.getNextChunkSize(remainingBytes);\n const chunkSize = chunkSizeDecision.size;\n const endByte = Math.min(offset + chunkSize, source.size ?? 
0);\n const sliceResult = await source.slice(offset, endByte);\n\n if (!sliceResult || !sliceResult.value) {\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: `Failed to read chunk from file for node ${nodeId}`,\n });\n }\n\n const chunkData = sliceResult.value;\n\n // Upload chunk directly to upload API\n const startTime = Date.now();\n\n const res = await uploadistaApi.uploadChunk(uploadFile.id, chunkData, {\n abortController,\n });\n\n const duration = Date.now() - startTime;\n\n if (!res.upload) {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `Upload chunk response missing upload data for node ${nodeId}`,\n });\n }\n\n currentOffset = res.upload.offset;\n\n callbacks?.onProgress?.(uploadFile.id, currentOffset, source.size ?? 0);\n callbacks?.onChunkComplete?.(\n currentOffset - offset,\n offset,\n source.size ?? 0,\n );\n\n // Record detailed chunk metrics\n if (smartChunking?.enabled !== false) {\n const chunkIndex = Math.floor(offset / chunkSize);\n\n metrics.recordChunk({\n chunkIndex,\n size: chunkSize,\n duration,\n speed: chunkSize / (duration / 1000),\n success: true,\n retryCount: retryAttempt,\n networkCondition:\n smartChunker.getLastDecision()?.networkCondition?.type,\n chunkingStrategy: smartChunker.getLastDecision()?.strategy,\n });\n\n // Update smart chunker with connection metrics\n const connectionMetrics = uploadistaApi.getConnectionMetrics();\n smartChunker.updateConnectionMetrics(connectionMetrics);\n }\n\n // Check if upload is complete\n if (currentOffset >= (source.size ?? 0)) {\n source.close();\n\n // Complete metrics session\n if (smartChunking?.enabled !== false) {\n const sessionMetrics = metrics.endSession();\n if (sessionMetrics) {\n logger.log(\n `Upload completed for node ${nodeId}: ${sessionMetrics.totalSize} bytes in ${sessionMetrics.totalDuration}ms, avg speed: ${Math.round(sessionMetrics.averageSpeed / 1024)}KB/s`,\n );\n }\n }\n\n return;\n }\n\n // Continue uploading next chunk\n await uploadInputChunks({\n ...options,\n offset: currentOffset,\n retryAttempt: 0, // Reset retry count on successful chunk\n });\n } catch (err) {\n // Retry logic\n if (retryDelays != null) {\n const shouldResetDelays = currentOffset > offsetBeforeRetry;\n if (shouldResetDelays) {\n // biome-ignore lint: mutation needed for retry logic\n retryAttempt = 0;\n }\n\n const castedErr = !(err instanceof UploadistaError)\n ? 
new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: `Network error during upload for node ${nodeId}`,\n cause: err as Error,\n })\n : err;\n\n if (\n shouldRetry(\n platformService,\n castedErr,\n retryAttempt,\n retryDelays,\n callbacks?.onShouldRetry,\n )\n ) {\n const delay = retryDelays[retryAttempt];\n offsetBeforeRetry = offset;\n\n const timeout = platformService.setTimeout(async () => {\n await uploadInputChunks({\n ...options,\n offset,\n retryAttempt: retryAttempt + 1,\n });\n }, delay);\n onRetry?.(timeout);\n } else {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `Failed to upload chunk for node ${nodeId} at offset ${offset}`,\n cause: err as Error,\n });\n }\n } else {\n throw err;\n }\n }\n}\n\n/**\n * Finalize a flow input by sending the finalize operation.\n * This tells the flow that this input has completed uploading.\n *\n * @param options - Configuration for finalizing the input\n */\nexport async function finalizeFlowInput(\n options: FinalizeFlowInputOptions,\n): Promise<void> {\n const { nodeId, jobId, uploadId, uploadistaApi, logger, callbacks } = options;\n\n logger.log(`Finalizing input node ${nodeId} for job ${jobId}`);\n\n try {\n await uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n {\n operation: \"finalize\",\n uploadId,\n },\n { contentType: \"application/json\" },\n );\n\n logger.log(`Input node ${nodeId} finalized successfully`);\n } catch (err) {\n const error = new UploadistaError({\n name: \"FLOW_FINALIZE_FAILED\",\n message: `Failed to finalize input node ${nodeId} for job ${jobId}`,\n cause: err as Error,\n });\n callbacks?.onError?.(error);\n throw error;\n }\n}\n","import type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport type { PreviousUpload } from \"../types/previous-upload\";\n\n/**\n * Find previous uploads by fingerprint\n */\nexport async function findPreviousUploads(\n clientStorage: ClientStorage,\n fingerprint: string,\n): Promise<PreviousUpload[]> {\n return clientStorage.findUploadsByFingerprint(fingerprint);\n}\n\n/**\n * Resume from a previous upload\n */\nexport function resumeFromPreviousUpload(previousUpload: PreviousUpload): {\n uploadId: string | null;\n parallelUploadUrls: string[] | undefined;\n clientStorageKey: string | null;\n} {\n return {\n uploadId: previousUpload.uploadId ?? null,\n parallelUploadUrls: previousUpload.parallelUploadUrls,\n clientStorageKey: previousUpload.clientStorageKey,\n };\n}\n\n/**\n * Add the upload URL to the URL storage, if possible.\n */\nexport async function saveUploadInClientStorage({\n clientStorage,\n fingerprint,\n size,\n metadata,\n clientStorageKey,\n storeFingerprintForResuming,\n generateId,\n}: {\n clientStorage: ClientStorage;\n fingerprint: string;\n size: number;\n metadata: Record<string, string | number | boolean>;\n clientStorageKey: string | null;\n storeFingerprintForResuming: boolean;\n generateId: IdGenerationService;\n}): Promise<string | undefined> {\n // We do not store the upload key\n // - if it was disabled in the option, or\n // - if no fingerprint was calculated for the input (i.e. 
a stream), or\n // - if the key is already stored.\n if (\n !storeFingerprintForResuming ||\n !fingerprint ||\n clientStorageKey != null\n ) {\n return undefined;\n }\n\n const storedUpload: PreviousUpload = {\n size,\n metadata,\n creationTime: new Date().toString(),\n clientStorageKey: fingerprint,\n };\n\n const newClientStorageKey = await clientStorage.addUpload(\n fingerprint,\n storedUpload,\n { generateId },\n );\n\n return newClientStorageKey;\n}\n\n/**\n * Remove the entry in the URL storage, if it has been saved before.\n */\nexport async function removeFromClientStorage(\n clientStorage: ClientStorage,\n clientStorageKey: string,\n): Promise<void> {\n if (!clientStorageKey) return;\n await clientStorage.removeUpload(clientStorageKey);\n}\n","import type { InputFile, UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type { AbortControllerLike } from \"../services/abort-controller-service\";\nimport type { ChecksumService } from \"../services/checksum-service\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { WebSocketLike } from \"../services/websocket-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport {\n type OnProgress,\n type OnShouldRetry,\n shouldRetry,\n uploadChunk,\n} from \"./chunk-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport {\n removeFromClientStorage,\n saveUploadInClientStorage,\n} from \"./upload-storage\";\nimport { encodeMetadata, inStatusCategory } from \"./upload-utils\";\n\nexport type Callbacks = {\n onProgress?: OnProgress;\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n onSuccess?: (payload: UploadFile) => void;\n onError?: (error: Error | UploadistaError) => void;\n onStart?: (file: { uploadId: string; size: number | null }) => void;\n onJobStart?: (jobId: string) => void;\n onShouldRetry?: OnShouldRetry;\n};\n\nexport type SingleUploadResult = {\n uploadIdStorageKey: string | undefined;\n uploadId: string;\n offset: number;\n};\n\n/**\n * Start uploading the file using PATCH requests. The file will be divided\n * into chunks as specified in the chunkSize option. 
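A minimal sketch follows, assuming the service instances (abortControllerFactory, smartChunker, uploadistaApi, logger, metrics, platformService) and the result of a prior createUpload call (`created`) are already wired up elsewhere; it is illustrative, not the only valid configuration.\n *\n * @example\n * ```typescript\n * // Illustrative wiring only; all referenced instances are assumed to exist in scope.\n * await performUpload({\n *   uploadId: created.uploadId, // from a prior createUpload call\n *   offset: created.offset,\n *   source,\n *   uploadLengthDeferred: false,\n *   abortController: abortControllerFactory.create(),\n *   retryDelays: [0, 1000, 3000],\n *   smartChunker,\n *   uploadistaApi,\n *   logger,\n *   metrics,\n *   platformService,\n *   onProgress: (id, sent, total) => logger.log(id + \": \" + sent + \"/\" + total),\n * });\n * ```\n *\n * 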
During the upload\n * the onProgress event handler may be invoked multiple times.\n */\nexport async function performUpload({\n uploadId,\n offset,\n source,\n uploadLengthDeferred,\n retryAttempt = 0,\n abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry,\n ...callbacks\n}: {\n uploadId: string;\n offset: number;\n retryAttempt?: number;\n source: FileSource;\n abortController: AbortControllerLike;\n uploadLengthDeferred: boolean | undefined;\n retryDelays: number[] | undefined;\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n platformService: PlatformService;\n onRetry?: (timeout: Timeout) => void;\n} & Callbacks): Promise<void> {\n let offsetBeforeRetry = offset;\n let currentOffset = offset;\n\n try {\n const res = await uploadChunk({\n uploadId,\n source,\n offset,\n uploadLengthDeferred,\n onProgress: callbacks.onProgress,\n abortController,\n smartChunker,\n uploadistaApi,\n logger,\n });\n\n if (!inStatusCategory(res.status, 200) || res.upload == null) {\n throw new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while uploading chunk\",\n });\n }\n\n currentOffset = res.upload.offset;\n\n callbacks.onProgress?.(uploadId, currentOffset, res.upload.size ?? 0);\n callbacks.onChunkComplete?.(\n currentOffset - offset,\n offset,\n res.upload?.size ?? 0,\n );\n\n // Record detailed chunk metrics\n if (smartChunking?.enabled !== false) {\n const chunkIndex = Math.floor(offset / (currentOffset - offset || 1));\n const chunkSize = currentOffset - offset;\n const chunkDuration = Date.now() - (Date.now() - 100); // Approximate, real timing is in uploadChunk\n const lastDecision = smartChunker.getLastDecision();\n\n metrics.recordChunk({\n chunkIndex,\n size: chunkSize,\n duration: chunkDuration,\n speed: chunkSize / (chunkDuration / 1000),\n success: true,\n retryCount: retryAttempt,\n networkCondition: lastDecision?.networkCondition?.type,\n chunkingStrategy: lastDecision?.strategy,\n });\n\n // Update smart chunker with connection metrics for pooling optimization\n const connectionMetrics = uploadistaApi.getConnectionMetrics();\n smartChunker.updateConnectionMetrics(connectionMetrics);\n }\n\n if (currentOffset >= (source.size ?? 
0)) {\n if (source) source.close();\n\n // Complete metrics session\n if (smartChunking?.enabled !== false) {\n const sessionMetrics = metrics.endSession();\n if (sessionMetrics) {\n logger.log(\n `Upload completed: ${sessionMetrics.totalSize} bytes in ${sessionMetrics.totalDuration}ms, avg speed: ${Math.round(sessionMetrics.averageSpeed / 1024)}KB/s`,\n );\n }\n }\n\n callbacks.onSuccess?.(res.upload);\n return;\n }\n\n await performUpload({\n uploadId,\n offset: currentOffset,\n source,\n uploadLengthDeferred,\n retryDelays,\n smartChunker,\n platformService,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n onRetry,\n abortController,\n ...callbacks,\n });\n } catch (err) {\n // Check if we should retry, when enabled, before sending the error to the user.\n if (retryDelays != null) {\n // We will reset the attempt counter if\n // - we were already able to connect to the server (offset != null) and\n // - we were able to upload a small chunk of data to the server\n const shouldResetDelays =\n offset != null && currentOffset > offsetBeforeRetry;\n if (shouldResetDelays) {\n retryAttempt = 0;\n }\n\n const castedErr = !(err instanceof UploadistaError)\n ? new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Network error\",\n cause: err as Error,\n })\n : err;\n\n if (\n shouldRetry(\n platformService,\n castedErr,\n retryAttempt,\n retryDelays,\n callbacks.onShouldRetry,\n )\n ) {\n const delay = retryDelays[retryAttempt];\n\n offsetBeforeRetry = offset;\n\n const timeout = platformService.setTimeout(async () => {\n await performUpload({\n uploadId,\n offset,\n source,\n retryAttempt: retryAttempt + 1,\n uploadLengthDeferred,\n retryDelays,\n smartChunker,\n platformService,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n onRetry,\n abortController,\n ...callbacks,\n });\n }, delay);\n onRetry?.(timeout);\n } else {\n throw new UploadistaError({\n name: \"UPLOAD_CHUNK_FAILED\",\n message: `failed to upload chunk for ${uploadId} at offset ${offset}`,\n cause: err as Error,\n });\n }\n }\n }\n}\n\n/**\n * Create a new upload using the creation extension by sending a POST\n * request to the endpoint. 
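A minimal sketch follows, assuming every service instance shown (uploadistaApi, checksumService, clientStorage, generateId, openWebSocket, closeWebSocket, platformService, logger) is constructed elsewhere; the fingerprint, storageId, and metadata values are placeholders.\n *\n * @example\n * ```typescript\n * // Illustrative call; placeholder values, services assumed to exist in scope.\n * const created = await createUpload({\n *   fingerprint: \"example-fingerprint\", // placeholder\n *   storageId: \"default\", // placeholder\n *   source,\n *   uploadLengthDeferred: false,\n *   metadata: { context: \"avatar\" },\n *   uploadistaApi,\n *   logger,\n *   checksumService,\n *   clientStorage,\n *   generateId,\n *   storeFingerprintForResuming: true,\n *   openWebSocket,\n *   closeWebSocket,\n *   platformService,\n * });\n * ```\n *\n * 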
After successful creation the file will be\n * uploaded\n */\nexport async function createUpload({\n fingerprint,\n storageId,\n source,\n uploadLengthDeferred,\n metadata,\n uploadistaApi,\n logger,\n checksumService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n computeChecksum = true,\n checksumAlgorithm = \"sha256\",\n platformService,\n ...callbacks\n}: {\n fingerprint: string;\n storageId: string;\n source: FileSource;\n uploadLengthDeferred: boolean | undefined;\n metadata: Record<string, string>;\n uploadistaApi: UploadistaApi;\n logger: Logger;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n closeWebSocket: (uploadId: string) => void;\n checksumService: ChecksumService;\n computeChecksum?: boolean;\n checksumAlgorithm?: string;\n platformService: PlatformService;\n} & Callbacks): Promise<SingleUploadResult | undefined> {\n if (!uploadLengthDeferred && source.size == null) {\n const error = new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\",\n message: \"expected size to be set\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n // Compute checksum if enabled and file is a File object\n let checksum: string | undefined;\n if (computeChecksum && platformService.isFileLike(source.input)) {\n try {\n logger.log(\"Computing file checksum...\");\n checksum = await checksumService.computeChecksum(\n new Uint8Array(source.input as any),\n );\n logger.log(`Checksum computed: ${checksum}`);\n } catch (error) {\n logger.log(\n `Warning: Failed to compute checksum: ${error instanceof Error ? error.message : \"Unknown error\"}`,\n );\n // Continue without checksum if computation fails\n }\n }\n\n const createUploadData: InputFile = {\n uploadLengthDeferred,\n storageId,\n size: source.size ?? 0,\n metadata: metadata ? encodeMetadata(metadata) : undefined,\n fileName: source.name ?? undefined,\n type: source.type ?? \"\",\n lastModified: source.lastModified ?? undefined,\n checksum,\n checksumAlgorithm: checksum ? checksumAlgorithm : undefined,\n };\n\n const { upload, status } = await uploadistaApi.createUpload(createUploadData);\n\n if (!inStatusCategory(status, 200) || upload == null) {\n const error = new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while creating upload\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n logger.log(`Created upload ${upload.id}`);\n\n openWebSocket(upload.id);\n\n if (upload.size === 0) {\n // Nothing to upload and file was successfully created\n callbacks.onSuccess?.(upload);\n if (source) source.close();\n closeWebSocket(upload.id);\n return;\n }\n\n const uploadIdStorageKey = await saveUploadInClientStorage({\n clientStorage,\n fingerprint,\n size: upload.size ?? 0,\n metadata: upload.metadata ?? {},\n clientStorageKey: null,\n storeFingerprintForResuming,\n generateId,\n });\n\n callbacks.onStart?.({\n uploadId: upload.id,\n size: upload.size ?? null,\n });\n\n return {\n uploadIdStorageKey,\n uploadId: upload.id,\n offset: upload.offset,\n };\n}\n\n/**\n * Try to resume an existing upload. First a HEAD request will be sent\n * to retrieve the offset. If the request fails a new upload will be\n * created. 
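A minimal sketch follows, assuming the `previous` record comes from findPreviousUploads and the remaining services are wired up as in the createUpload example above.\n *\n * @example\n * ```typescript\n * // Illustrative resume; ids are assumed to come from a stored PreviousUpload record.\n * const resumed = await resumeUpload({\n *   uploadId: previous.uploadId, // assumed stored upload id\n *   uploadIdStorageKey: previous.clientStorageKey,\n *   storageId: \"default\", // placeholder\n *   fingerprint: \"example-fingerprint\", // placeholder\n *   source,\n *   uploadLengthDeferred: false,\n *   uploadistaApi,\n *   logger,\n *   platformService,\n *   checksumService,\n *   clientStorage,\n *   generateId,\n *   storeFingerprintForResuming: true,\n *   openWebSocket,\n * });\n * ```\n *\n * 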
In the case of a successful response the file will be uploaded.\n */\nexport async function resumeUpload({\n uploadId,\n storageId,\n uploadIdStorageKey,\n fingerprint,\n source,\n uploadLengthDeferred,\n uploadistaApi,\n logger,\n platformService,\n checksumService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n ...callbacks\n}: {\n uploadId: string;\n storageId: string;\n uploadIdStorageKey: string;\n fingerprint: string;\n platformService: PlatformService;\n source: FileSource;\n uploadLengthDeferred: boolean | undefined;\n uploadistaApi: UploadistaApi;\n checksumService: ChecksumService;\n logger: Logger;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n} & Callbacks): Promise<SingleUploadResult | undefined> {\n const res = await uploadistaApi.getUpload(uploadId);\n const status = res.status;\n\n if (!inStatusCategory(status, 200)) {\n // If the upload is locked (indicated by the 423 Locked status code), we\n // emit an error instead of directly starting a new upload. This way the\n // retry logic can catch the error and will retry the upload. An upload\n // is usually locked for a short period of time and will be available\n // afterwards.\n if (status === 423) {\n const error = new UploadistaError({\n name: \"UPLOAD_LOCKED\",\n message: \"upload is currently locked; retry later\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n if (inStatusCategory(status, 400)) {\n // Remove stored fingerprint and corresponding endpoint,\n // on client errors since the file can not be found\n await removeFromClientStorage(clientStorage, uploadIdStorageKey);\n }\n\n // Try to create a new upload\n return await createUpload({\n platformService,\n fingerprint,\n storageId,\n source,\n uploadLengthDeferred,\n metadata: {},\n uploadistaApi,\n logger,\n checksumService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket: () => {}, // Placeholder, will be provided by caller\n ...callbacks,\n });\n }\n\n const upload = res.upload;\n if (upload == null) {\n const error = new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while resuming upload\",\n });\n callbacks.onError?.(error);\n throw error;\n }\n\n await saveUploadInClientStorage({\n clientStorage,\n fingerprint,\n size: upload.size ?? 0,\n metadata: upload.metadata ?? {},\n clientStorageKey: uploadIdStorageKey,\n storeFingerprintForResuming,\n generateId,\n });\n\n // Upload has already been completed and we do not need to send additional\n // data to the server\n if (upload.offset === upload.size) {\n return undefined;\n }\n\n openWebSocket(upload.id);\n\n return {\n uploadId,\n uploadIdStorageKey,\n offset: upload.offset,\n };\n}\n\n/**\n * Initiate the uploading procedure for a non-parallel upload. 
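A minimal entry-point sketch follows, assuming the service instances exist in scope; pass null ids to create a fresh upload, or previously stored ids to resume.\n *\n * @example\n * ```typescript\n * // Illustrative call; placeholder values, services assumed to exist in scope.\n * await startSingleUpload({\n *   source,\n *   uploadId: null, // or a stored id to resume\n *   uploadIdStorageKey: null,\n *   storageId: \"default\", // placeholder\n *   fingerprint: \"example-fingerprint\", // placeholder\n *   platformService,\n *   uploadLengthDeferred: false,\n *   uploadistaApi,\n *   checksumService,\n *   logger,\n *   clientStorage,\n *   generateId,\n *   storeFingerprintForResuming: true,\n *   openWebSocket,\n *   closeWebSocket,\n * });\n * ```\n *\n * 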
Here the entire file is\n * uploaded in a sequential manner.\n */\nexport async function startSingleUpload({\n source,\n uploadId,\n uploadIdStorageKey,\n storageId,\n fingerprint,\n platformService,\n uploadLengthDeferred,\n uploadistaApi,\n checksumService,\n logger,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n ...callbacks\n}: {\n source: FileSource;\n uploadId: string | null;\n uploadIdStorageKey: string | null;\n storageId: string;\n fingerprint: string;\n platformService: PlatformService;\n uploadLengthDeferred: boolean | undefined;\n uploadistaApi: UploadistaApi;\n checksumService: ChecksumService;\n logger: Logger;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n closeWebSocket: (uploadId: string) => void;\n} & Callbacks): Promise<SingleUploadResult | undefined> {\n // The upload had been started previously and we should reuse this URL.\n if (uploadId != null && uploadIdStorageKey != null) {\n logger.log(`Resuming upload from previous id: ${uploadId}`);\n return await resumeUpload({\n uploadId,\n uploadIdStorageKey,\n storageId,\n fingerprint,\n source,\n checksumService,\n uploadLengthDeferred,\n uploadistaApi,\n logger,\n platformService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n ...callbacks,\n });\n }\n\n // An upload has not started for the file yet, so we start a new one\n logger.log(\"Creating a new upload\");\n return await createUpload({\n fingerprint,\n storageId,\n source,\n uploadLengthDeferred,\n metadata: {},\n uploadistaApi,\n logger,\n checksumService,\n platformService,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n ...callbacks,\n });\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport type {\n AbortControllerFactory,\n AbortControllerLike,\n} from \"../services/abort-controller-service\";\nimport type { ChecksumService } from \"../services/checksum-service\";\nimport type { FileSource } from \"../services/file-reader-service\";\nimport type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type { WebSocketLike } from \"../services/websocket-service\";\nimport type { SmartChunker, SmartChunkerConfig } from \"../smart-chunker\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport { type Callbacks, createUpload, performUpload } from \"./single-upload\";\nimport type { UploadMetrics } from \"./upload-metrics\";\nimport { calculateSegments } from \"./upload-utils\";\n\nexport type ParallelUploadSegment = {\n uploadId: string;\n uploadIdStorageKey: string | undefined;\n segmentIndex: number;\n startByte: number;\n endByte: number;\n offset: number;\n abortController: AbortControllerLike;\n retryTimeout: Timeout | null;\n};\n\nexport type ParallelUploadState = {\n segments: ParallelUploadSegment[];\n totalProgress: number;\n completed: boolean;\n failed: boolean;\n error?: Error;\n};\n\nexport type ParallelUploadResult = {\n parallelState: ParallelUploadState;\n abort: () => Promise<void>;\n};\n\n/**\n * Initiate the uploading procedure for a parallelized upload, where one file is split into\n * multiple requests 
which are run in parallel.\n */\nexport async function startParallelUpload({\n source,\n storageId,\n fingerprint,\n uploadLengthDeferred,\n parallelUploads,\n parallelChunkSize,\n retryDelays,\n smartChunker,\n uploadistaApi,\n logger,\n checksumService,\n smartChunking,\n metrics,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n terminate,\n abortControllerFactory,\n platformService,\n ...callbacks\n}: {\n source: FileSource;\n storageId: string;\n fingerprint: string;\n uploadLengthDeferred: boolean | undefined;\n parallelUploads: number;\n parallelChunkSize?: number;\n retryDelays?: number[];\n smartChunker: SmartChunker;\n uploadistaApi: UploadistaApi;\n checksumService: ChecksumService;\n logger: Logger;\n smartChunking?: SmartChunkerConfig;\n metrics: UploadMetrics;\n clientStorage: ClientStorage;\n generateId: IdGenerationService;\n storeFingerprintForResuming: boolean;\n openWebSocket: (uploadId: string) => WebSocketLike;\n closeWebSocket: (uploadId: string) => void;\n terminate: (uploadId: string) => Promise<void>;\n abortControllerFactory: AbortControllerFactory;\n platformService: PlatformService;\n} & Callbacks): Promise<ParallelUploadResult | undefined> {\n if (!source.size || source.size === 0) {\n callbacks.onError?.(\n new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\",\n message: \"Parallel upload requires a known file size\",\n }),\n );\n return;\n }\n\n // Calculate segments for parallel upload\n const segments = calculateSegments(\n source.size,\n parallelUploads,\n parallelChunkSize,\n );\n logger.log(`Starting parallel upload with ${segments.length} segments`);\n\n // Initialize parallel upload state\n const parallelState: ParallelUploadState = {\n segments: [],\n totalProgress: 0,\n completed: false,\n failed: false,\n };\n\n // Progress tracking for aggregation\n const segmentProgress = new Map<number, number>();\n const segmentTotals = new Map<number, number>();\n\n const updateTotalProgress = () => {\n const totalBytes = Array.from(segmentTotals.values()).reduce(\n (sum, size) => sum + size,\n 0,\n );\n const progressBytes = Array.from(segmentProgress.values()).reduce(\n (sum, progress) => sum + progress,\n 0,\n );\n parallelState.totalProgress =\n totalBytes > 0 ? progressBytes / totalBytes : 0;\n\n // Aggregate progress callback\n if (callbacks.onProgress && totalBytes > 0) {\n callbacks.onProgress(`parallel-upload`, progressBytes, totalBytes);\n }\n };\n\n try {\n // Create upload sessions for each segment\n const segmentUploads = await Promise.all(\n segments.map(async (segment) => {\n // Create a segmented source for this chunk\n const segmentSource: FileSource = {\n ...source,\n size: segment.endByte - segment.startByte,\n async slice(start, end) {\n // Adjust slice to segment boundaries\n const actualStart = segment.startByte + (start ?? 0);\n const actualEnd = Math.min(\n segment.startByte + (end ?? 
segment.endByte - segment.startByte),\n segment.endByte,\n );\n return await source.slice(actualStart, actualEnd);\n },\n };\n\n const createResult = await createUpload({\n fingerprint: `${fingerprint}-segment-${segment.segmentIndex}`,\n storageId,\n source: segmentSource,\n uploadLengthDeferred,\n platformService,\n metadata: {\n parallelUpload: \"true\",\n segmentIndex: segment.segmentIndex.toString(),\n totalSegments: segments.length.toString(),\n parentFingerprint: fingerprint,\n },\n checksumService,\n uploadistaApi,\n logger,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket,\n closeWebSocket,\n onSuccess: () => {},\n onError: (error) =>\n logger.log(\n `Segment ${segment.segmentIndex} creation error: ${error}`,\n ),\n onStart: (info) => {\n segmentTotals.set(segment.segmentIndex, info.size ?? 0);\n updateTotalProgress();\n },\n });\n\n if (!createResult) {\n throw new UploadistaError({\n name: \"PARALLEL_SEGMENT_CREATION_FAILED\",\n message: `Failed to create upload segment ${segment.segmentIndex}`,\n });\n }\n\n const parallelSegment: ParallelUploadSegment = {\n uploadId: createResult.uploadId,\n uploadIdStorageKey: createResult.uploadIdStorageKey,\n segmentIndex: segment.segmentIndex,\n startByte: segment.startByte,\n endByte: segment.endByte,\n offset: createResult.offset,\n abortController: abortControllerFactory.create(),\n retryTimeout: null,\n };\n\n return {\n segment: parallelSegment,\n source: segmentSource,\n };\n }),\n );\n\n // Store segments in state\n parallelState.segments = segmentUploads.map((upload) => upload.segment);\n\n // Notify start with combined upload info\n callbacks.onStart?.({\n uploadId: `parallel-${parallelState.segments.map((s) => s.uploadId).join(\",\")}`,\n size: source.size,\n });\n\n // Start parallel upload for each segment\n const uploadPromises = segmentUploads.map(\n async ({ segment, source: segmentSource }) => {\n try {\n await performUpload({\n uploadId: segment.uploadId,\n offset: segment.offset,\n source: segmentSource,\n uploadLengthDeferred,\n abortController: segment.abortController,\n retryDelays,\n smartChunker,\n uploadistaApi,\n platformService,\n logger,\n smartChunking,\n metrics,\n onProgress: (_, bytes, total) => {\n segmentProgress.set(segment.segmentIndex, bytes);\n if (total) segmentTotals.set(segment.segmentIndex, total);\n updateTotalProgress();\n },\n onChunkComplete: (chunkSize, bytesAccepted, bytesTotal) => {\n if (callbacks.onChunkComplete) {\n callbacks.onChunkComplete(chunkSize, bytesAccepted, bytesTotal);\n }\n },\n onSuccess: (_uploadFile) => {\n logger.log(\n `Segment ${segment.segmentIndex} completed successfully`,\n );\n // Mark this segment as completed\n segmentProgress.set(\n segment.segmentIndex,\n segmentTotals.get(segment.segmentIndex) ?? 0,\n );\n updateTotalProgress();\n },\n onShouldRetry: (error, retryAttempt) => {\n logger.log(\n `Segment ${segment.segmentIndex} retry attempt ${retryAttempt}: ${error}`,\n );\n return retryAttempt < (retryDelays?.length ?? 
0);\n },\n onRetry: (timeout) => {\n segment.retryTimeout = timeout;\n },\n onError: (error) => {\n logger.log(`Segment ${segment.segmentIndex} failed: ${error}`);\n throw error;\n },\n });\n } catch (error) {\n logger.log(`Segment ${segment.segmentIndex} upload failed: ${error}`);\n throw new UploadistaError({\n name: \"PARALLEL_SEGMENT_UPLOAD_FAILED\",\n message: `Segment ${segment.segmentIndex} upload failed`,\n cause: error as Error,\n });\n }\n },\n );\n\n // Wait for all segments to complete\n await Promise.all(uploadPromises);\n\n // Mark as completed\n parallelState.completed = true;\n logger.log(\"All parallel upload segments completed successfully\");\n\n // Call success callback with aggregated result\n if (callbacks.onSuccess) {\n const aggregatedResult: UploadFile = {\n id: `parallel-${parallelState.segments.map((s) => s.uploadId).join(\",\")}`,\n offset: source.size,\n size: source.size,\n storage: {\n id: storageId,\n type: \"parallel-upload\",\n },\n metadata: {\n parallelUpload: \"true\",\n totalSegments: segments.length.toString(),\n fingerprint,\n },\n };\n callbacks.onSuccess(aggregatedResult);\n }\n\n // Close all sources\n for (const upload of segmentUploads) {\n upload.source.close?.();\n }\n\n return {\n parallelState,\n abort: async () => {\n await abortParallelUpload(\n parallelState,\n logger,\n terminate,\n closeWebSocket,\n platformService,\n );\n },\n };\n } catch (error) {\n parallelState.failed = true;\n parallelState.error = error as Error;\n\n // Clean up any created segments\n await abortParallelUpload(\n parallelState,\n logger,\n terminate,\n closeWebSocket,\n platformService,\n );\n\n callbacks.onError?.(error as Error);\n throw error;\n }\n}\n\n/**\n * Abort a parallel upload by cleaning up all segments\n */\nexport async function abortParallelUpload(\n state: ParallelUploadState,\n logger: Logger,\n terminate: (uploadId: string) => Promise<void>,\n closeWebSocket: (uploadId: string) => void,\n platformService: PlatformService,\n): Promise<void> {\n logger.log(\"Aborting parallel upload...\");\n\n // Abort all segment controllers\n for (const segment of state.segments) {\n segment.abortController.abort();\n\n if (segment.retryTimeout) {\n platformService.clearTimeout(segment.retryTimeout);\n segment.retryTimeout = null;\n }\n\n // Attempt to terminate the upload on the server\n try {\n await terminate(segment.uploadId);\n } catch (error) {\n logger.log(\n `Failed to terminate segment ${segment.segmentIndex}: ${error}`,\n );\n }\n\n // Close websockets\n closeWebSocket(segment.uploadId);\n }\n\n state.completed = false;\n state.failed = true;\n logger.log(\"Parallel upload aborted\");\n}\n","/**\n * Platform-agnostic service for platform-specific APIs\n * Provides abstraction for timer functions and platform detection\n */\n\nexport type Timeout = unknown;\n\nexport interface PlatformService {\n /**\n * Schedule a callback to run after a delay\n */\n setTimeout: (callback: () => void, ms: number | undefined) => Timeout;\n\n /**\n * Cancel a scheduled callback\n */\n clearTimeout: (id: Timeout) => void;\n\n /**\n * Check if we're in a browser environment\n */\n isBrowser: () => boolean;\n\n /**\n * Check if network is online\n */\n isOnline: () => boolean;\n\n /**\n * Check if a value is a File-like object\n */\n isFileLike: (value: unknown) => boolean;\n\n /**\n * Get file name from File-like object\n */\n getFileName: (file: unknown) => string | undefined;\n\n /**\n * Get file type from File-like object\n */\n getFileType: (file: unknown) => string | 
undefined;\n\n /**\n * Get file size from File-like object\n */\n getFileSize: (file: unknown) => number | undefined;\n\n /**\n * Get file last modified timestamp from File-like object\n */\n getFileLastModified: (file: unknown) => number | undefined;\n}\n\n/**\n * Simple async wait utility\n */\nexport async function wait(\n platformService: PlatformService,\n ms: number,\n): Promise<void> {\n return new Promise<void>((resolve) =>\n platformService.setTimeout(resolve, ms),\n );\n}\n","import type { UploadistaApi } from \"../client/uploadista-api\";\nimport { UploadistaError } from \"../error\";\nimport type { AbortControllerLike } from \"../services/abort-controller-service\";\nimport {\n type PlatformService,\n type Timeout,\n wait,\n} from \"../services/platform-service\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport { shouldRetry } from \"./chunk-upload\";\nimport { removeFromClientStorage } from \"./upload-storage\";\n\n/**\n * Use the Termination extension to delete an upload from the server by sending a DELETE\n * request to the specified upload URL. This is only possible if the server supports the\n * Termination extension. If the `retryDelays` property is set, the method will\n * also retry if an error occurs.\n */\nexport async function terminate(\n uploadId: string,\n uploadistaApi: UploadistaApi,\n platformService: PlatformService,\n retryDelays: number[] | undefined,\n retryAttempt = 0,\n): Promise<void> {\n try {\n const res = await uploadistaApi.deleteUpload(uploadId);\n // A 204 response indicates a successful request\n if (res.status === 204) {\n return;\n }\n\n throw new UploadistaError({\n name: \"NETWORK_UNEXPECTED_RESPONSE\",\n message: \"Unexpected response while terminating upload\",\n });\n } catch (err) {\n const error = err as UploadistaError;\n\n if (!shouldRetry(platformService, error, retryAttempt, retryDelays)) {\n throw err;\n }\n\n // The current retry attempt is used as an index into the retryDelays array. Once the\n // attempts exceed the array length, shouldRetry returns false and the error bubbles up.\n // We call the terminate function recursively while incrementing the retry attempt.\n const delay = retryDelays?.[retryAttempt] ?? 0;\n\n await wait(platformService, delay);\n\n return await terminate(\n uploadId,\n uploadistaApi,\n platformService,\n retryDelays,\n retryAttempt + 1,\n );\n }\n}\n\n/**\n * Abort any running request and stop the current upload. After abort is called, no event\n * handler will be invoked anymore. 
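A minimal teardown sketch follows, assuming `created` and the service instances exist in scope.\n *\n * @example\n * ```typescript\n * // Illustrative teardown; referenced instances are assumed to exist in scope.\n * await abort({\n *   uploadId: created.uploadId,\n *   uploadIdStorageKey: created.uploadIdStorageKey,\n *   retryTimeout: null,\n *   shouldTerminate: true, // also delete the upload on the server\n *   abortController,\n *   uploadistaApi,\n *   platformService,\n *   retryDelays: [0, 1000],\n *   clientStorage,\n * });\n * ```\n *\n * 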
You can use the `start` method to resume the upload\n * again.\n * If `shouldTerminate` is true, the `terminate` function will be called to remove the\n * current upload from the server.\n */\nexport async function abort({\n uploadId,\n uploadIdStorageKey,\n retryTimeout,\n shouldTerminate,\n abortController,\n uploadistaApi,\n platformService,\n retryDelays,\n clientStorage,\n}: {\n uploadId: string;\n uploadIdStorageKey: string | undefined;\n retryTimeout: Timeout | null;\n shouldTerminate: boolean;\n abortController: AbortControllerLike;\n uploadistaApi: UploadistaApi;\n platformService: PlatformService;\n retryDelays?: number[];\n clientStorage: ClientStorage;\n}): Promise<void> {\n // Stop any current running request.\n abortController.abort();\n\n // Stop any timeout used for initiating a retry.\n if (retryTimeout != null) {\n platformService.clearTimeout(retryTimeout);\n }\n\n if (!shouldTerminate || uploadId == null) {\n return;\n }\n\n await terminate(uploadId, uploadistaApi, platformService, retryDelays);\n\n if (uploadIdStorageKey != null) {\n return removeFromClientStorage(clientStorage, uploadIdStorageKey);\n }\n}\n","import type { ChunkMetrics } from \"../types/chunk-metrics\";\nimport type { PerformanceInsights } from \"../types/performance-insights\";\nimport type { UploadSessionMetrics } from \"../types/upload-session-metrics\";\n\nexport interface UploadMetricsConfig {\n maxChunkHistory?: number;\n enableDetailedMetrics?: boolean;\n performanceThresholds?: {\n slowSpeed: number; // bytes per second\n fastSpeed: number; // bytes per second\n highRetryRate: number; // ratio\n };\n}\n\nexport class UploadMetrics {\n private config: Required<UploadMetricsConfig>;\n private chunkHistory: ChunkMetrics[] = [];\n private currentSession: Partial<UploadSessionMetrics> = {};\n private sessionStartTime = 0;\n\n constructor(config: UploadMetricsConfig = {}) {\n this.config = {\n maxChunkHistory: config.maxChunkHistory ?? 1000,\n enableDetailedMetrics: config.enableDetailedMetrics ?? 
true,\n performanceThresholds: {\n slowSpeed: 100 * 1024, // 100 KB/s\n fastSpeed: 5 * 1024 * 1024, // 5 MB/s\n highRetryRate: 0.2, // 20%\n ...config.performanceThresholds,\n },\n };\n }\n\n startSession(\n uploadId: string,\n totalSize: number,\n adaptiveChunkingEnabled: boolean,\n ): void {\n this.sessionStartTime = Date.now();\n this.currentSession = {\n uploadId,\n totalSize,\n chunksCompleted: 0,\n chunksTotal: Math.ceil(totalSize / (1024 * 1024)), // rough estimate\n totalDuration: 0,\n totalRetries: 0,\n adaptiveChunkingEnabled,\n startTime: this.sessionStartTime,\n };\n this.chunkHistory = [];\n }\n\n recordChunk(metrics: Omit<ChunkMetrics, \"timestamp\">): void {\n const chunkMetrics: ChunkMetrics = {\n ...metrics,\n timestamp: Date.now(),\n };\n\n this.chunkHistory.push(chunkMetrics);\n\n // Keep history within limits\n if (this.chunkHistory.length > this.config.maxChunkHistory) {\n this.chunkHistory = this.chunkHistory.slice(-this.config.maxChunkHistory);\n }\n\n // Update session metrics\n if (this.currentSession && chunkMetrics.success) {\n this.currentSession.chunksCompleted =\n (this.currentSession.chunksCompleted || 0) + 1;\n this.currentSession.totalDuration =\n (this.currentSession.totalDuration || 0) + chunkMetrics.duration;\n this.currentSession.totalRetries =\n (this.currentSession.totalRetries || 0) + chunkMetrics.retryCount;\n }\n }\n\n endSession(): UploadSessionMetrics | null {\n if (!this.currentSession.uploadId) {\n return null;\n }\n\n const endTime = Date.now();\n const totalDuration = endTime - this.sessionStartTime;\n const successfulChunks = this.chunkHistory.filter((chunk) => chunk.success);\n\n if (successfulChunks.length === 0) {\n return null;\n }\n\n const speeds = successfulChunks.map((chunk) => chunk.speed);\n const averageSpeed =\n speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;\n const peakSpeed = Math.max(...speeds);\n const minSpeed = Math.min(...speeds);\n const successRate = successfulChunks.length / this.chunkHistory.length;\n\n const sessionMetrics: UploadSessionMetrics = {\n uploadId: this.currentSession.uploadId || \"\",\n totalSize: this.currentSession.totalSize || 0,\n totalDuration,\n chunksCompleted: successfulChunks.length,\n chunksTotal: this.chunkHistory.length,\n averageSpeed,\n peakSpeed,\n minSpeed,\n totalRetries: this.currentSession.totalRetries || 0,\n successRate,\n adaptiveChunkingEnabled:\n this.currentSession.adaptiveChunkingEnabled || false,\n startTime: this.currentSession.startTime || 0,\n endTime,\n };\n\n // Reset current session\n this.currentSession = {};\n\n return sessionMetrics;\n }\n\n getCurrentSessionMetrics(): Partial<UploadSessionMetrics> {\n return { ...this.currentSession };\n }\n\n getChunkHistory(count?: number): ChunkMetrics[] {\n const history = this.chunkHistory.slice();\n return count ? history.slice(-count) : history;\n }\n\n getPerformanceInsights(): PerformanceInsights {\n if (this.chunkHistory.length < 5) {\n return {\n overallEfficiency: 0,\n chunkingEffectiveness: 0,\n networkStability: 0,\n recommendations: [\"Insufficient data for analysis\"],\n optimalChunkSizeRange: { min: 256 * 1024, max: 2 * 1024 * 1024 },\n };\n }\n\n const successfulChunks = this.chunkHistory.filter((chunk) => chunk.success);\n const speeds = successfulChunks.map((chunk) => chunk.speed);\n\n // Calculate metrics\n const averageSpeed =\n speeds.length > 0\n ? 
speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length\n : 0;\n const speedVariance = this.calculateVariance(speeds);\n const speedStdDev = Math.sqrt(speedVariance);\n const coefficientOfVariation = speedStdDev / averageSpeed;\n\n // Overall efficiency based on speed and retry rate\n const successRate = successfulChunks.length / this.chunkHistory.length;\n const speedScore = Math.min(\n 1,\n averageSpeed / this.config.performanceThresholds.fastSpeed,\n );\n const overallEfficiency = speedScore * 0.7 + successRate * 0.3;\n\n // Network stability (lower coefficient of variation = higher stability)\n const networkStability = Math.max(\n 0,\n 1 - Math.min(1, coefficientOfVariation),\n );\n\n // Chunking effectiveness based on how well chunk sizes correlate with performance\n const chunkingEffectiveness =\n this.calculateChunkingEffectiveness(successfulChunks);\n\n // Generate recommendations\n const recommendations = this.generateRecommendations(\n averageSpeed,\n successRate,\n coefficientOfVariation,\n );\n\n // Calculate optimal chunk size range\n const optimalChunkSizeRange =\n this.calculateOptimalChunkSizeRange(successfulChunks);\n\n return {\n overallEfficiency,\n chunkingEffectiveness,\n networkStability,\n recommendations,\n optimalChunkSizeRange,\n };\n }\n\n exportMetrics(): {\n session: Partial<UploadSessionMetrics>;\n chunks: ChunkMetrics[];\n insights: PerformanceInsights;\n } {\n return {\n session: this.getCurrentSessionMetrics(),\n chunks: this.getChunkHistory(),\n insights: this.getPerformanceInsights(),\n };\n }\n\n reset(): void {\n this.chunkHistory = [];\n this.currentSession = {};\n this.sessionStartTime = 0;\n }\n\n private calculateVariance(values: number[]): number {\n if (values.length === 0) return 0;\n\n const mean = values.reduce((sum, value) => sum + value, 0) / values.length;\n const squaredDifferences = values.map((value) => (value - mean) ** 2);\n return (\n squaredDifferences.reduce((sum, diff) => sum + diff, 0) / values.length\n );\n }\n\n private calculateChunkingEffectiveness(chunks: ChunkMetrics[]): number {\n if (chunks.length < 3) return 0.5;\n\n // Look for correlation between chunk size and upload speed\n // Better chunking should show consistent performance across different sizes\n const sizeGroups = this.groupChunksBySize(chunks);\n\n if (Object.keys(sizeGroups).length < 2) return 0.5;\n\n // Calculate coefficient of variation for each size group\n const groupVariations = Object.values(sizeGroups).map((group) => {\n const speeds = group.map((chunk) => chunk.speed);\n const mean =\n speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length;\n const variance = this.calculateVariance(speeds);\n return Math.sqrt(variance) / mean;\n });\n\n // Lower average variation indicates better chunking effectiveness\n const averageVariation =\n groupVariations.reduce((sum, cv) => sum + cv, 0) / groupVariations.length;\n return Math.max(0, 1 - Math.min(1, averageVariation));\n }\n\n private groupChunksBySize(\n chunks: ChunkMetrics[],\n ): Record<string, ChunkMetrics[]> {\n const groups: Record<string, ChunkMetrics[]> = {};\n\n chunks.forEach((chunk) => {\n // Group by size ranges (64KB, 128KB, 256KB, 512KB, 1MB, 2MB, 4MB, 8MB+)\n let sizeGroup: string;\n if (chunk.size < 128 * 1024) sizeGroup = \"64KB\";\n else if (chunk.size < 256 * 1024) sizeGroup = \"128KB\";\n else if (chunk.size < 512 * 1024) sizeGroup = \"256KB\";\n else if (chunk.size < 1024 * 1024) sizeGroup = \"512KB\";\n else if (chunk.size < 2 * 1024 * 1024) sizeGroup = \"1MB\";\n else if 
(chunk.size < 4 * 1024 * 1024) sizeGroup = \"2MB\";\n else if (chunk.size < 8 * 1024 * 1024) sizeGroup = \"4MB\";\n else sizeGroup = \"8MB+\";\n\n if (!groups[sizeGroup]) groups[sizeGroup] = [];\n const group = groups[sizeGroup];\n if (group) group.push(chunk);\n });\n\n return groups;\n }\n\n private generateRecommendations(\n averageSpeed: number,\n successRate: number,\n coefficientOfVariation: number,\n ): string[] {\n const recommendations: string[] = [];\n\n if (averageSpeed < this.config.performanceThresholds.slowSpeed) {\n recommendations.push(\n \"Consider using smaller chunk sizes for better performance on slow connections\",\n );\n }\n\n if (averageSpeed > this.config.performanceThresholds.fastSpeed) {\n recommendations.push(\n \"Network is fast - larger chunk sizes may improve efficiency\",\n );\n }\n\n if (successRate < 0.9) {\n recommendations.push(\n \"High failure rate detected - consider more conservative chunking strategy\",\n );\n }\n\n if (coefficientOfVariation > 0.5) {\n recommendations.push(\n \"Network appears unstable - smaller, more frequent chunks may be more reliable\",\n );\n }\n\n if (\n coefficientOfVariation < 0.2 &&\n averageSpeed > this.config.performanceThresholds.slowSpeed\n ) {\n recommendations.push(\n \"Stable network detected - larger chunks may improve efficiency\",\n );\n }\n\n if (recommendations.length === 0) {\n recommendations.push(\n \"Performance appears optimal with current configuration\",\n );\n }\n\n return recommendations;\n }\n\n private calculateOptimalChunkSizeRange(chunks: ChunkMetrics[]): {\n min: number;\n max: number;\n } {\n if (chunks.length < 5) {\n return { min: 256 * 1024, max: 2 * 1024 * 1024 };\n }\n\n // Find chunks with best performance (top 30% by speed)\n const sortedBySpeed = chunks.slice().sort((a, b) => b.speed - a.speed);\n const topPerformers = sortedBySpeed.slice(\n 0,\n Math.ceil(chunks.length * 0.3),\n );\n\n const topSizes = topPerformers.map((chunk) => chunk.size);\n const minOptimal = Math.min(...topSizes);\n const maxOptimal = Math.max(...topSizes);\n\n return {\n min: Math.max(64 * 1024, minOptimal), // At least 64KB\n max: Math.min(32 * 1024 * 1024, maxOptimal), // At most 32MB\n };\n }\n}\n","import type { DataStoreCapabilities } from \"@uploadista/core/types\";\nimport {\n type NegotiatedStrategy,\n UploadStrategyNegotiator,\n type UploadStrategyOptions,\n} from \"@uploadista/core/upload\";\nimport { UploadistaError } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport {\n defaultClientCapabilities,\n MockClientDataStore,\n} from \"../mock-data-store\";\nimport type { HttpClient } from \"../services/http-client\";\n\nexport type UploadStrategyConfig = {\n preferredStrategy?: \"single\" | \"parallel\" | \"auto\";\n minFileSizeForParallel?: number;\n enableCapabilityNegotiation?: boolean;\n onStrategySelected?: (strategy: {\n chosen: \"single\" | \"parallel\";\n chunkSize: number;\n parallelUploads: number;\n reasoning: string[];\n warnings: string[];\n }) => void;\n};\n\nexport type UploadClientOptions = {\n baseUrl: string;\n uploadBasePath?: string;\n storageId: string;\n retryDelays?: number[];\n chunkSize: number;\n parallelUploads?: number;\n parallelChunkSize?: number;\n uploadStrategy?: UploadStrategyConfig;\n};\n\nexport function createUploadStrategyNegotiator(\n dataStore: MockClientDataStore,\n): UploadStrategyNegotiator {\n return new UploadStrategyNegotiator(dataStore.getCapabilities(), (strategy) =>\n dataStore.validateUploadStrategy(strategy),\n );\n}\n\n/**\n * Fetch 
capabilities from server\n */\nexport async function fetchServerCapabilities(\n baseUrl: string,\n uploadBasePath: string,\n storageId: string,\n httpClient: HttpClient,\n): Promise<DataStoreCapabilities> {\n const capabilitiesUrl = `${baseUrl}/${uploadBasePath}/capabilities?storageId=${encodeURIComponent(storageId)}`;\n\n try {\n const response = await httpClient.request(capabilitiesUrl, {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n });\n\n if (!response.ok) {\n throw new Error(\n `Failed to fetch capabilities: ${response.status} ${response.statusText}`,\n );\n }\n\n const data = await response.json();\n return (data as { capabilities: DataStoreCapabilities }).capabilities;\n } catch (_error) {\n // Fall back to default capabilities if server fetch fails\n return defaultClientCapabilities;\n }\n}\n\n/**\n * Negotiate upload strategy based on capabilities and options\n */\nexport function negotiateUploadStrategy({\n capabilities,\n fileSize,\n chunkSize,\n parallelUploads,\n uploadLengthDeferred,\n strategyConfig,\n logger,\n}: {\n capabilities: DataStoreCapabilities;\n fileSize: number | null;\n chunkSize: number;\n parallelUploads: number;\n uploadLengthDeferred?: boolean;\n strategyConfig?: UploadStrategyConfig;\n logger: Logger;\n}): NegotiatedStrategy {\n if (strategyConfig?.enableCapabilityNegotiation !== false) {\n // Use capability negotiation with server-fetched capabilities\n const mockDataStore = new MockClientDataStore(capabilities);\n const negotiator = createUploadStrategyNegotiator(mockDataStore);\n\n const negotiationOptions: UploadStrategyOptions = {\n fileSize: fileSize || 0,\n preferredStrategy:\n strategyConfig?.preferredStrategy === \"auto\"\n ? undefined\n : strategyConfig?.preferredStrategy,\n preferredChunkSize: chunkSize,\n parallelUploads,\n minChunkSizeForParallel:\n strategyConfig?.minFileSizeForParallel || 10 * 1024 * 1024,\n };\n\n const negotiatedStrategy = negotiator.negotiateStrategy(negotiationOptions);\n\n // Log negotiation results\n logger.log(`Upload strategy negotiated: ${negotiatedStrategy.strategy}`);\n for (const reason of negotiatedStrategy.reasoning) {\n logger.log(` - ${reason}`);\n }\n for (const warning of negotiatedStrategy.warnings) {\n logger.log(` Warning: ${warning}`);\n }\n\n // Notify client of strategy selection if callback provided\n strategyConfig?.onStrategySelected?.({\n chosen: negotiatedStrategy.strategy,\n chunkSize: negotiatedStrategy.chunkSize,\n parallelUploads: negotiatedStrategy.parallelUploads,\n reasoning: negotiatedStrategy.reasoning,\n warnings: negotiatedStrategy.warnings,\n });\n\n return negotiatedStrategy;\n } else {\n // Fallback to legacy logic\n const shouldUseParallelUpload =\n parallelUploads > 1 &&\n fileSize &&\n fileSize > (strategyConfig?.minFileSizeForParallel || 10 * 1024 * 1024) &&\n !uploadLengthDeferred;\n\n return {\n strategy: shouldUseParallelUpload ? \"parallel\" : \"single\",\n chunkSize,\n parallelUploads: shouldUseParallelUpload ? parallelUploads : 1,\n reasoning: [\n `Legacy strategy selection: ${shouldUseParallelUpload ? 
\"parallel\" : \"single\"}`,\n ],\n warnings: [],\n };\n }\n}\n\n/**\n * Validate upload client configuration against data store capabilities\n */\nexport function validateConfiguration(\n options: UploadClientOptions,\n capabilities: DataStoreCapabilities = defaultClientCapabilities,\n logger: Logger,\n): {\n valid: boolean;\n errors: string[];\n warnings: string[];\n} {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n // Validate against capabilities\n const mockDataStore = new MockClientDataStore(capabilities);\n const negotiator = createUploadStrategyNegotiator(mockDataStore);\n\n const validation = negotiator.validateConfiguration({\n fileSize: 0, // Placeholder for validation\n preferredStrategy:\n options.uploadStrategy?.preferredStrategy === \"auto\"\n ? undefined\n : options.uploadStrategy?.preferredStrategy,\n preferredChunkSize: options.chunkSize,\n parallelUploads: options.parallelUploads,\n });\n\n if (!validation.valid) {\n errors.push(...validation.errors);\n }\n\n // Additional client-specific validations\n if (options.parallelUploads && options.parallelUploads < 1) {\n errors.push(\"parallelUploads must be at least 1\");\n }\n\n if (options.chunkSize && options.chunkSize < 1024) {\n warnings.push(\"Chunk size below 1KB may impact performance\");\n }\n\n if (\n options.uploadStrategy?.preferredStrategy === \"parallel\" &&\n !options.parallelUploads\n ) {\n warnings.push(\n \"Parallel strategy requested but parallelUploads not configured\",\n );\n }\n\n // Log validation results\n if (errors.length > 0) {\n logger.log(\"Configuration validation errors:\");\n for (const error of errors) {\n logger.log(` Error: ${error}`);\n }\n }\n\n if (warnings.length > 0) {\n logger.log(\"Configuration validation warnings:\");\n for (const warning of warnings) {\n logger.log(` Warning: ${warning}`);\n }\n }\n\n return {\n valid: errors.length === 0,\n errors,\n warnings,\n };\n}\n\n/**\n * Async configuration validation with server capabilities\n */\nexport async function validateConfigurationAsync(\n options: UploadClientOptions,\n httpClient: HttpClient,\n logger: Logger,\n): Promise<{\n valid: boolean;\n errors: string[];\n warnings: string[];\n capabilities: DataStoreCapabilities;\n}> {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n let capabilities: DataStoreCapabilities;\n try {\n capabilities = await fetchServerCapabilities(\n options.baseUrl,\n options.uploadBasePath || \"api/upload\",\n options.storageId,\n httpClient,\n );\n } catch (error) {\n logger.log(`Failed to fetch server capabilities for validation: ${error}`);\n capabilities = defaultClientCapabilities;\n warnings.push(\n \"Using default capabilities for validation - server unavailable\",\n );\n }\n\n const validation = validateConfiguration(options, capabilities, logger);\n errors.push(...validation.errors);\n warnings.push(...validation.warnings);\n\n return {\n valid: errors.length === 0,\n errors,\n warnings,\n capabilities,\n };\n}\n\n/**\n * Validate options and throw if invalid\n */\nexport function validateAndThrow(\n options: UploadClientOptions,\n logger: Logger,\n): void {\n const validationResult = validateConfiguration(\n options,\n defaultClientCapabilities,\n logger,\n );\n\n if (!validationResult.valid) {\n const errorMessage = `Upload client configuration validation failed: ${validationResult.errors.join(\", \")}`;\n logger.log(errorMessage);\n throw new UploadistaError({\n name: \"UPLOAD_SIZE_NOT_SPECIFIED\", // Reusing existing error type\n message: 
errorMessage,\n });\n }\n}\n","/**\n * Utilities for detecting input data types to determine flow execution strategy.\n *\n * @module utils/input-detection\n */\n\n/**\n * Input type classification for flow execution.\n *\n * - `file`: File-like object requiring chunked upload\n * - `url`: URL string for direct file fetch\n * - `data`: Structured data to pass through unchanged\n */\nexport type InputType = \"file\" | \"url\" | \"data\";\n\n/**\n * Minimal interface for file-like objects (File, Blob, React Native assets).\n * Platform-agnostic representation of uploadable content.\n */\nexport interface FileLike {\n /** File name (optional) */\n name?: string;\n /** MIME type (optional) */\n type?: string;\n /** File size in bytes (optional) */\n size?: number;\n}\n\n/**\n * URL regex pattern matching http:// or https:// protocols.\n * Validates common URL structures for input type detection.\n */\nconst URL_PATTERN = /^https?:\\/\\/.+/i;\n\n/**\n * Detect the type of input data for flow execution.\n *\n * Detection rules:\n * 1. Object with file-like properties (name/type/size) → \"file\"\n * 2. String matching URL pattern → \"url\"\n * 3. Everything else → \"data\"\n *\n * Uses duck typing to detect file-like objects across platforms.\n *\n * @param data - Input data to classify\n * @returns Input type classification\n *\n * @example\n * ```typescript\n * detectInputType(new File([], \"test.jpg\")); // \"file\"\n * detectInputType({ name: \"test.jpg\", size: 1024 }); // \"file\"\n * detectInputType(\"https://example.com/image.jpg\"); // \"url\"\n * detectInputType({ field: \"value\" }); // \"data\"\n * ```\n */\nexport function detectInputType(data: unknown): InputType {\n // Check for file-like object using duck typing\n if (isFileLike(data)) {\n return \"file\";\n }\n\n // Check for URL string\n if (typeof data === \"string\" && URL_PATTERN.test(data)) {\n return \"url\";\n }\n\n // Default to structured data\n return \"data\";\n}\n\n/**\n * Check if input is a URL string.\n *\n * @param data - Input data to check\n * @returns True if data is a URL string\n *\n * @example\n * ```typescript\n * isURL(\"https://example.com/file.jpg\"); // true\n * isURL(\"not a url\"); // false\n * isURL({ url: \"https://...\" }); // false\n * ```\n */\nexport function isURL(data: unknown): data is string {\n return typeof data === \"string\" && URL_PATTERN.test(data);\n}\n\n/**\n * Check if input is a file-like object (File, Blob, or platform-specific file).\n *\n * Uses duck typing to identify objects with file-like properties.\n * Works across browser (File/Blob) and React Native environments.\n *\n * @param data - Input data to check\n * @returns True if data is file-like\n *\n * @example\n * ```typescript\n * isFileLike(new File([], \"test.jpg\")); // true\n * isFileLike(new Blob([\"data\"])); // true\n * isFileLike({ name: \"test.jpg\", size: 1024 }); // true\n * isFileLike(\"not a file\"); // false\n * ```\n */\nexport function isFileLike(data: unknown): data is FileLike {\n if (typeof data !== \"object\" || data === null) {\n return false;\n }\n\n // Check for File or Blob using runtime type check (browser)\n if (typeof globalThis !== \"undefined\") {\n // @ts-expect-error - File and Blob may not exist in all environments\n if (globalThis.File && data instanceof globalThis.File) {\n return true;\n }\n // @ts-expect-error - File and Blob may not exist in all environments\n if (globalThis.Blob && data instanceof globalThis.Blob) {\n return true;\n }\n }\n\n // Duck typing: object with file-like 
properties\n const obj = data as Record<string, unknown>;\n return (\n (\"name\" in obj || \"type\" in obj || \"size\" in obj) &&\n (typeof obj.size === \"number\" || typeof obj.size === \"undefined\")\n );\n}\n","import type { FlowData, FlowJob } from \"@uploadista/core/flow\";\nimport type {\n DataStoreCapabilities,\n InputFile,\n UploadFile,\n} from \"@uploadista/core/types\";\nimport { AuthHttpClient, type AuthManager } from \"../auth\";\nimport { UploadistaError, type UploadistaErrorName } from \"../error\";\nimport type { Logger } from \"../logger\";\nimport { defaultClientCapabilities } from \"../mock-data-store\";\nimport type { AbortControllerLike } from \"../services/abort-controller-service\";\nimport type {\n ConnectionMetrics,\n DetailedConnectionMetrics,\n HttpClient,\n RequestBody,\n} from \"../services/http-client\";\nimport type {\n WebSocketFactory,\n WebSocketLike,\n} from \"../services/websocket-service\";\n\n// Error response type - matches server format\ntype ErrorResponse = {\n error?: string;\n message?: string;\n code?: string;\n details?: unknown;\n timestamp?: string;\n};\n\n/**\n * Maps server error codes to client error names\n * If no mapping exists, uses a default error name based on context\n */\nconst mapServerErrorCodeToClientName = (\n serverCode: string | undefined,\n defaultName: UploadistaErrorName,\n): UploadistaErrorName => {\n if (!serverCode) return defaultName;\n\n // Map common server error codes to client error names\n const errorMap: Record<string, UploadistaErrorName> = {\n FILE_NOT_FOUND: \"UPLOAD_NOT_FOUND\",\n UPLOAD_ID_NOT_FOUND: \"UPLOAD_NOT_FOUND\",\n FLOW_JOB_NOT_FOUND: \"JOB_NOT_FOUND\",\n FLOW_NODE_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_STRUCTURE_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_CYCLE_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_INPUT_VALIDATION_ERROR: \"FLOW_RUN_FAILED\",\n FLOW_OUTPUT_VALIDATION_ERROR: \"FLOW_RUN_FAILED\",\n VALIDATION_ERROR: \"CREATE_UPLOAD_FAILED\",\n DATASTORE_NOT_FOUND: \"FLOW_RUN_FAILED\",\n };\n\n return errorMap[serverCode] || defaultName;\n};\n\n/**\n * Response from upload-related API calls.\n *\n * Contains the upload metadata and HTTP status code.\n */\nexport type UploadistaUploadResponse = {\n /** Upload file metadata, undefined if request failed */\n upload?: UploadFile;\n /** HTTP status code */\n status: number;\n};\n\n/**\n * Response from delete upload API call.\n */\nexport type UploadistaDeleteUploadResponse =\n | {\n /** Successfully deleted (no content) */\n status: 204;\n }\n | {\n /** Other status codes (e.g., 404, 500) */\n status: number;\n };\n\n/**\n * Response from flow retrieval API call.\n */\nexport type FlowResponse = {\n /** HTTP status code */\n status: number;\n /** Flow configuration and metadata */\n flow: FlowData;\n};\n\n/**\n * Unified Uploadista API interface combining upload and flow operations.\n *\n * This low-level API provides direct access to server endpoints for:\n * - Upload CRUD operations (create, get, delete, patch chunks)\n * - Flow operations (get, run, continue)\n * - Job status tracking\n * - WebSocket connections for real-time updates\n * - Server capabilities discovery\n * - Connection pooling metrics\n *\n * Most applications should use the higher-level {@link UploadistaClient} instead,\n * which provides a more convenient interface with automatic retry, resumption,\n * and smart chunking.\n *\n * @example Direct API usage (advanced)\n * ```typescript\n * const api = createUploadistaApi(baseUrl, basePath, {\n * httpClient,\n * logger,\n * authManager,\n * 
webSocketFactory,\n * });\n *\n * // Create an upload\n * const { upload } = await api.createUpload({\n * storageId: 'my-storage',\n * size: 1024000,\n * metadata: { filename: 'test.txt' },\n * });\n *\n * // Upload a chunk\n * const chunk = new Uint8Array(1024);\n * await api.uploadChunk(upload.id, chunk, {});\n *\n * // Check status\n * const { upload: updated } = await api.getUpload(upload.id);\n * console.log(`Progress: ${updated.offset}/${updated.size}`);\n * ```\n *\n * @see {@link createUploadistaApi} for creating an instance\n */\nexport type UploadistaApi = {\n /**\n * Retrieves upload metadata and current status.\n *\n * @param uploadId - Unique upload identifier\n * @returns Upload metadata including current offset and status\n * @throws {UploadistaError} If upload not found or request fails\n */\n getUpload: (uploadId: string) => Promise<UploadistaUploadResponse>;\n\n /**\n * Deletes an upload and its associated data.\n *\n * @param uploadId - Unique upload identifier\n * @returns Response with status 204 on success\n * @throws {UploadistaError} If upload not found or deletion fails\n */\n deleteUpload: (uploadId: string) => Promise<UploadistaDeleteUploadResponse>;\n\n /**\n * Creates a new upload on the server.\n *\n * @param body - Upload configuration including storageId, size, and metadata\n * @returns Created upload metadata with unique ID\n * @throws {UploadistaError} If creation fails or validation errors occur\n */\n createUpload: (body: InputFile) => Promise<UploadistaUploadResponse>;\n\n /**\n * Uploads a chunk of data to an existing upload.\n *\n * @param uploadId - Upload identifier to append data to\n * @param data - Chunk data bytes, or null to finalize without data\n * @param options - Upload options including abort controller and progress callback\n * @returns Updated upload metadata with new offset\n * @throws {UploadistaError} If chunk upload fails or upload is locked\n */\n uploadChunk: (\n uploadId: string,\n data: Uint8Array | null,\n options: {\n abortController?: AbortControllerLike;\n onProgress?: (bytes: number, total: number) => void;\n },\n ) => Promise<UploadistaUploadResponse>;\n\n /**\n * Retrieves flow configuration and metadata.\n *\n * @param flowId - Unique flow identifier\n * @returns Flow configuration including nodes and edges\n * @throws {UploadistaError} If flow not found\n */\n getFlow: (flowId: string) => Promise<FlowResponse>;\n\n /**\n * Executes a flow with the provided inputs.\n *\n * @param flowId - Flow to execute\n * @param storageId - Storage backend to use for flow outputs\n * @param inputs - Input data for flow nodes (keyed by node ID)\n * @returns Job metadata including job ID and initial state\n * @throws {UploadistaError} If flow execution fails or inputs are invalid\n */\n runFlow: (\n flowId: string,\n storageId: string,\n inputs: Record<string, unknown>,\n ) => Promise<{ status: number; job: FlowJob }>;\n\n /**\n * Continues a paused flow execution with new data.\n *\n * Used for interactive flows that wait for user input or external data.\n *\n * @param jobId - Job identifier for the paused flow\n * @param nodeId - Node ID where execution should continue\n * @param newData - Data to provide to the node\n * @param options - Options including content type for binary data\n * @returns Updated job metadata\n * @throws {UploadistaError} If job not found or continuation fails\n */\n resumeFlow: (\n jobId: string,\n nodeId: string,\n newData: unknown,\n options?: {\n contentType?: \"application/json\" | 
\"application/octet-stream\";\n },\n ) => Promise<FlowJob>;\n\n /**\n * Pauses a running flow execution.\n *\n * The flow will stop at the next node boundary (not mid-node execution).\n * Can be resumed later using resumeFlow.\n *\n * @param jobId - Job identifier for the running flow\n * @returns Updated job metadata with \"paused\" status\n * @throws {UploadistaError} If job not found or cannot be paused\n */\n pauseFlow: (jobId: string) => Promise<FlowJob>;\n\n /**\n * Cancels a running or paused flow execution.\n *\n * The flow will stop at the next node boundary (not mid-node execution).\n * Intermediate files are automatically cleaned up. This operation is terminal\n * and cannot be undone.\n *\n * @param jobId - Job identifier for the flow to cancel\n * @returns Updated job metadata with \"cancelled\" status\n * @throws {UploadistaError} If job not found or cannot be cancelled\n */\n cancelFlow: (jobId: string) => Promise<FlowJob>;\n\n /**\n * Retrieves current job status and outputs.\n *\n * Works for both upload and flow jobs.\n *\n * @param jobId - Job identifier\n * @returns Job metadata including state, progress, and outputs\n * @throws {UploadistaError} If job not found\n */\n getJobStatus: (jobId: string) => Promise<FlowJob>;\n\n /**\n * Opens a WebSocket connection for upload progress events.\n *\n * @param uploadId - Upload to monitor\n * @returns WebSocket instance for receiving real-time updates\n */\n openUploadWebSocket: (uploadId: string) => Promise<WebSocketLike>;\n\n /**\n * Opens a WebSocket connection for flow job events.\n *\n * @param jobId - Flow job to monitor\n * @returns WebSocket instance for receiving real-time updates\n */\n openFlowWebSocket: (jobId: string) => Promise<WebSocketLike>;\n\n /**\n * Closes a WebSocket connection.\n *\n * @param ws - WebSocket instance to close\n */\n closeWebSocket: (ws: WebSocketLike) => void;\n\n /**\n * Returns current connection pool metrics.\n *\n * @returns Basic metrics including active connections and reuse rate\n */\n getConnectionMetrics: () => ConnectionMetrics;\n\n /**\n * Returns detailed connection pool metrics with health diagnostics.\n *\n * @returns Comprehensive metrics including health status and recommendations\n */\n getDetailedConnectionMetrics: () => DetailedConnectionMetrics;\n\n /**\n * Pre-warms connections to the specified URLs.\n *\n * Useful for reducing latency on first upload by establishing\n * connections ahead of time.\n *\n * @param urls - URLs to pre-connect to\n */\n warmupConnections: (urls: string[]) => Promise<void>;\n\n /**\n * Fetches server capabilities for the specified storage backend.\n *\n * Returns information about chunk size constraints, supported features,\n * and storage-specific requirements. 
Falls back to default capabilities\n * if the request fails.\n *\n * @param storageId - Storage backend identifier\n * @returns Storage capabilities including chunk size limits\n */\n getCapabilities: (storageId: string) => Promise<DataStoreCapabilities>;\n};\n\n/**\n * Creates an Uploadista API instance for direct server communication.\n *\n * This factory creates a low-level API client that handles:\n * - HTTP requests to upload and flow endpoints\n * - Authentication via AuthManager (optional)\n * - WebSocket connections for real-time updates\n * - Error mapping from server to client error types\n * - Connection pooling and metrics\n *\n * Most applications should use {@link createUploadistaClient} instead,\n * which wraps this API with higher-level features like automatic retry,\n * resumption, and smart chunking.\n *\n * @param baseURL - Base URL of the Uploadista server (e.g., \"https://upload.example.com\")\n * @param uploadistBasePath - Base path for endpoints, typically \"uploadista\"\n * @param options - Configuration object\n * @param options.httpClient - HTTP client for making requests\n * @param options.logger - Optional logger for debugging\n * @param options.authManager - Optional authentication manager\n * @param options.webSocketFactory - Factory for creating WebSocket connections\n * @returns UploadistaApi instance\n *\n * @example Basic API instance\n * ```typescript\n * import { createUploadistaApi } from '@uploadista/client-core';\n *\n * const api = createUploadistaApi(\n * 'https://upload.example.com',\n * 'uploadista',\n * {\n * httpClient: myHttpClient,\n * logger: console,\n * webSocketFactory: {\n * create: (url) => new WebSocket(url),\n * },\n * }\n * );\n *\n * // Use the API directly\n * const { upload } = await api.createUpload({\n * storageId: 'my-storage',\n * size: 1024,\n * });\n * ```\n *\n * @example With authentication\n * ```typescript\n * const authManager = new DirectAuthManager(authConfig, platformService, logger);\n *\n * const api = createUploadistaApi(baseUrl, 'uploadista', {\n * httpClient,\n * logger,\n * authManager, // Automatically adds auth headers to requests\n * webSocketFactory,\n * });\n * ```\n *\n * @see {@link UploadistaApi} for the API interface\n * @see {@link createUploadistaClient} for the high-level client\n */\nexport function createUploadistaApi(\n baseURL: string,\n uploadistBasePath: string,\n {\n httpClient: baseHttpClient,\n logger,\n authManager,\n webSocketFactory,\n }: {\n httpClient: HttpClient;\n logger?: Logger;\n authManager?: AuthManager;\n webSocketFactory: WebSocketFactory;\n },\n): UploadistaApi {\n // Create base HTTP client with connection pooling\n\n // Wrap with auth if auth manager is provided\n const httpClient = authManager\n ? 
new AuthHttpClient(baseHttpClient, authManager)\n : baseHttpClient;\n\n // Construct endpoint URLs\n const uploadEndpoint = `${baseURL}/${uploadistBasePath}/api/upload`;\n const flowEndpoint = `${baseURL}/${uploadistBasePath}/api/flow`;\n const jobsEndpoint = `${baseURL}/${uploadistBasePath}/api/jobs`;\n\n // WebSocket URLs\n const wsBaseURL = baseURL.replace(\"http\", \"ws\");\n const uploadWsURL = `${wsBaseURL}/uploadista/ws/upload`;\n const flowWsURL = `${wsBaseURL}/uploadista/ws/flow`;\n\n /**\n * Helper function to extract auth token for WebSocket connection.\n * Supports both DirectAuthManager (extracts from headers) and UploadistaCloudAuthManager (gets cached token).\n */\n const getAuthTokenForWebSocket = async (\n manager: AuthManager,\n jobId?: string,\n ): Promise<string | null> => {\n logger?.log(`Getting auth token for WebSocket (jobId: ${jobId})`);\n\n // Check if this is a UploadistaCloudAuthManager (has attachToken method)\n if (\"attachToken\" in manager) {\n logger?.log(\"Detected UploadistaCloudAuthManager, calling attachToken\");\n const headers = await manager.attachToken({}, jobId);\n const authHeader = headers.Authorization;\n if (authHeader?.startsWith(\"Bearer \")) {\n logger?.log(\n \"Successfully extracted Bearer token from UploadistaCloudAuthManager\",\n );\n return authHeader.substring(7); // Remove \"Bearer \" prefix\n }\n logger?.log(\n `No valid Authorization header from UploadistaCloudAuthManager: ${authHeader}`,\n );\n }\n\n // Check if this is a DirectAuthManager (has attachCredentials method)\n if (\"attachCredentials\" in manager) {\n logger?.log(\"Detected DirectAuthManager, calling attachCredentials\");\n const headers = await manager.attachCredentials({});\n const authHeader = headers.Authorization;\n if (authHeader) {\n logger?.log(\n \"Successfully extracted Authorization header from DirectAuthManager\",\n );\n // Support both \"Bearer token\" and plain token formats\n return authHeader.startsWith(\"Bearer \")\n ? authHeader.substring(7)\n : authHeader;\n }\n logger?.log(`No Authorization header from DirectAuthManager`);\n }\n\n logger?.log(\"No auth token could be extracted from auth manager\");\n return null;\n };\n\n return {\n // Upload operations\n getUpload: async (uploadId: string) => {\n const res = await httpClient.request(`${uploadEndpoint}/${uploadId}`);\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"UPLOAD_NOT_FOUND\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Upload ${uploadId} not found`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as UploadFile;\n return { status: res.status, upload: data };\n },\n\n deleteUpload: async (uploadId: string) => {\n const res = await httpClient.request(`${uploadEndpoint}/${uploadId}`, {\n method: \"DELETE\",\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"DELETE_UPLOAD_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to delete upload ${uploadId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? 
`${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n return { status: res.status };\n },\n\n createUpload: async (data: InputFile) => {\n logger?.log(`createUpload ${JSON.stringify(data)}`);\n const res = await httpClient.request(uploadEndpoint, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(data),\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"CREATE_UPLOAD_FAILED\",\n );\n const errorMessage =\n errorData.error || errorData.message || \"Failed to create upload\";\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const responseData = (await res.json()) as UploadFile;\n logger?.log(JSON.stringify(responseData));\n return { status: res.status, upload: responseData };\n },\n\n uploadChunk: async (uploadId, data, { abortController }) => {\n try {\n const res = await httpClient.request(`${uploadEndpoint}/${uploadId}`, {\n method: \"PATCH\",\n headers: {\n \"Content-Type\": \"application/octet-stream\",\n },\n body: data,\n signal: abortController?.signal,\n });\n\n if (!res.ok) {\n const errorData = (await res\n .json()\n .catch(() => ({}))) as ErrorResponse;\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message:\n errorData.error || errorData.message || \"Unknown network error\",\n status: res.status,\n });\n }\n\n const responseData = (await res.json()) as UploadFile;\n return { status: res.status, upload: responseData };\n } catch (err) {\n if (err instanceof UploadistaError) {\n throw err;\n }\n throw new UploadistaError({\n name: \"NETWORK_ERROR\",\n message: \"Network error\",\n cause: err as Error,\n });\n }\n },\n\n // Flow operations\n getFlow: async (flowId: string) => {\n const res = await httpClient.request(`${flowEndpoint}/${flowId}`);\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_NOT_FOUND\",\n );\n const errorMessage =\n errorData.error || errorData.message || `Flow ${flowId} not found`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowData;\n logger?.log(`getFlow: ${flowId}`);\n return { status: res.status, flow: data };\n },\n\n runFlow: async (\n flowId: string,\n storageId: string,\n inputs: Record<string, unknown>,\n ) => {\n logger?.log(`runFlow: ${flowId} with storage: ${storageId}`);\n const res = await httpClient.request(\n `${flowEndpoint}/${flowId}/${storageId}`,\n {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify({ inputs }),\n },\n );\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_RUN_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to run flow ${flowId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? 
`${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n logger?.log(`runFlow response: ${JSON.stringify(data)}`);\n return { status: res.status, job: data };\n },\n\n resumeFlow: async (\n jobId: string,\n nodeId: string,\n newData: unknown,\n options?: {\n contentType?: \"application/json\" | \"application/octet-stream\";\n },\n ) => {\n const contentType = options?.contentType || \"application/json\";\n\n let body: RequestBody;\n if (contentType === \"application/octet-stream\") {\n // For octet-stream, newData should be a Uint8Array or similar\n body = newData as RequestBody;\n } else {\n // For JSON, wrap newData in an object\n body = JSON.stringify({ newData });\n }\n\n const res = await httpClient.request(\n `${jobsEndpoint}/${jobId}/resume/${nodeId}`,\n {\n method: \"PATCH\",\n headers: {\n \"Content-Type\": contentType,\n },\n body,\n },\n );\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_RESUMED_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to resume flow for job ${jobId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n return data;\n },\n\n pauseFlow: async (jobId: string) => {\n const res = await httpClient.request(`${jobsEndpoint}/${jobId}/pause`, {\n method: \"POST\",\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_PAUSE_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to pause flow for job ${jobId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n logger?.log(`Flow paused: ${jobId}, status: ${data.status}`);\n return data;\n },\n\n cancelFlow: async (jobId: string) => {\n const res = await httpClient.request(`${jobsEndpoint}/${jobId}/cancel`, {\n method: \"POST\",\n });\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"FLOW_CANCEL_FAILED\",\n );\n const errorMessage =\n errorData.error ||\n errorData.message ||\n `Failed to cancel flow for job ${jobId}`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? `${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n logger?.log(`Flow cancelled: ${jobId}, status: ${data.status}`);\n return data;\n },\n\n // Unified job operations\n getJobStatus: async (jobId: string) => {\n const res = await httpClient.request(`${jobsEndpoint}/${jobId}/status`);\n\n if (!res.ok) {\n const errorData = (await res.json().catch(() => ({}))) as ErrorResponse;\n const errorName = mapServerErrorCodeToClientName(\n errorData.code,\n \"JOB_NOT_FOUND\",\n );\n const errorMessage =\n errorData.error || errorData.message || `Job ${jobId} not found`;\n\n throw new UploadistaError({\n name: errorName,\n message: errorData.code\n ? 
`${errorMessage} (${errorData.code})`\n : errorMessage,\n status: res.status,\n });\n }\n\n const data = (await res.json()) as FlowJob;\n return data;\n },\n\n // WebSocket operations\n openUploadWebSocket: async (uploadId: string) => {\n let wsUrl = `${uploadWsURL}/${uploadId}`;\n\n // Attach auth token if auth manager is configured\n // Note: For cookie-based auth (e.g., HttpOnly cookies with better-auth),\n // no token is needed as cookies are automatically sent by the browser\n if (authManager) {\n try {\n const token = await getAuthTokenForWebSocket(authManager, uploadId);\n if (token) {\n wsUrl += `?token=${encodeURIComponent(token)}`;\n logger?.log(`WebSocket token attached for upload: ${uploadId}`);\n } else {\n // No token means cookie-based auth - this is fine\n logger?.log(\n `No token for upload WebSocket (using cookie-based auth): ${uploadId}`,\n );\n }\n } catch (error) {\n const errorMessage =\n error instanceof Error ? error.message : String(error);\n logger?.log(\n `Error getting auth token for upload WebSocket: ${errorMessage}`,\n );\n // Don't throw - allow cookie-based auth to proceed\n logger?.log(\n `Proceeding with cookie-based authentication for upload WebSocket: ${uploadId}`,\n );\n }\n }\n\n const ws = webSocketFactory.create(wsUrl);\n\n ws.onopen = () => {\n logger?.log(`Upload WebSocket connection opened for: ${uploadId}`);\n };\n\n ws.onclose = () => {\n logger?.log(`Upload WebSocket connection closed for: ${uploadId}`);\n };\n\n ws.onerror = (error) => {\n logger?.log(`Upload WebSocket error for ${uploadId}: ${error}`);\n };\n\n return ws;\n },\n\n openFlowWebSocket: async (jobId: string) => {\n let wsUrl = `${flowWsURL}/${jobId}`;\n\n // Attach auth token if auth manager is configured\n // Note: For cookie-based auth (e.g., HttpOnly cookies with better-auth),\n // no token is needed as cookies are automatically sent by the browser\n if (authManager) {\n try {\n const token = await getAuthTokenForWebSocket(authManager, jobId);\n if (token) {\n wsUrl += `?token=${encodeURIComponent(token)}`;\n logger?.log(`WebSocket token attached for flow job: ${jobId}`);\n } else {\n // No token means cookie-based auth - this is fine\n logger?.log(\n `No token for flow WebSocket (using cookie-based auth): ${jobId}`,\n );\n }\n } catch (error) {\n const errorMessage =\n error instanceof Error ? 
error.message : String(error);\n logger?.log(\n `Error getting auth token for flow WebSocket: ${errorMessage}`,\n );\n // Don't throw - allow cookie-based auth to proceed\n logger?.log(\n `Proceeding with cookie-based authentication for flow WebSocket: ${jobId}`,\n );\n }\n }\n\n const ws = webSocketFactory.create(wsUrl);\n\n ws.onopen = () => {\n logger?.log(`Flow WebSocket connection opened for job: ${jobId}`);\n };\n\n ws.onclose = () => {\n logger?.log(`Flow WebSocket connection closed for job: ${jobId}`);\n };\n\n ws.onerror = (error) => {\n logger?.log(`Flow WebSocket error for job ${jobId}: ${error}`);\n };\n\n return ws;\n },\n\n closeWebSocket: (ws: WebSocketLike) => {\n ws.close();\n },\n\n // Connection metrics\n getConnectionMetrics: () => {\n return httpClient.getMetrics();\n },\n\n getDetailedConnectionMetrics: () => {\n return httpClient.getDetailedMetrics();\n },\n\n warmupConnections: async (urls: string[]) => {\n return httpClient.warmupConnections(urls);\n },\n\n // Capabilities\n getCapabilities: async (storageId: string) => {\n const capabilitiesUrl = `${uploadEndpoint}/capabilities?storageId=${encodeURIComponent(storageId)}`;\n\n try {\n const response = await httpClient.request(capabilitiesUrl, {\n method: \"GET\",\n headers: {\n \"Content-Type\": \"application/json\",\n },\n });\n\n if (!response.ok) {\n logger?.log(\n `Failed to fetch capabilities: ${response.status} ${response.statusText}`,\n );\n return defaultClientCapabilities;\n }\n\n const data = await response.json();\n return (data as { capabilities: DataStoreCapabilities }).capabilities;\n } catch (error) {\n logger?.log(\n `Failed to fetch server capabilities, using defaults: ${error}`,\n );\n return defaultClientCapabilities;\n }\n },\n };\n}\n","import type { FlowEvent } from \"@uploadista/core/flow\";\nimport type { UploadEvent } from \"@uploadista/core/types\";\nimport { webSocketMessageSchema } from \"@uploadista/core/types\";\nimport type { Logger } from \"../logger\";\nimport type { WebSocketLike } from \"../services/websocket-service\";\nimport type { UploadistaApi } from \"./uploadista-api\";\n\nexport type UploadistaEvent = UploadEvent | FlowEvent;\n\nexport type UploadistaWebSocketEventHandler = (event: UploadistaEvent) => void;\n\nexport type UploadistaWebSocketMessage =\n | { type: \"connection\"; message: string; id: string; timestamp: string }\n | {\n type: \"subscribed\";\n payload: { uploadId?: string; jobId?: string };\n timestamp: string;\n }\n | { type: \"error\"; message: string; code?: string }\n | { type: \"pong\"; timestamp: string }\n | { type: \"upload_event\"; payload: UploadEvent }\n | { type: \"flow_event\"; payload: FlowEvent };\n\n/**\n * Unified WebSocket management for both upload and flow events\n */\nexport class UploadistaWebSocketManager {\n private uploadWebsockets = new Map<string, WebSocketLike>();\n private flowWebsockets = new Map<string, WebSocketLike>();\n\n constructor(\n private uploadistaApi: UploadistaApi,\n private logger: Logger,\n private onEvent?: UploadistaWebSocketEventHandler,\n ) {}\n\n /**\n * Open a WebSocket connection for upload events\n */\n async openUploadWebSocket(uploadId: string): Promise<WebSocketLike> {\n // Close existing connection if any\n this.closeUploadWebSocket(uploadId);\n\n const ws = await this.uploadistaApi.openUploadWebSocket(uploadId);\n this.uploadWebsockets.set(uploadId, ws);\n\n ws.onmessage = (event) => {\n try {\n const parsedEvent = webSocketMessageSchema.safeParse(\n JSON.parse(event.data),\n );\n\n if 
(parsedEvent.success) {\n          if (parsedEvent.data.type === \"upload_event\") {\n            this.onEvent?.(parsedEvent.data.payload);\n          }\n        } else {\n          this.logger.error(\n            `Error parsing upload event: ${parsedEvent.error.message}`,\n          );\n        }\n      } catch (error) {\n        this.logger.error(`Error parsing upload event: ${error}`);\n      }\n    };\n\n    ws.onerror = (error) => {\n      this.logger.error(`Upload WebSocket error for ${uploadId}: ${error}`);\n    };\n\n    ws.onclose = (event) => {\n      this.logger.log(\n        `Upload WebSocket closed for ${uploadId}, \\n code: ${event.code as number}, reason: ${event.reason as string}`,\n      );\n      this.uploadWebsockets.delete(uploadId);\n    };\n\n    return ws;\n  }\n\n  /**\n   * Open a WebSocket connection for flow/job events\n   */\n  async openFlowWebSocket(jobId: string): Promise<WebSocketLike> {\n    // Close existing connection if any\n    this.closeFlowWebSocket(jobId);\n\n    const ws = await this.uploadistaApi.openFlowWebSocket(jobId);\n    this.flowWebsockets.set(jobId, ws);\n\n    ws.onmessage = (event) => {\n      try {\n        const message = JSON.parse(event.data) as UploadistaWebSocketMessage;\n\n        switch (message.type) {\n          case \"connection\":\n            this.logger.log(`Flow WebSocket connected for job: ${message.id}`);\n            break;\n          case \"subscribed\":\n            this.logger.log(\n              `Flow WebSocket subscribed for job: ${message.payload.jobId}`,\n            );\n            break;\n          case \"error\":\n            this.logger.error(\n              `Flow WebSocket error: ${message.message} for job ${jobId} with code ${message.code}`,\n            );\n            break;\n          case \"pong\":\n            this.logger.log(`Flow WebSocket pong received for job: ${jobId}`);\n            break;\n          case \"flow_event\":\n            this.onEvent?.(message.payload);\n            break;\n          default:\n            this.logger.warn(\n              `Unknown flow WebSocket message type: ${message.type}`,\n            );\n        }\n      } catch (error) {\n        this.logger.error(`Error parsing flow WebSocket message: ${error}`);\n      }\n    };\n\n    ws.onerror = (error) => {\n      this.logger.error(`Flow WebSocket error for job ${jobId}: ${error}`);\n    };\n\n    ws.onclose = (event) => {\n      this.logger.log(\n        `Flow WebSocket closed for job ${jobId}, \\n code: ${event.code as number}, reason: ${event.reason as string}`,\n      );\n      this.flowWebsockets.delete(jobId);\n    };\n\n    return ws;\n  }\n\n  /**\n   * Open a unified WebSocket connection - automatically determines if it's for upload or flow\n   * based on the ID format (upload IDs typically start with 'upload-', job IDs start with 'job-')\n   */\n  async openWebSocket(id: string): Promise<WebSocketLike> {\n    // Heuristic: if ID starts with 'upload-' or contains upload-related patterns, treat as upload\n    // Otherwise, treat as flow/job\n    if (id.startsWith(\"upload-\") || id.includes(\"upload\")) {\n      return await this.openUploadWebSocket(id);\n    }\n    return await this.openFlowWebSocket(id);\n  }\n\n  /**\n   * Close upload WebSocket connection\n   */\n  closeUploadWebSocket(uploadId: string): void {\n    const ws = this.uploadWebsockets.get(uploadId);\n    if (ws) {\n      this.uploadistaApi.closeWebSocket(ws);\n      this.uploadWebsockets.delete(uploadId);\n    }\n  }\n\n  /**\n   * Close flow WebSocket connection\n   */\n  closeFlowWebSocket(jobId: string): void {\n    const ws = this.flowWebsockets.get(jobId);\n    if (ws) {\n      this.uploadistaApi.closeWebSocket(ws);\n      this.flowWebsockets.delete(jobId);\n    }\n  }\n\n  /**\n   * Close WebSocket connection by ID (auto-detects type)\n   */\n  closeWebSocket(id: string): void {\n    // Try both maps\n    this.closeUploadWebSocket(id);\n    this.closeFlowWebSocket(id);\n  }\n\n  /**\n   * Close all WebSocket connections (both upload and flow)\n   */\n  closeAll(): void {\n    // Close all upload websockets
\n    for (const [uploadId, ws] of this.uploadWebsockets.entries()) {\n      this.uploadistaApi.closeWebSocket(ws);\n      this.uploadWebsockets.delete(uploadId);\n    }\n\n    // Close all flow websockets\n    for (const [jobId, ws] of this.flowWebsockets.entries()) {\n      this.uploadistaApi.closeWebSocket(ws);\n      this.flowWebsockets.delete(jobId);\n    }\n  }\n\n  /**\n   * Send ping to flow WebSocket\n   */\n  sendPing(jobId: string): boolean {\n    const ws = this.flowWebsockets.get(jobId);\n    if (ws && ws.readyState === ws.OPEN) {\n      ws.send(\n        JSON.stringify({\n          type: \"ping\",\n          timestamp: new Date().toISOString(),\n        }),\n      );\n      return true;\n    }\n    return false;\n  }\n\n  /**\n   * Get upload WebSocket by ID\n   */\n  getUploadWebSocket(uploadId: string): WebSocketLike | undefined {\n    return this.uploadWebsockets.get(uploadId);\n  }\n\n  /**\n   * Get flow WebSocket by ID\n   */\n  getFlowWebSocket(jobId: string): WebSocketLike | undefined {\n    return this.flowWebsockets.get(jobId);\n  }\n\n  /**\n   * Check if upload WebSocket is connected\n   */\n  isUploadConnected(uploadId: string): boolean {\n    const ws = this.uploadWebsockets.get(uploadId);\n    return ws !== undefined && ws.readyState === ws.OPEN;\n  }\n\n  /**\n   * Check if flow WebSocket is connected\n   */\n  isFlowConnected(jobId: string): boolean {\n    const ws = this.flowWebsockets.get(jobId);\n    return ws !== undefined && ws.readyState === ws.OPEN;\n  }\n\n  /**\n   * Check if WebSocket is connected (auto-detects type)\n   */\n  isConnected(id: string): boolean {\n    return this.isUploadConnected(id) || this.isFlowConnected(id);\n  }\n\n  /**\n   * Get total number of active WebSocket connections\n   */\n  getConnectionCount(): number {\n    return this.uploadWebsockets.size + this.flowWebsockets.size;\n  }\n\n  /**\n   * Get connection counts by type\n   */\n  getConnectionCountByType(): {\n    upload: number;\n    flow: number;\n    total: number;\n  } {\n    return {\n      upload: this.uploadWebsockets.size,\n      flow: this.flowWebsockets.size,\n      total: this.uploadWebsockets.size + this.flowWebsockets.size,\n    };\n  }\n}\n","import type { FlowJob } from \"@uploadista/core/flow\";\nimport type { DataStoreCapabilities } from \"@uploadista/core/types\";\nimport type { AuthConfig, AuthManager } from \"../auth\";\nimport {\n  DirectAuthManager,\n  NoAuthManager,\n  UploadistaCloudAuthManager,\n} from \"../auth\";\nimport type { Logger } from \"../logger\";\nimport { createLogger } from \"../logger\";\nimport { defaultClientCapabilities } from \"../mock-data-store\";\nimport { NetworkMonitor, type NetworkMonitorConfig } from \"../network-monitor\";\nimport type { AbortControllerFactory } from \"../services/abort-controller-service\";\nimport type { ChecksumService } from \"../services/checksum-service\";\nimport type { FileReaderService } from \"../services/file-reader-service\";\nimport type { FingerprintService } from \"../services/fingerprint-service\";\nimport type { ConnectionPoolConfig, HttpClient } from \"../services/http-client\";\nimport type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { PlatformService, Timeout } from \"../services/platform-service\";\nimport type {\n  WebSocketFactory,\n  WebSocketLike,\n} from \"../services/websocket-service\";\nimport { SmartChunker, type SmartChunkerConfig } from \"../smart-chunker\";\nimport type { ClientStorage } from \"../storage/client-storage\";\nimport type { FlowUploadConfig } from \"../types/flow-upload-config\";\nimport { performFlowUpload, startFlowUpload } from \"../upload/flow-upload\";\nimport {\n  finalizeFlowInput,\n  initializeFlowInput,\n  uploadInputChunks,\n} from 
\"../upload/flow-upload-orchestrator\";\nimport { startParallelUpload } from \"../upload/parallel-upload\";\nimport {\n type Callbacks,\n performUpload,\n startSingleUpload,\n} from \"../upload/single-upload\";\nimport { abort, terminate } from \"../upload/upload-manager\";\nimport {\n UploadMetrics,\n type UploadMetricsConfig,\n} from \"../upload/upload-metrics\";\nimport {\n findPreviousUploads,\n resumeFromPreviousUpload,\n} from \"../upload/upload-storage\";\nimport {\n negotiateUploadStrategy,\n type UploadStrategyConfig,\n validateAndThrow,\n validateConfiguration,\n} from \"../upload/upload-strategy\";\nimport { calculateFileSize } from \"../upload/upload-utils\";\nimport { detectInputType } from \"../utils/input-detection\";\nimport { createUploadistaApi } from \"./uploadista-api\";\nimport {\n type UploadistaWebSocketEventHandler,\n UploadistaWebSocketManager,\n} from \"./uploadista-websocket-manager\";\n\n/**\n * Options for individual upload operations.\n *\n * Extends the base upload callbacks with configuration for deferred length,\n * size overrides, metadata, and checksum computation.\n */\nexport type UploadistaUploadOptions = {\n /**\n * Whether to defer specifying the upload size until later.\n * Useful for streaming uploads where size isn't known upfront.\n * Defaults to false.\n */\n uploadLengthDeferred?: boolean;\n\n /**\n * Manual override for upload size in bytes.\n * If not provided, size is determined from the file/blob.\n */\n uploadSize?: number;\n\n /**\n * Custom metadata to attach to the upload.\n * Stored as key-value pairs on the server.\n */\n metadata?: Record<string, string>;\n\n /**\n * Whether to compute checksums for uploaded chunks.\n * Enables integrity verification but adds computational overhead.\n * Defaults to false.\n */\n computeChecksum?: boolean;\n\n /**\n * Checksum algorithm to use (e.g., \"sha256\", \"md5\").\n * Only relevant if computeChecksum is true.\n */\n checksumAlgorithm?: string;\n} & Callbacks;\n\n/**\n * Configuration options for creating an Uploadista client.\n *\n * This comprehensive configuration object allows customization of all aspects\n * of upload behavior including chunking, retries, authentication, storage,\n * network monitoring, and platform-specific services.\n *\n * @template UploadInput - The platform-specific file/blob type (e.g., File, Blob, Buffer)\n */\nexport type UploadistaClientOptions<UploadInput> = {\n /** Base URL of the Uploadista server (e.g., \"https://upload.example.com\") */\n baseUrl: string;\n\n /** Base path for Uploadista endpoints. Defaults to \"uploadista\" */\n uploadistaBasePath?: string;\n\n /** Storage backend identifier configured on the server */\n storageId: string;\n\n /** Retry delay intervals in milliseconds. Defaults to [1000, 3000, 5000] */\n retryDelays?: number[];\n\n /** Default chunk size in bytes for uploads */\n chunkSize: number;\n\n /** Number of parallel upload streams. Defaults to 1 (sequential) */\n parallelUploads?: number;\n\n /** Chunk size for parallel uploads. 
Required if parallelUploads > 1 */\n parallelChunkSize?: number;\n\n /** Service for computing checksums of uploaded chunks */\n checksumService: ChecksumService;\n\n /** Strategy configuration for determining upload approach (single/parallel/chunked) */\n uploadStrategy?: UploadStrategyConfig;\n\n /** Smart chunking configuration for adaptive chunk sizes based on network conditions */\n smartChunking?: SmartChunkerConfig;\n\n /** Network monitoring configuration for tracking upload performance */\n networkMonitoring?: NetworkMonitorConfig;\n\n /** Upload metrics configuration for performance insights */\n uploadMetrics?: UploadMetricsConfig;\n\n /** HTTP client with connection pooling support */\n httpClient: HttpClient;\n\n /** Service for generating unique IDs */\n generateId: IdGenerationService;\n\n /** Client-side storage for upload resumption data */\n clientStorage: ClientStorage;\n\n /** Platform-specific file reading service */\n fileReader: FileReaderService<UploadInput>;\n\n /** Logger for debugging and monitoring */\n logger: Logger;\n\n /** Service for computing file fingerprints for resumption */\n fingerprintService: FingerprintService<UploadInput>;\n\n /** Whether to store fingerprints for upload resumption. Defaults to true */\n storeFingerprintForResuming: boolean;\n\n /** Factory for creating WebSocket connections */\n webSocketFactory: WebSocketFactory;\n\n /** Factory for creating abort controllers */\n abortControllerFactory: AbortControllerFactory;\n\n /** Platform-specific service for timers and async operations */\n platformService: PlatformService;\n\n /** Global error handler for all upload operations */\n onError?: (error: Error) => void;\n\n /** WebSocket event handler for real-time upload/flow events */\n onEvent?: UploadistaWebSocketEventHandler;\n\n /**\n * Optional authentication configuration.\n * Supports two modes:\n * - Direct: Bring your own auth (headers, cookies, custom tokens)\n * - UploadistaCloud: Standard JWT token exchange with auth server\n *\n * If omitted, client operates in no-auth mode (backward compatible).\n *\n * @example Direct mode with Bearer token\n * ```typescript\n * auth: {\n * mode: 'direct',\n * getCredentials: () => ({\n * headers: { 'Authorization': 'Bearer token123' }\n * })\n * }\n * ```\n *\n * @example UploadistaCloud mode with auth server\n * ```typescript\n * auth: {\n * mode: 'uploadista-cloud',\n * authServerUrl: 'https://auth.myapp.com/token',\n * getCredentials: () => ({ username: 'user', password: 'pass' })\n * }\n * ```\n */\n auth?: AuthConfig;\n};\n\n/**\n * Default connection pooling configuration with health monitoring.\n *\n * Optimized for typical upload scenarios with support for HTTP/2 multiplexing,\n * connection reuse, and automatic retry on connection errors.\n */\nexport const defaultConnectionPoolingConfig: ConnectionPoolConfig = {\n /** Maximum concurrent connections per host */\n maxConnectionsPerHost: 8,\n /** Timeout for establishing new connections in milliseconds */\n connectionTimeout: 20000,\n /** Keep-alive timeout for idle connections in milliseconds */\n keepAliveTimeout: 90000,\n /** Enable HTTP/2 for connection multiplexing */\n enableHttp2: true,\n /** Automatically retry requests on connection errors */\n retryOnConnectionError: true,\n};\n\n/**\n * Creates a unified Uploadista client for file uploads and flow processing.\n *\n * This is the primary factory function for creating an Uploadista client instance.\n * It configures all upload capabilities including:\n * - Resumable chunked 
uploads with automatic retry\n * - Parallel upload streams for large files\n * - Smart chunking based on network conditions\n * - Flow-based file processing pipelines\n * - WebSocket support for real-time progress\n * - Authentication (direct, uploadista-cloud, or no-auth modes)\n *\n * The client automatically:\n * - Fetches server capabilities and adapts upload strategy\n * - Monitors network performance for optimal chunking\n * - Stores upload state for resumption across sessions\n * - Manages WebSocket connections for progress tracking\n *\n * @template UploadInput - Platform-specific file type (File, Blob, Buffer, etc.)\n * @param options - Comprehensive client configuration\n * @returns Uploadista client instance with upload and flow methods\n *\n * @example Basic browser setup\n * ```typescript\n * import { createUploadistaClient } from '@uploadista/client-core';\n * import { browserServices } from '@uploadista/client-browser';\n *\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'my-storage',\n * chunkSize: 5 * 1024 * 1024, // 5MB chunks\n * ...browserServices,\n * });\n *\n * // Upload a file\n * const { abort } = await client.upload(file, {\n * onProgress: (progress) => console.log(`${progress}% complete`),\n * onSuccess: (result) => console.log('Upload complete:', result),\n * });\n * ```\n *\n * @example Upload with flow processing\n * ```typescript\n * const client = createUploadistaClient(config);\n *\n * // Upload and process through a flow\n * const { abort, jobId } = await client.uploadWithFlow(file, {\n * flowId: 'image-optimization-flow',\n * storageId: 'images',\n * outputNodeId: 'optimized-output',\n * }, {\n * onProgress: (progress) => console.log(`${progress}%`),\n * onSuccess: (result) => console.log('Processed:', result),\n * });\n *\n * // Monitor job status\n * const status = await client.getJobStatus(jobId);\n * ```\n *\n * @example Parallel uploads for large files\n * ```typescript\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'large-files',\n * chunkSize: 10 * 1024 * 1024, // 10MB\n * parallelUploads: 4, // 4 concurrent streams\n * parallelChunkSize: 5 * 1024 * 1024, // 5MB per stream\n * ...browserServices,\n * });\n *\n * await client.upload(largeFile);\n * ```\n *\n * @example With authentication\n * ```typescript\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'protected',\n * chunkSize: 5 * 1024 * 1024,\n * auth: {\n * mode: 'direct',\n * getCredentials: async () => ({\n * headers: {\n * 'Authorization': `Bearer ${await getToken()}`,\n * },\n * }),\n * },\n * ...browserServices,\n * });\n * ```\n *\n * @example Smart chunking with network monitoring\n * ```typescript\n * const client = createUploadistaClient({\n * baseUrl: 'https://upload.example.com',\n * storageId: 'adaptive',\n * chunkSize: 1 * 1024 * 1024, // Fallback: 1MB\n * smartChunking: {\n * enabled: true,\n * minChunkSize: 256 * 1024, // 256KB min\n * maxChunkSize: 10 * 1024 * 1024, // 10MB max\n * },\n * networkMonitoring: {\n * slowThreshold: 50 * 1024, // 50 KB/s\n * fastThreshold: 5 * 1024 * 1024, // 5 MB/s\n * },\n * ...browserServices,\n * });\n *\n * // Monitor network conditions\n * const condition = client.getNetworkCondition();\n * console.log(`Network: ${condition.type} (confidence: ${condition.confidence})`);\n * ```\n *\n * @see {@link UploadistaClientOptions} for full configuration options\n * @see {@link 
UploadistaUploadOptions} for per-upload options\n */\nexport function createUploadistaClient<UploadInput>({\n baseUrl: _baseUrl,\n uploadistaBasePath = \"uploadista\",\n storageId,\n retryDelays = [1000, 3000, 5000],\n chunkSize,\n parallelUploads = 1,\n parallelChunkSize,\n uploadStrategy,\n smartChunking,\n networkMonitoring,\n uploadMetrics,\n checksumService,\n onEvent,\n generateId,\n httpClient,\n logger = createLogger(true),\n fileReader,\n fingerprintService,\n clientStorage,\n storeFingerprintForResuming = true,\n webSocketFactory,\n abortControllerFactory,\n platformService,\n auth,\n}: UploadistaClientOptions<UploadInput>) {\n const baseUrl = _baseUrl.replace(/\\/$/, \"\");\n\n // Create auth manager based on configuration\n const authManager: AuthManager = auth\n ? auth.mode === \"direct\"\n ? new DirectAuthManager(auth, platformService, logger)\n : new UploadistaCloudAuthManager(auth, httpClient)\n : new NoAuthManager();\n\n // Log auth mode for debugging (without exposing credentials)\n if (auth) {\n logger.log(\n `Authentication enabled in ${auth.mode} mode${auth.mode === \"uploadista-cloud\" ? ` (server: ${auth.authServerUrl})` : \"\"}`,\n );\n }\n\n // Create the unified API with auth support\n const uploadistaApi = createUploadistaApi(baseUrl, uploadistaBasePath, {\n logger,\n httpClient,\n authManager,\n webSocketFactory,\n });\n\n // Initialize smart chunking components\n const networkMonitor = new NetworkMonitor(networkMonitoring);\n const metrics = new UploadMetrics(uploadMetrics);\n\n // Cache for server capabilities\n let cachedCapabilities: DataStoreCapabilities | null = null;\n\n const getCapabilities = async (): Promise<DataStoreCapabilities> => {\n if (cachedCapabilities) {\n return cachedCapabilities;\n }\n cachedCapabilities = await uploadistaApi.getCapabilities(storageId);\n return cachedCapabilities;\n };\n\n // Initialize smart chunker with datastore constraints from server capabilities\n let smartChunker: SmartChunker;\n const initializeSmartChunker = async () => {\n if (smartChunker) return smartChunker;\n\n const capabilities = await getCapabilities();\n\n const datastoreConstraints =\n capabilities.minChunkSize &&\n capabilities.maxChunkSize &&\n capabilities.optimalChunkSize\n ? 
{\n            minChunkSize: capabilities.minChunkSize,\n            maxChunkSize: capabilities.maxChunkSize,\n            optimalChunkSize: capabilities.optimalChunkSize,\n            requiresOrderedChunks: capabilities.requiresOrderedChunks,\n          }\n        : undefined;\n\n    smartChunker = new SmartChunker(networkMonitor, {\n      enabled: true,\n      ...smartChunking,\n      fallbackChunkSize: chunkSize,\n      datastoreConstraints,\n    });\n\n    logger.log(\n      `Smart chunker initialized with datastore constraints: ${JSON.stringify(datastoreConstraints)}`,\n    );\n\n    return smartChunker;\n  };\n\n  // WebSocket management (uses uploadistaApi for both upload and flow websockets)\n  const wsManager = new UploadistaWebSocketManager(\n    uploadistaApi,\n    logger,\n    onEvent,\n  );\n\n  /**\n   * Upload a file\n   */\n  const upload = async (\n    file: UploadInput,\n    {\n      uploadLengthDeferred = false,\n      uploadSize,\n      onProgress,\n      onChunkComplete,\n      onSuccess,\n      onShouldRetry,\n      onError,\n    }: UploadistaUploadOptions = {},\n  ): Promise<{ abort: () => void }> => {\n    let uploadId: string | null = null;\n    let uploadIdStorageKey: string | null = null;\n\n    const fingerprint = await fingerprintService.computeFingerprint(\n      file,\n      `${baseUrl}/${uploadistaBasePath}/api/upload`,\n    );\n\n    logger.log(`fingerprint: ${fingerprint}`);\n    if (!fingerprint) {\n      throw new Error(\"unable to calculate fingerprint for this input file\");\n    }\n\n    const previousUploads = await findPreviousUploads(\n      clientStorage,\n      fingerprint,\n    );\n    if (previousUploads.length > 0 && previousUploads[0] != null) {\n      const previousUpload = resumeFromPreviousUpload(previousUploads[0]);\n      uploadIdStorageKey = previousUpload.clientStorageKey;\n      uploadId = previousUpload.uploadId;\n    }\n\n    const source = await fileReader.openFile(file, chunkSize);\n\n    const size = calculateFileSize(source.size, {\n      uploadLengthDeferred,\n      uploadSize,\n    });\n    source.size = size;\n\n    const initializedSmartChunker = await initializeSmartChunker();\n\n    const isSmartChunkingEnabled = smartChunking?.enabled !== false;\n    if (isSmartChunkingEnabled) {\n      metrics.startSession(fingerprint, size || 0, true);\n    }\n\n    const capabilities = await getCapabilities();\n\n    const negotiatedStrategy = negotiateUploadStrategy({\n      capabilities,\n      fileSize: size,\n      chunkSize,\n      parallelUploads,\n      uploadLengthDeferred,\n      strategyConfig: uploadStrategy,\n      logger,\n    });\n\n    if (negotiatedStrategy.strategy === \"parallel\") {\n      logger.log(\n        `Using parallel upload with ${negotiatedStrategy.parallelUploads} streams`,\n      );\n\n      const parallelResult = await startParallelUpload({\n        checksumService,\n        source,\n        storageId,\n        fingerprint,\n        uploadLengthDeferred,\n        parallelUploads: negotiatedStrategy.parallelUploads,\n        parallelChunkSize,\n        retryDelays,\n        smartChunker: initializedSmartChunker,\n        uploadistaApi,\n        logger,\n        smartChunking,\n        metrics,\n        clientStorage,\n        generateId,\n        storeFingerprintForResuming,\n        abortControllerFactory,\n        platformService,\n        openWebSocket: (id) => {\n          wsManager.openUploadWebSocket(id);\n          // Note: WebSocket opening is now async due to auth, but this callback is sync\n          // The WebSocket will be opened in the background\n          return null as unknown as WebSocketLike;\n        },\n        closeWebSocket: (id) => wsManager.closeUploadWebSocket(id),\n        terminate: (id) =>\n          terminate(id, uploadistaApi, platformService, retryDelays),\n        onProgress,\n        onChunkComplete,\n        onSuccess,\n        onError,\n      });\n\n      if (parallelResult) {\n        return {\n          abort: async () => {\n            await parallelResult.abort();\n          },\n        };\n      }\n\n      logger.log(\"Parallel upload failed, falling back to single upload\");\n    }\n\n    // 
Single upload path\n const result = await startSingleUpload({\n source,\n storageId,\n uploadId,\n platformService,\n uploadIdStorageKey,\n checksumService,\n fingerprint,\n uploadLengthDeferred,\n uploadistaApi,\n logger,\n clientStorage,\n generateId,\n storeFingerprintForResuming,\n openWebSocket: (id) => {\n wsManager.openUploadWebSocket(id);\n // Note: WebSocket opening is now async due to auth, but this callback is sync\n // The WebSocket will be opened in the background\n return null as unknown as WebSocketLike;\n },\n closeWebSocket: (id) => wsManager.closeUploadWebSocket(id),\n onProgress,\n onChunkComplete,\n onSuccess,\n onError,\n });\n\n if (result) {\n const abortController = abortControllerFactory.create();\n const { uploadId, uploadIdStorageKey, offset } = result;\n\n let timeoutId: Timeout | null = null;\n\n performUpload({\n platformService,\n uploadId,\n offset,\n source,\n uploadLengthDeferred,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n abortController,\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onRetry: (timeout) => {\n timeoutId = timeout;\n },\n onError,\n });\n\n return {\n abort: () => {\n abort({\n platformService,\n uploadId,\n uploadIdStorageKey,\n retryTimeout: timeoutId,\n shouldTerminate: true,\n abortController,\n uploadistaApi,\n retryDelays,\n clientStorage,\n });\n },\n };\n }\n\n return {\n abort: () => {},\n };\n };\n\n // Run validation on client creation\n validateAndThrow(\n {\n baseUrl,\n storageId,\n chunkSize,\n parallelUploads,\n parallelChunkSize,\n uploadStrategy,\n },\n logger,\n );\n\n /**\n * Upload a file through a flow (using streaming-input-node)\n */\n const uploadWithFlow = async (\n file: UploadInput,\n flowConfig: FlowUploadConfig,\n {\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onJobStart,\n onError,\n }: Omit<\n UploadistaUploadOptions,\n \"uploadLengthDeferred\" | \"uploadSize\" | \"metadata\"\n > = {},\n ): Promise<{\n abort: () => Promise<void>;\n pause: () => Promise<void>;\n jobId: string;\n }> => {\n const source = await fileReader.openFile(file, chunkSize);\n\n const initializedSmartChunker = await initializeSmartChunker();\n\n const isSmartChunkingEnabled = smartChunking?.enabled !== false;\n if (isSmartChunkingEnabled) {\n const fingerprint = await fingerprintService.computeFingerprint(\n file,\n `${baseUrl}/${uploadistaBasePath}/api/flow`,\n );\n metrics.startSession(fingerprint || \"unknown\", source.size || 0, true);\n }\n\n const result = await startFlowUpload({\n source,\n flowConfig,\n uploadistaApi,\n logger,\n platformService,\n openWebSocket: (id) => wsManager.openFlowWebSocket(id),\n closeWebSocket: (id) => wsManager.closeWebSocket(id),\n onProgress,\n onChunkComplete,\n onSuccess,\n onJobStart,\n onError,\n });\n\n if (!result) {\n return {\n abort: async () => {},\n pause: async () => {},\n jobId: \"\",\n };\n }\n\n const { jobId, uploadFile, inputNodeId } = result;\n const abortController = abortControllerFactory.create();\n\n // Open upload WebSocket to receive upload progress events\n await wsManager.openUploadWebSocket(uploadFile.id);\n\n let timeoutId: Timeout | null = null;\n\n performFlowUpload({\n jobId,\n uploadFile,\n inputNodeId,\n offset: uploadFile.offset,\n source,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n abortController,\n onProgress,\n onChunkComplete,\n onSuccess,\n onShouldRetry,\n onRetry: 
(timeout) => {\n timeoutId = timeout;\n },\n onError,\n });\n\n return {\n abort: async () => {\n // First, tell the server to cancel the flow\n try {\n await uploadistaApi.cancelFlow(jobId);\n logger.log(`Flow cancelled on server: ${jobId}`);\n } catch (err) {\n // Log but don't throw - client cleanup should still happen\n logger.log(`Failed to cancel flow on server: ${err}`);\n }\n\n // Then do client-side cleanup\n abortController.abort();\n if (timeoutId) {\n platformService.clearTimeout(timeoutId);\n }\n // Close both flow and upload WebSockets\n wsManager.closeWebSocket(jobId);\n wsManager.closeUploadWebSocket(uploadFile.id);\n },\n pause: async () => {\n await uploadistaApi.pauseFlow(jobId);\n },\n jobId,\n };\n };\n\n /**\n * Upload multiple inputs through a flow with parallel coordination.\n * Supports mixed input types: File/Blob (upload), URL strings (fetch), structured data.\n *\n * @param inputs - Record of nodeId to input data (File, URL string, or object)\n * @param flowConfig - Flow configuration\n * @param callbacks - Upload lifecycle callbacks\n * @returns Abort controller and job ID\n */\n const multiInputFlowUpload = async (\n inputs: Record<string, unknown>,\n flowConfig: FlowUploadConfig,\n {\n onProgress,\n onChunkComplete,\n onShouldRetry,\n onJobStart,\n onError,\n onInputProgress,\n onInputComplete,\n onInputError,\n }: Omit<\n UploadistaUploadOptions,\n \"uploadLengthDeferred\" | \"uploadSize\" | \"metadata\"\n > & {\n onInputProgress?: (\n nodeId: string,\n progress: number,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n onInputComplete?: (nodeId: string) => void;\n onInputError?: (nodeId: string, error: Error) => void;\n } = {},\n ): Promise<{\n abort: () => Promise<void>;\n pause: () => Promise<void>;\n jobId: string;\n }> => {\n // Start the flow and get job ID\n const { job } = await uploadistaApi.runFlow(\n flowConfig.flowId,\n flowConfig.storageId || storageId,\n {},\n );\n const jobId = job.id;\n\n logger.log(`Multi-input flow started: ${jobId}`);\n onJobStart?.(jobId);\n\n // Open flow WebSocket for flow events\n await wsManager.openFlowWebSocket(jobId);\n\n const abortControllers: Map<\n string,\n ReturnType<typeof abortControllerFactory.create>\n > = new Map();\n const uploadIds: Map<string, string> = new Map();\n const timeoutIds: Timeout[] = [];\n\n try {\n // Initialize all inputs in parallel\n const inputEntries = Object.entries(inputs);\n const initPromises = inputEntries.map(async ([nodeId, data]) => {\n const inputType = detectInputType(data);\n\n if (inputType === \"file\") {\n // File/Blob upload\n const file = data as UploadInput;\n const source = await fileReader.openFile(file, chunkSize);\n\n const initResult = await initializeFlowInput({\n nodeId,\n jobId,\n source,\n storageId: flowConfig.storageId || storageId,\n metadata: {},\n uploadistaApi,\n logger,\n platformService,\n callbacks: {\n onStart: ({ uploadId }) => {\n uploadIds.set(nodeId, uploadId);\n // Open WebSocket for this upload\n wsManager.openUploadWebSocket(uploadId);\n },\n onError,\n },\n });\n\n return {\n nodeId,\n uploadFile: initResult.uploadFile,\n source,\n inputType,\n };\n } else if (inputType === \"url\") {\n // URL input - send to server immediately\n await uploadistaApi.resumeFlow(\n jobId,\n nodeId,\n {\n operation: \"url\",\n url: data as string,\n storageId: flowConfig.storageId || storageId,\n },\n { contentType: \"application/json\" },\n );\n\n return { nodeId, uploadFile: null, source: null, inputType };\n } else {\n // Structured data 
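inputs (plain objects) are forwarded to the flow as JSON.\n          //\n          // A minimal usage sketch of this multi-input path (assumed node ids and URLs):\n          //\n          //   const { jobId, abort } = await client.multiInputFlowUpload(\n          //     { 'file-input': file, 'url-input': 'https://example.com/photo.jpg' },\n          //     { flowId: 'my-flow', storageId: 'primary' },\n          //     { onInputComplete: (nodeId) => console.log(nodeId, 'done') },\n          //   );\n          //\n          // Structured data 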
input\n await uploadistaApi.resumeFlow(jobId, nodeId, data, {\n contentType: \"application/json\",\n });\n\n return { nodeId, uploadFile: null, source: null, inputType };\n }\n });\n\n const initializedInputs = await Promise.all(initPromises);\n\n // Upload all file inputs in parallel\n const initializedSmartChunker = await initializeSmartChunker();\n const uploadPromises = initializedInputs\n .filter(\n (input) =>\n input.inputType === \"file\" && input.uploadFile && input.source,\n )\n .map(async ({ nodeId, uploadFile, source }) => {\n const abortController = abortControllerFactory.create();\n abortControllers.set(nodeId, abortController);\n\n const metrics = new UploadMetrics({\n enableDetailedMetrics: smartChunking?.enabled !== false,\n });\n\n if (!uploadFile || !source) {\n throw new Error(`Missing uploadFile or source for node ${nodeId}`);\n }\n\n try {\n await uploadInputChunks({\n nodeId,\n jobId,\n uploadFile,\n source,\n offset: uploadFile.offset,\n retryAttempt: 0,\n abortController,\n retryDelays,\n smartChunker: initializedSmartChunker,\n uploadistaApi,\n logger,\n smartChunking,\n metrics,\n platformService,\n onRetry: (timeout) => {\n timeoutIds.push(timeout);\n },\n callbacks: {\n onProgress: (uploadId, bytesUploaded, totalBytes) => {\n onProgress?.(uploadId, bytesUploaded, totalBytes);\n\n // Calculate progress percentage\n const progress = totalBytes\n ? Math.round((bytesUploaded / totalBytes) * 100)\n : 0;\n onInputProgress?.(\n nodeId,\n progress,\n bytesUploaded,\n totalBytes,\n );\n },\n onChunkComplete,\n onShouldRetry,\n },\n });\n\n // Finalize this input\n await finalizeFlowInput({\n nodeId,\n jobId,\n uploadId: uploadFile.id,\n uploadistaApi,\n logger,\n callbacks: { onError },\n });\n\n onInputComplete?.(nodeId);\n } catch (err) {\n const error = err instanceof Error ? err : new Error(String(err));\n onInputError?.(nodeId, error);\n throw error;\n }\n });\n\n await Promise.all(uploadPromises);\n\n logger.log(`All inputs uploaded for job: ${jobId}`);\n } catch (err) {\n const error = err instanceof Error ? 
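/* keep Error instances as-is; wrap other thrown values so handlers always get an Error */ 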
err : new Error(String(err));\n logger.log(`Multi-input flow upload failed: ${error.message}`);\n onError?.(error);\n throw error;\n }\n\n return {\n abort: async () => {\n try {\n await uploadistaApi.cancelFlow(jobId);\n logger.log(`Flow cancelled on server: ${jobId}`);\n } catch (err) {\n logger.log(`Failed to cancel flow on server: ${err}`);\n }\n\n // Abort all uploads\n for (const controller of abortControllers.values()) {\n controller.abort();\n }\n\n // Clear all timeouts\n for (const timeoutId of timeoutIds) {\n platformService.clearTimeout(timeoutId);\n }\n\n // Close all WebSockets\n wsManager.closeWebSocket(jobId);\n for (const uploadId of uploadIds.values()) {\n wsManager.closeUploadWebSocket(uploadId);\n }\n },\n pause: async () => {\n await uploadistaApi.pauseFlow(jobId);\n },\n jobId,\n };\n };\n\n return {\n // Upload operations\n upload,\n uploadWithFlow,\n multiInputFlowUpload,\n abort: (params: Parameters<typeof abort>[0]) => abort(params),\n\n // Flow operations\n getFlow: async (flowId: string) => {\n const { status, flow } = await uploadistaApi.getFlow(flowId);\n return { status, flow };\n },\n\n runFlow: async ({\n flowId,\n inputs,\n storageId: flowStorageId,\n }: {\n flowId: string;\n inputs: Record<string, unknown>;\n storageId?: string;\n }) => {\n const { status, job } = await uploadistaApi.runFlow(\n flowId,\n flowStorageId || storageId,\n inputs,\n );\n return { status, job };\n },\n\n resumeFlow: async ({\n jobId,\n nodeId,\n newData,\n contentType,\n }: {\n jobId: string;\n nodeId: string;\n newData: unknown;\n contentType?: \"application/json\" | \"application/octet-stream\";\n }) => {\n return uploadistaApi.resumeFlow(jobId, nodeId, newData, {\n contentType,\n });\n },\n\n pauseFlow: async (jobId: string) => {\n return uploadistaApi.pauseFlow(jobId);\n },\n\n cancelFlow: async (jobId: string) => {\n return uploadistaApi.cancelFlow(jobId);\n },\n\n /**\n * Find input nodes in a flow.\n *\n * Discovers all input nodes in a flow and returns their metadata.\n * Useful for auto-discovering input node IDs in single-input flows.\n *\n * @param flowId - The flow ID to inspect\n * @returns Discovery result with input node information\n *\n * @example\n * ```typescript\n * const { inputNodes, single } = await client.findInputNode(\"my-flow\");\n *\n * if (single) {\n * // Flow has exactly one input node, can auto-map input data\n * const inputNodeId = inputNodes[0].id;\n * } else {\n * // Multi-input flow, requires explicit node IDs\n * console.log(\"Input nodes:\", inputNodes.map(n => n.id));\n * }\n * ```\n */\n findInputNode: async (flowId: string) => {\n const { flow } = await uploadistaApi.getFlow(flowId);\n const inputNodes = flow.nodes\n .filter((node) => node.type === \"input\")\n .map((node) => ({\n id: node.id,\n type: node.type,\n name: node.name,\n }));\n\n return {\n inputNodes,\n single: inputNodes.length === 1,\n };\n },\n\n /**\n * Execute a flow with arbitrary inputs (URL, structured data, etc.).\n *\n * This method supports flexible flow execution beyond traditional file uploads.\n * It directly executes flows with provided inputs, bypassing chunked upload for\n * non-file operations like URL fetching or structured data processing.\n *\n * @param flowId - The flow ID to execute\n * @param inputs - Map of node IDs to their input data\n * @param options - Optional execution options\n * @returns Job status and initial result\n *\n * @example\n * ```typescript\n * // URL-based flow execution\n * const { job } = await 
client.executeFlowWithInputs(\"optimize-flow\", {\n * \"input-node\": {\n * operation: \"url\",\n * url: \"https://example.com/image.jpg\",\n * storageId: \"s3\"\n * }\n * });\n *\n * // Listen for flow events\n * client.openFlowWebSocket(job.id);\n * client.subscribeToEvents((event) => {\n * if (event.eventType === EventType.FlowEnd) {\n * console.log(\"Flow complete:\", event.outputs);\n * }\n * });\n * ```\n */\n executeFlowWithInputs: async (\n flowId: string,\n inputs: Record<string, unknown>,\n options?: {\n storageId?: string;\n onJobStart?: (jobId: string) => void;\n },\n ) => {\n // Execute flow with provided inputs\n const { status, job } = await uploadistaApi.runFlow(\n flowId,\n options?.storageId || storageId,\n inputs,\n );\n\n // Notify callback if job started successfully\n if (job?.id && options?.onJobStart) {\n options.onJobStart(job.id);\n }\n\n return { status, job };\n },\n\n // Job operations (unified for both uploads and flows)\n getJobStatus: async (jobId: string) => {\n return uploadistaApi.getJobStatus(jobId);\n },\n\n // WebSocket management methods\n openUploadWebSocket: (uploadId: string) =>\n wsManager.openUploadWebSocket(uploadId),\n openFlowWebSocket: (jobId: string) => wsManager.openFlowWebSocket(jobId),\n openWebSocket: (id: string) => wsManager.openWebSocket(id),\n closeWebSocket: (id: string) => wsManager.closeWebSocket(id),\n closeAllWebSockets: () => wsManager.closeAll(),\n sendPing: (jobId: string) => wsManager.sendPing(jobId),\n isWebSocketConnected: (id: string) => wsManager.isConnected(id),\n getWebSocketConnectionCount: () => wsManager.getConnectionCount(),\n getWebSocketConnectionCountByType: () =>\n wsManager.getConnectionCountByType(),\n\n // Smart chunking utilities\n getNetworkMetrics: () => networkMonitor.getCurrentMetrics(),\n getNetworkCondition: () => networkMonitor.getNetworkCondition(),\n getChunkingInsights: () => metrics.getPerformanceInsights(),\n exportMetrics: () => metrics.exportMetrics(),\n\n // Connection pooling utilities\n getConnectionMetrics: () => uploadistaApi.getConnectionMetrics(),\n getDetailedConnectionMetrics: () =>\n uploadistaApi.getDetailedConnectionMetrics(),\n warmupConnections: (urls: string[]) =>\n uploadistaApi.warmupConnections(urls),\n\n // Smart chunking insights\n getConnectionPoolingInsights: async () => {\n const chunker = await initializeSmartChunker();\n return chunker.getConnectionPoolingInsights();\n },\n\n resetMetrics: async () => {\n networkMonitor.reset();\n const chunker = await initializeSmartChunker();\n chunker.reset();\n metrics.reset();\n },\n\n // Configuration validation\n validateConfiguration: (options: UploadistaClientOptions<UploadInput>) => {\n return validateConfiguration(options, defaultClientCapabilities, logger);\n },\n\n validateConfigurationAsync: async (\n options: UploadistaClientOptions<UploadInput>,\n ) => {\n const errors: string[] = [];\n const warnings: string[] = [];\n\n // Fetch capabilities using the authenticated HTTP client\n const capabilities = await uploadistaApi.getCapabilities(\n options.storageId,\n );\n\n const validation = validateConfiguration(options, capabilities, logger);\n errors.push(...validation.errors);\n warnings.push(...validation.warnings);\n\n return {\n valid: errors.length === 0,\n errors,\n warnings,\n capabilities,\n };\n },\n\n getCapabilities,\n };\n}\n\n/**\n * Uploadista client instance type.\n *\n * The client provides methods for:\n * - **Upload operations**: upload(), uploadWithFlow()\n * - **Flow operations**: getFlow(), runFlow(), 
resumeFlow()\n * - **Job management**: getJobStatus()\n * - **WebSocket management**: openUploadWebSocket(), openFlowWebSocket(), closeWebSocket()\n * - **Metrics and diagnostics**: getNetworkMetrics(), getChunkingInsights(), exportMetrics()\n * - **Connection pooling**: getConnectionMetrics(), warmupConnections()\n * - **Configuration validation**: validateConfiguration(), validateConfigurationAsync()\n *\n * @example Basic usage\n * ```typescript\n * const client = createUploadistaClient(config);\n *\n * // Upload a file\n * await client.upload(file, {\n * onProgress: (progress) => console.log(`${progress}%`),\n * onSuccess: (result) => console.log('Done:', result.id),\n * });\n *\n * // Get network metrics\n * const metrics = client.getNetworkMetrics();\n * console.log(`Speed: ${metrics.averageSpeed / 1024} KB/s`);\n * ```\n *\n * @see {@link createUploadistaClient} for creating an instance\n */\nexport type UploadistaClient = ReturnType<typeof createUploadistaClient>;\n","/**\n * Generic event type that the subscription manager can handle\n */\nexport interface GenericEvent {\n type: string;\n data?: unknown;\n}\n\n/**\n * Event handler callback function\n */\nexport type SubscriptionEventHandler<T = GenericEvent> = (event: T) => void;\n\n/**\n * Unsubscribe function returned from subscriptions\n */\nexport type UnsubscribeFunction = () => void;\n\n/**\n * Event source that provides subscription capabilities\n */\nexport interface EventSource<T = GenericEvent> {\n /**\n * Subscribe to events from this source\n * @returns Unsubscribe function to clean up the subscription\n */\n subscribe(handler: SubscriptionEventHandler<T>): UnsubscribeFunction;\n}\n\n/**\n * Options for event filtering\n */\nexport interface EventFilterOptions {\n /**\n * Filter events by type (exact match)\n */\n eventType?: string;\n\n /**\n * Filter events by upload/job ID\n * If provided, only events with matching ID will be passed to the handler\n */\n uploadId?: string | null;\n\n /**\n * Custom filter function for advanced filtering\n * Return true to pass the event to the handler\n */\n customFilter?: (event: GenericEvent) => boolean;\n}\n\n/**\n * Subscription information for tracking\n */\ninterface SubscriptionInfo<T extends GenericEvent = GenericEvent> {\n unsubscribe: UnsubscribeFunction;\n handler: SubscriptionEventHandler<T>;\n filter?: EventFilterOptions;\n}\n\n/**\n * Platform-agnostic event subscription manager that handles event filtering,\n * subscription tracking, and automatic cleanup.\n *\n * This manager simplifies event handling by:\n * - Filtering events by type and/or ID\n * - Tracking all active subscriptions\n * - Providing cleanup methods to unsubscribe from all events\n * - Supporting custom filter functions for advanced scenarios\n *\n * @example Basic event subscription\n * ```typescript\n * const manager = new EventSubscriptionManager(eventSource);\n *\n * manager.subscribe(\n * (event) => console.log('Upload progress:', event),\n * { eventType: 'UPLOAD_PROGRESS', uploadId: 'abc123' }\n * );\n *\n * // Clean up all subscriptions when done\n * manager.cleanup();\n * ```\n *\n * @example Multiple filtered subscriptions\n * ```typescript\n * const manager = new EventSubscriptionManager(eventSource);\n *\n * // Subscribe to progress events for specific upload\n * manager.subscribe(\n * onProgress,\n * { eventType: 'UPLOAD_PROGRESS', uploadId: currentUploadId }\n * );\n *\n * // Subscribe to error events for any upload\n * manager.subscribe(\n * onError,\n * { eventType: 'UPLOAD_ERROR' }\n 
* );\n *\n * // Subscribe to all events with custom filtering\n * manager.subscribe(\n * onEvent,\n * { customFilter: (e) => e.data?.priority === 'high' }\n * );\n * ```\n */\nexport class EventSubscriptionManager<T extends GenericEvent = GenericEvent> {\n private subscriptions: SubscriptionInfo<T>[] = [];\n\n /**\n * Create a new EventSubscriptionManager\n *\n * @param eventSource - Source to subscribe to for events\n */\n constructor(private readonly eventSource: EventSource<T>) {}\n\n /**\n * Subscribe to events with optional filtering\n *\n * @param handler - Callback function to invoke when matching events occur\n * @param filter - Optional filter options to narrow down which events trigger the handler\n * @returns Unsubscribe function to remove this specific subscription\n *\n * @example Subscribe to specific event type\n * ```typescript\n * const unsubscribe = manager.subscribe(\n * (event) => console.log('Progress:', event),\n * { eventType: 'UPLOAD_PROGRESS' }\n * );\n *\n * // Later, unsubscribe\n * unsubscribe();\n * ```\n */\n subscribe(\n handler: SubscriptionEventHandler<T>,\n filter?: EventFilterOptions,\n ): UnsubscribeFunction {\n // Create a wrapper handler that applies filtering\n const wrappedHandler: SubscriptionEventHandler<T> = (event: T) => {\n if (this.shouldHandleEvent(event, filter)) {\n handler(event);\n }\n };\n\n // Subscribe to the event source with the wrapped handler\n const unsubscribe = this.eventSource.subscribe(wrappedHandler);\n\n // Track this subscription\n const subscription: SubscriptionInfo<T> = {\n unsubscribe,\n handler: wrappedHandler,\n filter,\n };\n\n this.subscriptions.push(subscription);\n\n // Return unsubscribe function that also removes from tracking\n return () => {\n const index = this.subscriptions.indexOf(subscription);\n if (index !== -1) {\n this.subscriptions.splice(index, 1);\n }\n unsubscribe();\n };\n }\n\n /**\n * Check if an event matches the filter criteria\n *\n * @param event - Event to check\n * @param filter - Filter options to apply\n * @returns True if the event passes all filters\n */\n private shouldHandleEvent(event: T, filter?: EventFilterOptions): boolean {\n if (!filter) {\n return true;\n }\n\n // Check event type filter\n if (filter.eventType && event.type !== filter.eventType) {\n return false;\n }\n\n // Check upload ID filter\n if (filter.uploadId !== undefined) {\n const eventData = event.data as { id?: string } | undefined;\n const eventId = eventData?.id;\n\n // If filter.uploadId is null, only pass events without an ID\n // If filter.uploadId is a string, only pass events with matching ID\n if (filter.uploadId === null) {\n if (eventId !== undefined) {\n return false;\n }\n } else if (eventId !== filter.uploadId) {\n return false;\n }\n }\n\n // Check custom filter\n if (filter.customFilter) {\n // Cast to GenericEvent for custom filter as it operates on the base interface\n return filter.customFilter(event as unknown as GenericEvent);\n }\n\n return true;\n }\n\n /**\n * Get the number of active subscriptions\n *\n * @returns Number of tracked subscriptions\n */\n getSubscriptionCount(): number {\n return this.subscriptions.length;\n }\n\n /**\n * Check if there are any active subscriptions\n *\n * @returns True if at least one subscription is active\n */\n hasSubscriptions(): boolean {\n return this.subscriptions.length > 0;\n }\n\n /**\n * Unsubscribe from all tracked subscriptions and clear the subscription list\n *\n * This is typically called when disposing of a component or cleaning up 
resources.\n *\n * @example Cleanup in framework hooks\n * ```typescript\n * // React\n * useEffect(() => {\n * const manager = new EventSubscriptionManager(eventSource);\n * manager.subscribe(handler, filter);\n *\n * return () => manager.cleanup();\n * }, []);\n *\n * // Vue\n * onUnmounted(() => {\n * manager.cleanup();\n * });\n * ```\n */\n cleanup(): void {\n for (const subscription of this.subscriptions) {\n subscription.unsubscribe();\n }\n this.subscriptions = [];\n }\n\n /**\n * Update the upload ID filter for all subscriptions that have an uploadId filter\n *\n * This is useful when the current upload changes and you want to update\n * all subscriptions to listen for the new upload's events.\n *\n * @param newUploadId - New upload ID to filter events by\n *\n * @example Update upload ID when starting new upload\n * ```typescript\n * const manager = new EventSubscriptionManager(eventSource);\n * manager.subscribe(onProgress, { eventType: 'UPLOAD_PROGRESS', uploadId: null });\n *\n * // When upload starts\n * manager.updateUploadIdFilter(uploadId);\n * ```\n */\n updateUploadIdFilter(newUploadId: string | null): void {\n for (const subscription of this.subscriptions) {\n if (subscription.filter && subscription.filter.uploadId !== undefined) {\n subscription.filter.uploadId = newUploadId;\n }\n }\n }\n}\n","import type { FlowEvent, TypedOutput } from \"@uploadista/core/flow\";\nimport { EventType } from \"@uploadista/core/flow\";\nimport type { UploadFile } from \"@uploadista/core/types\";\nimport type { FlowUploadOptions } from \"../types/flow-upload-options\";\nimport { detectInputType } from \"../utils/input-detection\";\n\n/**\n * Flow upload status representing the current state of a flow upload lifecycle.\n * Flow uploads progress through: idle → uploading → processing → success/error/aborted\n */\nexport type FlowUploadStatus =\n | \"idle\"\n | \"uploading\"\n | \"processing\"\n | \"success\"\n | \"error\"\n | \"aborted\";\n\n/**\n * Complete state information for a flow upload operation.\n * Tracks both the upload phase (file transfer) and processing phase (flow execution).\n */\nexport interface FlowUploadState {\n /** Current upload status */\n status: FlowUploadStatus;\n /** Upload progress percentage (0-100) */\n progress: number;\n /** Number of bytes uploaded */\n bytesUploaded: number;\n /** Total bytes to upload, null if unknown */\n totalBytes: number | null;\n /** Error if upload or processing failed */\n error: Error | null;\n /** Unique identifier for the flow execution job */\n jobId: string | null;\n /** Whether the flow processing has started */\n flowStarted: boolean;\n /** Name of the currently executing flow node */\n currentNodeName: string | null;\n /** Type of the currently executing flow node */\n currentNodeType: string | null;\n /**\n * Complete typed outputs from all output nodes in the flow.\n * Each output includes nodeId, optional nodeType, data, and timestamp.\n * Available when status is \"success\".\n */\n flowOutputs: TypedOutput[] | null;\n}\n\n/**\n * State for a single input in a multi-input flow.\n */\nexport interface InputExecutionState {\n /** Input node ID */\n nodeId: string;\n /** Input type (file, url, data) */\n type: \"file\" | \"url\" | \"data\";\n /** Current status of this input */\n status: \"pending\" | \"uploading\" | \"complete\" | \"error\";\n /** Progress percentage for file uploads (0-100) */\n progress: number;\n /** Bytes uploaded for file uploads */\n bytesUploaded: number;\n /** Total bytes for file uploads */\n 
totalBytes: number | null;\n /** Error if this input failed */\n error: Error | null;\n /** Abort controller for this specific input */\n abortController: FlowUploadAbortController | null;\n}\n\n/**\n * Callbacks that FlowManager invokes during the flow upload lifecycle\n */\nexport interface FlowManagerCallbacks {\n /**\n * Called when the flow upload state changes\n */\n onStateChange: (state: FlowUploadState) => void;\n\n /**\n * Called when upload progress updates\n * @param progress - Progress percentage (0-100)\n * @param bytesUploaded - Number of bytes uploaded\n * @param totalBytes - Total bytes to upload, null if unknown\n */\n onProgress?: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n\n /**\n * Called when a chunk completes\n * @param chunkSize - Size of the completed chunk\n * @param bytesAccepted - Total bytes accepted so far\n * @param bytesTotal - Total bytes to upload, null if unknown\n */\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n\n /**\n * Called when the flow completes successfully (receives full flow outputs)\n * Each output includes nodeId, optional nodeType (e.g., \"storage-output-v1\"), data, and timestamp.\n *\n * @param outputs - Array of typed outputs from all output nodes\n *\n * @example\n * ```typescript\n * onFlowComplete: (outputs) => {\n * for (const output of outputs) {\n * console.log(`${output.nodeId} (${output.nodeType}):`, output.data);\n * }\n * }\n * ```\n */\n onFlowComplete?: (outputs: TypedOutput[]) => void;\n\n /**\n * Called when upload succeeds (receives typed outputs from all output nodes)\n * Each output includes nodeId, optional nodeType (e.g., \"storage-output-v1\"), data, and timestamp.\n *\n * @param outputs - Array of typed outputs from all output nodes\n *\n * @example\n * ```typescript\n * onSuccess: (outputs) => {\n * for (const output of outputs) {\n * console.log(`${output.nodeId} completed:`, output.data);\n * }\n * }\n * ```\n */\n onSuccess?: (outputs: TypedOutput[]) => void;\n\n /**\n * Called when upload or flow processing fails with an error\n * @param error - The error that occurred\n */\n onError?: (error: Error) => void;\n\n /**\n * Called when upload or flow is aborted\n */\n onAbort?: () => void;\n}\n\n/**\n * Generic flow execution input type - can be any value that the flow execution client accepts.\n * Common types include File, Blob, string (for URLs), or structured data objects.\n *\n * @remarks\n * The flexibility of this type enables different flow execution patterns:\n * - File/Blob: Traditional chunked file upload with init/finalize operations\n * - string (URL): Direct file fetch from external URL\n * - object: Structured data for non-file input nodes (future)\n */\nexport type FlowUploadInput = unknown;\n\n/**\n * Flow configuration for upload\n */\nexport interface FlowConfig {\n flowId: string;\n storageId: string;\n outputNodeId?: string;\n metadata?: Record<string, string>;\n}\n\n/**\n * Abort and pause controller interface for canceling/pausing flow uploads\n */\nexport interface FlowUploadAbortController {\n abort: () => void | Promise<void>;\n pause: () => void | Promise<void>;\n}\n\n/**\n * Internal upload options used by the flow upload function.\n * The upload phase always returns UploadFile, regardless of the final TOutput type.\n */\nexport interface InternalFlowUploadOptions {\n onJobStart?: (jobId: string) => void;\n onProgress?: (\n uploadId: string,\n bytesUploaded: number,\n 
totalBytes: number | null,\n ) => void;\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n onSuccess?: (result: UploadFile) => void;\n onError?: (error: Error) => void;\n onAbort?: () => void;\n onShouldRetry?: (error: Error, retryAttempt: number) => boolean;\n}\n\n/**\n * Flow upload function that performs the actual upload with flow processing.\n * Returns a promise that resolves to an abort controller with pause capability.\n *\n * Note: The upload phase onSuccess always receives UploadFile. The final TOutput\n * result comes from the flow execution and is handled via FlowEnd events.\n */\nexport type FlowUploadFunction<TInput = FlowUploadInput> = (\n input: TInput,\n flowConfig: FlowConfig,\n options: InternalFlowUploadOptions,\n) => Promise<FlowUploadAbortController>;\n\n/**\n * Callbacks for tracking individual input progress in multi-input flows\n */\nexport interface MultiInputCallbacks {\n /**\n * Called when an input's progress updates\n * @param nodeId - The input node ID\n * @param progress - Progress percentage (0-100)\n * @param bytesUploaded - Bytes uploaded for this input\n * @param totalBytes - Total bytes for this input\n */\n onInputProgress?: (\n nodeId: string,\n progress: number,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n\n /**\n * Called when an input completes successfully\n * @param nodeId - The input node ID\n */\n onInputComplete?: (nodeId: string) => void;\n\n /**\n * Called when an input fails\n * @param nodeId - The input node ID\n * @param error - The error that occurred\n */\n onInputError?: (nodeId: string, error: Error) => void;\n}\n\n/**\n * Multi-input flow upload function that coordinates multiple inputs in a single flow.\n * Platform packages should implement this to enable parallel multi-input upload support.\n *\n * @param inputs - Record of nodeId to input data (File, URL string, or structured data)\n * @param flowConfig - Flow configuration\n * @param options - Upload callbacks and configuration\n * @param multiInputCallbacks - Per-input progress tracking callbacks\n * @returns Promise resolving to abort controller for the entire flow execution\n *\n * @example\n * ```typescript\n * const uploadFn: MultiInputFlowUploadFunction = async (inputs, flowConfig, options, callbacks) => {\n * // 1. Start flow and create job\n * const jobId = await startFlow(flowConfig.flowId, flowConfig.storageId);\n *\n * // 2. Initialize all inputs in parallel using orchestrator functions\n * const initPromises = Object.entries(inputs).map(([nodeId, data]) =>\n * initializeFlowInput({ nodeId, jobId, source: data, ... })\n * );\n *\n * // 3. Upload files in parallel\n * // 4. Finalize all inputs\n * // 5. 
Return abort controller\n * };\n * ```\n */\nexport type MultiInputFlowUploadFunction = (\n inputs: Record<string, unknown>,\n flowConfig: FlowConfig,\n options: InternalFlowUploadOptions,\n multiInputCallbacks?: MultiInputCallbacks,\n) => Promise<FlowUploadAbortController>;\n\n/**\n * Initial state for a new flow upload\n */\nconst initialState: FlowUploadState = {\n status: \"idle\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes: null,\n error: null,\n jobId: null,\n flowStarted: false,\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs: null,\n};\n\n/**\n * Platform-agnostic flow execution manager that handles flow state machine,\n * progress tracking, flow event handling, error handling, abort, pause, reset, and retry logic.\n *\n * Supports multiple input types through generic TInput parameter:\n * - File/Blob: Chunked file upload with progress tracking\n * - string (URL): Direct file fetch from external source\n * - object: Structured data for custom input nodes\n *\n * Framework packages (React, Vue, React Native) should wrap this manager\n * with framework-specific hooks/composables.\n *\n * @template TInput - The type of input data accepted by the flow (File, Blob, string, object, etc.)\n *\n * @example\n * ```typescript\n * // File upload flow\n * const fileFlowManager = new FlowManager<File>(...);\n * await fileFlowManager.upload(myFile);\n *\n * // URL fetch flow\n * const urlFlowManager = new FlowManager<string>(...);\n * await urlFlowManager.upload(\"https://example.com/image.jpg\");\n *\n * // Structured data flow\n * const dataFlowManager = new FlowManager<{ text: string }>(...);\n * await dataFlowManager.upload({ text: \"Process this\" });\n * ```\n */\nexport class FlowManager<TInput = FlowUploadInput> {\n private state: FlowUploadState;\n private abortController: FlowUploadAbortController | null = null;\n private inputStates: Map<string, InputExecutionState> = new Map();\n /** Tracks the nodeId when executing a single-input flow via executeFlow() */\n private currentSingleInputNodeId: string | null = null;\n\n /**\n * Create a new FlowManager\n *\n * @param flowUploadFn - Flow upload function to use for uploads\n * @param callbacks - Callbacks to invoke during flow upload lifecycle\n * @param options - Flow upload configuration options\n * @param multiInputUploadFn - Optional multi-input upload function for executeFlow()\n */\n constructor(\n private readonly flowUploadFn: FlowUploadFunction<TInput>,\n private readonly callbacks: FlowManagerCallbacks,\n private readonly options: FlowUploadOptions,\n private readonly multiInputUploadFn?: MultiInputFlowUploadFunction,\n ) {\n this.state = { ...initialState };\n }\n\n /**\n * Get the current flow upload state\n */\n getState(): FlowUploadState {\n return { ...this.state };\n }\n\n /**\n * Check if an upload or flow is currently active\n */\n isUploading(): boolean {\n return (\n this.state.status === \"uploading\" || this.state.status === \"processing\"\n );\n }\n\n /**\n * Check if file upload is in progress\n */\n isUploadingFile(): boolean {\n return this.state.status === \"uploading\";\n }\n\n /**\n * Check if flow processing is in progress\n */\n isProcessing(): boolean {\n return this.state.status === \"processing\";\n }\n\n /**\n * Get the current job ID\n */\n getJobId(): string | null {\n return this.state.jobId;\n }\n\n /**\n * Update the internal state and notify callbacks\n */\n private updateState(update: Partial<FlowUploadState>): void {\n this.state = { ...this.state, ...update };\n 
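// The merged state is fanned out to the framework wrapper via onStateChange.\n    // A minimal wiring sketch (assumed names): the wrapper renders state and feeds\n    // server events back through handleFlowEvent()/handleUploadProgress():\n    //\n    //   const manager = new FlowManager(flowUploadFn, { onStateChange: render }, flowOptions);\n    //   flowEvents.subscribe((e) => manager.handleFlowEvent(e));\n    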
this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Handle flow events from the event subscription\n * This method should be called by the framework wrapper when it receives flow events\n *\n * @param event - Flow event to process\n */\n handleFlowEvent(event: FlowEvent): void {\n // For FlowStart, accept if we don't have a jobId yet (first event)\n // This handles the race condition where flow events arrive before onJobStart callback\n if (event.eventType === EventType.FlowStart && !this.state.jobId) {\n this.updateState({\n jobId: event.jobId,\n flowStarted: true,\n status: \"processing\",\n });\n return;\n }\n\n // Only handle events for the current job\n if (!this.state.jobId || event.jobId !== this.state.jobId) {\n // console.warn(\"[FlowManager] IGNORING event - jobId mismatch\");\n return;\n }\n\n switch (event.eventType) {\n case EventType.FlowStart:\n this.updateState({\n flowStarted: true,\n status: \"processing\",\n });\n break;\n\n case EventType.NodeStart:\n this.updateState({\n status: \"processing\",\n currentNodeName: event.nodeName,\n currentNodeType: event.nodeType,\n });\n break;\n\n case EventType.NodePause:\n // When input node pauses, it's waiting for upload - switch to uploading state\n this.updateState({\n status: \"uploading\",\n currentNodeName: event.nodeName,\n // NodePause doesn't have nodeType, keep previous value\n });\n break;\n\n case EventType.NodeResume:\n // When node resumes, upload is complete - switch to processing state\n this.updateState({\n status: \"processing\",\n currentNodeName: event.nodeName,\n currentNodeType: event.nodeType,\n });\n break;\n\n case EventType.NodeEnd:\n this.updateState({\n status:\n this.state.status === \"uploading\"\n ? \"processing\"\n : this.state.status,\n currentNodeName: null,\n currentNodeType: null,\n });\n break;\n\n case EventType.FlowEnd: {\n // Get typed outputs from the event\n const flowOutputs = event.outputs || null;\n\n // Call onFlowComplete with full typed outputs\n if (flowOutputs && this.callbacks.onFlowComplete) {\n this.callbacks.onFlowComplete(flowOutputs);\n }\n\n // Call onSuccess with full typed outputs\n if (flowOutputs && flowOutputs.length > 0 && this.callbacks.onSuccess) {\n this.callbacks.onSuccess(flowOutputs);\n }\n\n this.updateState({\n status: \"success\",\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs,\n });\n\n this.abortController = null;\n break;\n }\n\n case EventType.FlowError: {\n const error = new Error(event.error);\n this.updateState({\n status: \"error\",\n error,\n });\n this.callbacks.onError?.(error);\n this.abortController = null;\n break;\n }\n\n case EventType.NodeError: {\n const error = new Error(event.error);\n this.updateState({\n status: \"error\",\n error,\n });\n this.callbacks.onError?.(error);\n this.abortController = null;\n break;\n }\n\n case EventType.FlowCancel:\n this.updateState({\n status: \"aborted\",\n });\n this.callbacks.onAbort?.();\n this.abortController = null;\n break;\n }\n }\n\n /**\n * Handle upload progress events from the event subscription\n * This method should be called by the framework wrapper when it receives upload progress events\n *\n * @param uploadId - The unique identifier for this upload\n * @param bytesUploaded - Number of bytes uploaded\n * @param totalBytes - Total bytes to upload, null if unknown\n */\n handleUploadProgress(\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ): void {\n // Calculate progress percentage\n const progress =\n totalBytes && totalBytes > 0\n ? 
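/* totals may be null for deferred-length uploads; the guard above yields 0 instead of NaN */ 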
Math.round((bytesUploaded / totalBytes) * 100)\n : 0;\n\n this.updateState({\n bytesUploaded,\n totalBytes,\n progress,\n });\n\n // Also update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(this.currentSingleInputNodeId);\n if (inputState) {\n inputState.status = \"uploading\";\n inputState.progress = progress;\n inputState.bytesUploaded = bytesUploaded;\n inputState.totalBytes = totalBytes;\n }\n }\n\n this.callbacks.onProgress?.(uploadId, bytesUploaded, totalBytes);\n }\n\n /**\n * Execute a flow with the provided input data.\n *\n * The input type and execution behavior depends on the generic TInput type:\n * - File/Blob: Initiates chunked upload with progress tracking\n * - string (URL): Directly passes URL to flow for fetching\n * - object: Passes structured data to flow input nodes\n *\n * @param input - Input data for the flow execution (type determined by TInput generic)\n *\n * @example\n * ```typescript\n * // File upload\n * await manager.upload(fileObject);\n *\n * // URL fetch\n * await manager.upload(\"https://example.com/image.jpg\");\n * ```\n */\n async upload(input: TInput): Promise<void> {\n // Determine totalBytes from input if possible (File/Blob on browser platforms)\n // For non-file inputs (URLs, structured data), totalBytes remains null\n let totalBytes: number | null = null;\n if (input && typeof input === \"object\") {\n if (\"size\" in input && typeof input.size === \"number\") {\n totalBytes = input.size;\n }\n }\n\n // Reset state but keep reference for potential retries\n this.updateState({\n status: \"uploading\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes,\n error: null,\n jobId: null,\n flowStarted: false,\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs: null,\n });\n\n try {\n // Build internal upload options with our callbacks\n const internalOptions: InternalFlowUploadOptions = {\n onJobStart: (jobId: string) => {\n this.updateState({\n jobId,\n });\n this.options?.onJobStart?.(jobId);\n },\n onProgress: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => {\n this.handleUploadProgress(uploadId, bytesUploaded, totalBytes);\n this.options?.onProgress?.(uploadId, bytesUploaded, totalBytes);\n },\n onChunkComplete: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => {\n this.callbacks.onChunkComplete?.(\n chunkSize,\n bytesAccepted,\n bytesTotal,\n );\n this.options?.onChunkComplete?.(chunkSize, bytesAccepted, bytesTotal);\n },\n onSuccess: (_result: UploadFile) => {\n // Note: This gets called when upload phase completes, not flow completion\n // Flow completion is handled by FlowEnd event\n this.updateState({\n progress: 100,\n });\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(\n this.currentSingleInputNodeId,\n );\n if (inputState) {\n inputState.status = \"complete\";\n inputState.progress = 100;\n }\n this.currentSingleInputNodeId = null;\n }\n // Don't call callbacks.onSuccess here - wait for FlowEnd event with TOutput\n },\n onError: (error: Error) => {\n this.updateState({\n status: \"error\",\n error,\n });\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(\n this.currentSingleInputNodeId,\n );\n if (inputState) {\n inputState.status = \"error\";\n 
inputState.error = error;\n }\n this.currentSingleInputNodeId = null;\n }\n this.callbacks.onError?.(error);\n this.options?.onError?.(error);\n this.abortController = null;\n },\n onAbort: () => {\n this.updateState({\n status: \"aborted\",\n });\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(\n this.currentSingleInputNodeId,\n );\n if (inputState) {\n inputState.status = \"error\";\n inputState.error = new Error(\"Upload aborted\");\n }\n this.currentSingleInputNodeId = null;\n }\n this.callbacks.onAbort?.();\n this.options?.onAbort?.();\n this.abortController = null;\n },\n onShouldRetry: this.options?.onShouldRetry,\n };\n\n // Start the flow upload\n this.abortController = await this.flowUploadFn(\n input,\n this.options.flowConfig,\n internalOptions,\n );\n } catch (error) {\n // Handle errors from upload initiation\n const uploadError =\n error instanceof Error ? error : new Error(String(error));\n this.updateState({\n status: \"error\",\n error: uploadError,\n });\n\n // Update inputStates for single-input flows executed via executeFlow()\n if (this.currentSingleInputNodeId) {\n const inputState = this.inputStates.get(this.currentSingleInputNodeId);\n if (inputState) {\n inputState.status = \"error\";\n inputState.error = uploadError;\n }\n this.currentSingleInputNodeId = null;\n }\n\n this.callbacks.onError?.(uploadError);\n this.options?.onError?.(uploadError);\n this.abortController = null;\n }\n }\n\n /**\n * Abort the current flow upload\n */\n abort(): void {\n if (this.abortController) {\n this.abortController.abort();\n // Note: State update happens in onAbort callback or FlowCancel event\n }\n }\n\n /**\n * Pause the current flow upload\n */\n pause(): void {\n if (this.abortController) {\n this.abortController.pause();\n }\n }\n\n /**\n * Reset the flow upload state to idle\n */\n reset(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n\n // Abort all input-specific controllers\n for (const inputState of this.inputStates.values()) {\n if (inputState.abortController) {\n inputState.abortController.abort();\n }\n }\n this.inputStates.clear();\n this.currentSingleInputNodeId = null;\n\n this.state = { ...initialState };\n this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Aggregate progress across multiple inputs.\n * Uses simple average for Phase 1 (size-weighted can be added in Phase 2).\n */\n private aggregateProgress(): void {\n if (this.inputStates.size === 0) {\n return;\n }\n\n const states = Array.from(this.inputStates.values());\n\n // Calculate average progress across all inputs\n const totalProgress = states.reduce(\n (sum, state) => sum + state.progress,\n 0,\n );\n const avgProgress = Math.round(totalProgress / states.length);\n\n // Calculate total bytes (sum of all inputs)\n const totalBytes = states.reduce(\n (sum, state) => sum + (state.totalBytes || 0),\n 0,\n );\n const bytesUploaded = states.reduce(\n (sum, state) => sum + state.bytesUploaded,\n 0,\n );\n\n this.updateState({\n progress: avgProgress,\n bytesUploaded,\n totalBytes: totalBytes > 0 ? totalBytes : null,\n });\n }\n\n /**\n * Execute a flow with multiple inputs (generic execution path).\n *\n * This method:\n * 1. Builds FlowInputs with auto-detection\n * 2. Validates inputs (optional, to be added in integration)\n * 3. Executes flow with the inputs\n * 4. 
Tracks multi-input state\n *\n * @param inputs - Map of nodeId to raw input data\n *\n * @example\n * ```typescript\n * await manager.executeFlow({\n * \"file-input\": myFile,\n * \"url-input\": \"https://example.com/image.jpg\"\n * });\n * ```\n */\n async executeFlow(inputs: Record<string, unknown>): Promise<void> {\n const inputEntries = Object.entries(inputs);\n\n if (inputEntries.length === 0) {\n throw new Error(\"No inputs provided to executeFlow\");\n }\n\n // Initialize input states for tracking\n this.inputStates.clear();\n for (const [nodeId, data] of Object.entries(inputs)) {\n const inputType = detectInputType(data);\n this.inputStates.set(nodeId, {\n nodeId,\n type: inputType,\n status: \"pending\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes:\n inputType === \"file\" &&\n data &&\n typeof data === \"object\" &&\n \"size\" in data &&\n typeof data.size === \"number\"\n ? data.size\n : null,\n error: null,\n abortController: null,\n });\n }\n\n // For single input, use the standard upload path\n if (inputEntries.length === 1) {\n const firstEntry = inputEntries[0];\n if (!firstEntry) {\n throw new Error(\"No inputs provided to executeFlow\");\n }\n const [nodeId, firstData] = firstEntry;\n // Track nodeId so upload() callbacks can update inputStates\n this.currentSingleInputNodeId = nodeId;\n await this.upload(firstData as TInput);\n return;\n }\n\n // For multiple inputs, use the multi-input upload function\n if (!this.multiInputUploadFn) {\n throw new Error(\n \"Multi-input flows require multiInputUploadFn to be provided in FlowManager constructor. \" +\n \"Platform packages should implement MultiInputFlowUploadFunction.\",\n );\n }\n\n // Reset state for multi-input flow\n this.updateState({\n status: \"uploading\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes: null,\n error: null,\n jobId: null,\n flowStarted: false,\n currentNodeName: null,\n currentNodeType: null,\n flowOutputs: null,\n });\n\n try {\n // Build internal options with callbacks\n const internalOptions: InternalFlowUploadOptions = {\n onJobStart: (jobId: string) => {\n this.updateState({ jobId });\n this.options?.onJobStart?.(jobId);\n },\n onProgress: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => {\n // Global progress tracking (will be overridden by aggregateProgress)\n this.options?.onProgress?.(uploadId, bytesUploaded, totalBytes);\n },\n onSuccess: (_result: UploadFile) => {\n // Flow completion is handled by FlowEnd event\n this.updateState({ progress: 100 });\n },\n onError: (error: Error) => {\n this.updateState({ status: \"error\", error });\n this.callbacks.onError?.(error);\n this.options?.onError?.(error);\n this.abortController = null;\n },\n onAbort: () => {\n this.updateState({ status: \"aborted\" });\n this.callbacks.onAbort?.();\n this.options?.onAbort?.();\n this.abortController = null;\n },\n onShouldRetry: this.options?.onShouldRetry,\n };\n\n // Multi-input callbacks for per-input tracking\n const multiInputCallbacks: MultiInputCallbacks = {\n onInputProgress: (nodeId, progress, bytesUploaded, totalBytes) => {\n // Update input state\n const inputState = this.inputStates.get(nodeId);\n if (inputState) {\n inputState.status = \"uploading\";\n inputState.progress = progress;\n inputState.bytesUploaded = bytesUploaded;\n inputState.totalBytes = totalBytes;\n }\n\n // Aggregate progress across all inputs\n this.aggregateProgress();\n },\n onInputComplete: (nodeId) => {\n const inputState = this.inputStates.get(nodeId);\n if (inputState) {\n 
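// mark this input finished before recomputing the aggregate progress below\n                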
inputState.status = \"complete\";\n inputState.progress = 100;\n }\n this.aggregateProgress();\n },\n onInputError: (nodeId, error) => {\n const inputState = this.inputStates.get(nodeId);\n if (inputState) {\n inputState.status = \"error\";\n inputState.error = error;\n }\n },\n };\n\n // Execute multi-input flow\n this.abortController = await this.multiInputUploadFn(\n inputs,\n this.options.flowConfig,\n internalOptions,\n multiInputCallbacks,\n );\n } catch (error) {\n const uploadError =\n error instanceof Error ? error : new Error(String(error));\n this.updateState({\n status: \"error\",\n error: uploadError,\n });\n this.callbacks.onError?.(uploadError);\n this.options?.onError?.(uploadError);\n this.abortController = null;\n }\n }\n\n /**\n * Get the input execution states (for multi-input flows).\n * @returns Map of nodeId to input state\n */\n getInputStates(): ReadonlyMap<string, InputExecutionState> {\n return this.inputStates;\n }\n\n /**\n * Clean up resources (call when disposing the manager)\n */\n cleanup(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n\n // Cleanup input-specific controllers\n for (const inputState of this.inputStates.values()) {\n if (inputState.abortController) {\n inputState.abortController.abort();\n }\n }\n this.inputStates.clear();\n this.currentSingleInputNodeId = null;\n }\n}\n","import type { UploadFile } from \"@uploadista/core/types\";\nimport type { UploadOptions } from \"../types/upload-options\";\n\n/**\n * Upload status representing the current state of an upload\n */\nexport type UploadStatus =\n | \"idle\"\n | \"uploading\"\n | \"success\"\n | \"error\"\n | \"aborted\";\n\n/**\n * Complete upload state\n */\nexport interface UploadState {\n /** Current status of the upload */\n status: UploadStatus;\n /** Upload progress percentage (0-100) */\n progress: number;\n /** Number of bytes uploaded */\n bytesUploaded: number;\n /** Total bytes to upload, null if unknown/deferred */\n totalBytes: number | null;\n /** Error if upload failed */\n error: Error | null;\n /** Result if upload succeeded */\n result: UploadFile | null;\n}\n\n/**\n * Callbacks that UploadManager invokes during the upload lifecycle\n */\nexport interface UploadManagerCallbacks {\n /**\n * Called when the upload state changes\n */\n onStateChange: (state: UploadState) => void;\n\n /**\n * Called when upload progress updates\n * @param uploadId - The unique identifier for this upload\n * @param bytesUploaded - Number of bytes uploaded\n * @param totalBytes - Total bytes to upload, null if unknown\n */\n onProgress?: (\n uploadId: string,\n bytesUploaded: number,\n totalBytes: number | null,\n ) => void;\n\n /**\n * Called when a chunk completes\n * @param chunkSize - Size of the completed chunk\n * @param bytesAccepted - Total bytes accepted so far\n * @param bytesTotal - Total bytes to upload, null if unknown\n */\n onChunkComplete?: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => void;\n\n /**\n * Called when upload completes successfully\n * @param result - The uploaded file result\n */\n onSuccess?: (result: UploadFile) => void;\n\n /**\n * Called when upload fails with an error\n * @param error - The error that occurred\n */\n onError?: (error: Error) => void;\n\n /**\n * Called when upload is aborted\n */\n onAbort?: () => void;\n}\n\n/**\n * Generic upload input type - can be any value that the upload client accepts\n */\nexport type UploadInput = unknown;\n\n/**\n * Abort 
controller interface for canceling uploads\n */\nexport interface UploadAbortController {\n abort: () => void;\n}\n\n/**\n * Upload function that performs the actual upload.\n * Returns a promise that resolves to an abort controller.\n */\nexport type UploadFunction<\n TInput = UploadInput,\n TOptions extends UploadOptions = UploadOptions,\n> = (input: TInput, options: TOptions) => Promise<UploadAbortController>;\n\n/**\n * Initial state for a new upload\n */\nconst initialState: UploadState = {\n status: \"idle\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes: null,\n error: null,\n result: null,\n};\n\n/**\n * Platform-agnostic upload manager that handles upload state machine,\n * progress tracking, error handling, abort, reset, and retry logic.\n *\n * Framework packages (React, Vue, React Native) should wrap this manager\n * with framework-specific hooks/composables.\n *\n * @example\n * ```typescript\n * const uploadFn = (input, options) => client.upload(input, options);\n * const manager = new UploadManager(uploadFn, {\n * onStateChange: (state) => setState(state),\n * onProgress: (progress) => console.log(`${progress}%`),\n * onSuccess: (result) => console.log('Upload complete:', result),\n * onError: (error) => console.error('Upload failed:', error),\n * });\n *\n * await manager.upload(file);\n * ```\n */\nexport class UploadManager<\n TInput = UploadInput,\n TOptions extends UploadOptions = UploadOptions,\n> {\n private state: UploadState;\n private abortController: UploadAbortController | null = null;\n private lastInput: TInput | null = null;\n private uploadId: string | null = null;\n\n /**\n * Create a new UploadManager\n *\n * @param uploadFn - Upload function to use for uploads\n * @param callbacks - Callbacks to invoke during upload lifecycle\n * @param options - Upload configuration options\n */\n constructor(\n private readonly uploadFn: UploadFunction<TInput, TOptions>,\n private readonly callbacks: UploadManagerCallbacks,\n private readonly options?: TOptions,\n ) {\n this.state = { ...initialState };\n }\n\n /**\n * Get the current upload state\n */\n getState(): UploadState {\n return { ...this.state };\n }\n\n /**\n * Check if an upload is currently active\n */\n isUploading(): boolean {\n return this.state.status === \"uploading\";\n }\n\n /**\n * Check if the upload can be retried\n */\n canRetry(): boolean {\n return (\n (this.state.status === \"error\" || this.state.status === \"aborted\") &&\n this.lastInput !== null\n );\n }\n\n /**\n * Update the internal state and notify callbacks\n */\n private updateState(update: Partial<UploadState>): void {\n this.state = { ...this.state, ...update };\n this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Start uploading a file or input\n *\n * @param input - File or input to upload (type depends on platform)\n */\n async upload(input: TInput): Promise<void> {\n // Determine totalBytes from input if possible (File/Blob on browser platforms)\n let totalBytes: number | null = null;\n if (input && typeof input === \"object\") {\n if (\"size\" in input && typeof input.size === \"number\") {\n totalBytes = input.size;\n }\n }\n\n // Reset state but keep reference for retries\n this.updateState({\n status: \"uploading\",\n progress: 0,\n bytesUploaded: 0,\n totalBytes,\n error: null,\n result: null,\n });\n\n this.lastInput = input;\n\n try {\n // Build complete options with our callbacks\n const uploadOptions = {\n ...this.options,\n onProgress: (\n uploadId: string,\n bytesUploaded: number,\n bytes: number | null,\n ) 
=> {\n // Store uploadId on first progress callback\n if (!this.uploadId) {\n this.uploadId = uploadId;\n }\n\n const progressPercent = bytes\n ? Math.round((bytesUploaded / bytes) * 100)\n : 0;\n\n this.updateState({\n progress: progressPercent,\n bytesUploaded,\n totalBytes: bytes,\n });\n\n this.callbacks.onProgress?.(uploadId, bytesUploaded, bytes);\n this.options?.onProgress?.(uploadId, bytesUploaded, bytes);\n },\n onChunkComplete: (\n chunkSize: number,\n bytesAccepted: number,\n bytesTotal: number | null,\n ) => {\n this.callbacks.onChunkComplete?.(\n chunkSize,\n bytesAccepted,\n bytesTotal,\n );\n this.options?.onChunkComplete?.(chunkSize, bytesAccepted, bytesTotal);\n },\n onSuccess: (result: UploadFile) => {\n this.updateState({\n status: \"success\",\n result,\n progress: 100,\n bytesUploaded: result.size || 0,\n totalBytes: result.size || null,\n });\n\n this.callbacks.onSuccess?.(result);\n this.options?.onSuccess?.(result);\n this.abortController = null;\n },\n onError: (error: Error) => {\n this.updateState({\n status: \"error\",\n error,\n });\n\n this.callbacks.onError?.(error);\n this.options?.onError?.(error);\n this.abortController = null;\n },\n onAbort: () => {\n this.updateState({\n status: \"aborted\",\n });\n\n this.callbacks.onAbort?.();\n this.options?.onAbort?.();\n this.abortController = null;\n },\n onShouldRetry: this.options?.onShouldRetry,\n } as TOptions;\n\n // Start the upload\n this.abortController = await this.uploadFn(input, uploadOptions);\n } catch (error) {\n // Handle errors from upload initiation\n const uploadError =\n error instanceof Error ? error : new Error(String(error));\n this.updateState({\n status: \"error\",\n error: uploadError,\n });\n\n this.callbacks.onError?.(uploadError);\n this.options?.onError?.(uploadError);\n this.abortController = null;\n }\n }\n\n /**\n * Abort the current upload\n */\n abort(): void {\n if (this.abortController) {\n this.abortController.abort();\n // Note: State update happens in onAbort callback\n }\n }\n\n /**\n * Reset the upload state to idle\n */\n reset(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n\n this.state = { ...initialState };\n this.lastInput = null;\n this.uploadId = null;\n this.callbacks.onStateChange(this.state);\n }\n\n /**\n * Retry the last failed or aborted upload\n */\n retry(): void {\n if (this.canRetry() && this.lastInput !== null) {\n this.upload(this.lastInput);\n }\n }\n\n /**\n * Clean up resources (call when disposing the manager)\n */\n cleanup(): void {\n if (this.abortController) {\n this.abortController.abort();\n this.abortController = null;\n }\n this.uploadId = null;\n }\n}\n","import z from \"zod\";\n\nexport type PreviousUpload = {\n size: number | null;\n metadata: { [key: string]: string | number | boolean };\n creationTime: string;\n uploadId?: string;\n parallelUploadUrls?: string[];\n clientStorageKey: string;\n};\n\nexport const previousUploadSchema = z.object({\n size: z.number().nullable(),\n metadata: z.record(\n z.string(),\n z.union([z.string(), z.number(), z.boolean()]),\n ),\n creationTime: z.string(),\n uploadId: z.string().optional(),\n parallelUploadUrls: z.array(z.string()).optional(),\n clientStorageKey: z.string(),\n});\n","import type { IdGenerationService } from \"../services/id-generation-service\";\nimport type { StorageService } from \"../services/storage-service\";\nimport {\n type PreviousUpload,\n previousUploadSchema,\n} from \"../types/previous-upload\";\n\n/**\n * 
Client-side storage interface for managing upload resumption data.\n *\n * Provides methods to store, retrieve, and manage previous upload information,\n * enabling the client to resume interrupted uploads from where they left off.\n * This is essential for implementing reliable upload resumption across sessions.\n *\n * Storage keys are namespaced with \"uploadista::\" prefix and organized by\n * file fingerprint to allow quick lookup of resumable uploads.\n *\n * @example Finding resumable uploads\n * ```typescript\n * const storage = createClientStorage(localStorage);\n *\n * // Find all previous uploads\n * const allUploads = await storage.findAllUploads();\n *\n * // Find uploads for a specific file\n * const fingerprint = await computeFingerprint(file);\n * const matches = await storage.findUploadsByFingerprint(fingerprint);\n *\n * if (matches.length > 0) {\n * // Resume from the most recent upload\n * const uploadId = matches[0].uploadId;\n * await resumeUpload(uploadId);\n * }\n * ```\n */\nexport type ClientStorage = {\n /**\n * Retrieves all stored upload records from client storage.\n *\n * Useful for debugging or displaying a list of resumable uploads to the user.\n *\n * @returns Array of all previous upload records\n */\n findAllUploads: () => Promise<PreviousUpload[]>;\n\n /**\n * Finds previous upload records matching a specific file fingerprint.\n *\n * This is the primary method for discovering resumable uploads.\n * Returns uploads sorted by most recent first.\n *\n * @param fingerprint - The file fingerprint to search for\n * @returns Array of matching upload records, or empty array if none found\n *\n * @example\n * ```typescript\n * const fingerprint = await computeFingerprint(file);\n * const previous = await storage.findUploadsByFingerprint(fingerprint);\n *\n * if (previous.length > 0) {\n * console.log(`Found ${previous.length} resumable uploads`);\n * console.log(`Last upload was ${previous[0].offset} bytes`);\n * }\n * ```\n */\n findUploadsByFingerprint: (fingerprint: string) => Promise<PreviousUpload[]>;\n\n /**\n * Removes an upload record from client storage.\n *\n * Called after an upload completes successfully or is explicitly cancelled\n * to clean up storage and prevent stale resumption attempts.\n *\n * @param clientStorageKey - The storage key returned by addUpload\n *\n * @example Cleanup after successful upload\n * ```typescript\n * await uploadFile(file);\n * await storage.removeUpload(storageKey);\n * ```\n */\n removeUpload: (clientStorageKey: string) => Promise<void>;\n\n /**\n * Stores an upload record in client storage for future resumption.\n *\n * Creates a namespaced storage key that includes the file fingerprint,\n * making it easy to find resumable uploads later.\n *\n * @param fingerprint - File fingerprint for organizing uploads\n * @param upload - Upload metadata to store (uploadId, offset, etc.)\n * @param options - Options object containing ID generation service\n * @returns The storage key that can be used to remove this upload later, or undefined if storage failed\n *\n * @example Storing upload progress\n * ```typescript\n * const fingerprint = await computeFingerprint(file);\n * const key = await storage.addUpload(\n * fingerprint,\n * { uploadId: 'abc123', offset: 1024000 },\n * { generateId: idService }\n * );\n *\n * // Later, remove when complete\n * if (key) await storage.removeUpload(key);\n * ```\n */\n addUpload: (\n fingerprint: string,\n upload: PreviousUpload,\n { generateId }: { generateId: IdGenerationService },\n ) 
=> Promise<string | undefined>;\n};\n\n/**\n * Creates a ClientStorage instance using the provided storage service.\n *\n * This factory function wraps a platform-specific StorageService (e.g., localStorage,\n * AsyncStorage) with the ClientStorage interface, providing a consistent API\n * for upload resumption across different platforms.\n *\n * @param storageService - Platform-specific storage implementation\n * @returns ClientStorage instance for managing upload records\n *\n * @example Browser with localStorage\n * ```typescript\n * const storage = createClientStorage({\n * find: async (prefix) => {\n * const items: Record<string, string> = {};\n * for (let i = 0; i < localStorage.length; i++) {\n * const key = localStorage.key(i);\n * if (key?.startsWith(prefix)) {\n * items[key] = localStorage.getItem(key) || '';\n * }\n * }\n * return items;\n * },\n * setItem: async (key, value) => localStorage.setItem(key, value),\n * removeItem: async (key) => localStorage.removeItem(key),\n * });\n * ```\n *\n * @example React Native with AsyncStorage\n * ```typescript\n * const storage = createClientStorage({\n * find: async (prefix) => {\n * const keys = await AsyncStorage.getAllKeys();\n * const matching = keys.filter(k => k.startsWith(prefix));\n * const pairs = await AsyncStorage.multiGet(matching);\n * return Object.fromEntries(pairs);\n * },\n * setItem: async (key, value) => AsyncStorage.setItem(key, value),\n * removeItem: async (key) => AsyncStorage.removeItem(key),\n * });\n * ```\n */\nexport function createClientStorage(\n storageService: StorageService,\n): ClientStorage {\n return {\n findAllUploads: async () => {\n const items = await storageService.find(\"uploadista::\");\n return Object.values(items).map((item) =>\n previousUploadSchema.parse(JSON.parse(item)),\n );\n },\n findUploadsByFingerprint: async (fingerprint: string) => {\n const items = await storageService.find(`uploadista::${fingerprint}`);\n return Object.values(items).map((item) =>\n previousUploadSchema.parse(JSON.parse(item)),\n );\n },\n removeUpload: (clientStorageKey: string) =>\n storageService.removeItem(clientStorageKey),\n addUpload: async (\n fingerprint: string,\n upload: PreviousUpload,\n { generateId }: { generateId: IdGenerationService },\n ) => {\n const key = generateId.generate();\n const clientStorageKey = `uploadista::${fingerprint}::${key}`;\n await storageService.setItem(clientStorageKey, JSON.stringify(upload));\n return clientStorageKey;\n },\n };\n}\n","import type { StorageService } from \"../services/storage-service\";\n\n/**\n * In-memory fallback storage service for Expo\n * Used when AsyncStorage is not available or for testing\n */\nexport function createInMemoryStorageService(): StorageService {\n const storage = new Map<string, string>();\n\n return {\n async getItem(key: string): Promise<string | null> {\n return storage.get(key) ?? 
null;\n },\n\n async setItem(key: string, value: string): Promise<void> {\n storage.set(key, value);\n },\n\n async removeItem(key: string): Promise<void> {\n storage.delete(key);\n },\n\n async findAll(): Promise<Record<string, string>> {\n return Object.fromEntries(storage.entries());\n },\n\n async find(prefix: string): Promise<Record<string, string>> {\n return Object.fromEntries(\n Array.from(storage.entries()).filter(([key]) => key.startsWith(prefix)),\n );\n },\n };\n}\n"],"mappings":"2OAoEA,IAAa,EAAb,KAAyB,CAWvB,YAAY,EAA2B,aAVR,EAAE,kBACX,mBAEA,EAQpB,KAAK,OAAS,CACZ,aAAc,EAAO,aACrB,cAAe,EAAO,eAAiB,EAAO,aAAe,EAC7D,UAAW,EAAO,WAAa,IAChC,CA2BH,IAAI,EAAyC,CAS3C,OARA,KAAK,OAAO,KAAK,EAAM,CACvB,KAAK,aAAe,EAAM,OAC1B,KAAK,YAAc,KAAK,KAAK,CAEzB,KAAK,aAAa,CACb,KAAK,OAAO,CAGd,KA0BT,OAA8B,CAC5B,GAAI,KAAK,OAAO,SAAW,EACzB,OAAO,KAGT,IAAM,EAAW,IAAI,WAAW,KAAK,YAAY,CAC7C,EAAS,EAEb,IAAK,IAAM,KAAS,KAAK,OACvB,EAAS,IAAI,EAAO,EAAO,CAC3B,GAAU,EAAM,OAGlB,IAAMA,EAAwB,CAC5B,KAAM,EACN,KAAM,KAAK,YACX,UAAW,KAAK,YACjB,CAGD,OADA,KAAK,OAAO,CACL,EAyBT,aAAuB,CAKrB,GAJI,KAAK,aAAe,KAAK,OAAO,cAIhC,KAAK,aAAe,KAAK,OAAO,cAClC,MAAO,GAGT,IAAM,EAAmB,KAAK,KAAK,CAAG,KAAK,YAK3C,OAJI,KAAK,OAAO,OAAS,GAAK,EAAmB,KAAK,OAAO,UA0B/D,eAKE,CACA,MAAO,CACL,KAAM,KAAK,YACX,WAAY,KAAK,OAAO,OACxB,eAAgB,KAAK,aAAa,CAClC,iBAAkB,KAAK,KAAK,CAAG,KAAK,YACrC,CAwBH,gBAA0B,CACxB,OAAO,KAAK,OAAO,OAAS,EAS9B,OAAc,CACZ,KAAK,OAAS,EAAE,CAChB,KAAK,YAAc,EACnB,KAAK,YAAc,EAQrB,iBAA0B,CACxB,OAAO,KAAK,OAAO,eClQV,EAAb,KAAkD,CAChD,YACE,EACA,EACA,CAFQ,KAAA,WAAA,EACA,KAAA,YAAA,EAOV,MAAM,QACJ,EACA,EAA8B,EAAE,CACT,CACvB,GAAI,CAEF,IAAM,EAAuB,MAAM,KAAK,sBACtC,EAAQ,SAAW,EAAE,CACrB,EACD,CAGD,OAAO,MAAM,KAAK,WAAW,QAAQ,EAAK,CACxC,GAAG,EACH,QAAS,EAET,YACE,KAAK,YAAY,SAAS,GAAK,WAC/B,KAAK,YAAY,SAAS,GAAK,mBAC3B,OACC,EAAQ,aAAe,UAC/B,CAAC,OACK,EAAO,CAOd,MALI,aAAiB,OAAS,EAAM,QAAQ,SAAS,OAAO,CACpD,GAYZ,MAAc,sBACZ,EACA,EACiC,CAEjC,GAAI,sBAAuB,KAAK,YAE9B,OAAO,MAAM,KAAK,YAAY,kBAAkB,EAAQ,CAG1D,GAAI,gBAAiB,KAAK,YAAa,CAErC,IAAM,EAAQ,KAAK,oBAAoB,EAAI,CAC3C,OAAO,MAAM,KAAK,YAAY,YAAY,EAAS,EAAM,CAI3D,OAAO,EAOT,oBAA4B,EAAiC,CAO3D,IAAM,EAAc,EAAI,MAAM,0BAA0B,CACxD,GAAI,EACF,OAAO,EAAY,GAGrB,IAAM,EAAY,EAAI,MAAM,wBAAwB,CACpD,GAAI,EACF,OAAO,EAAU,GAGnB,IAAM,EAAW,EAAI,MAAM,wBAAwB,CACnD,GAAI,EACF,OAAO,EAAS,GAUpB,YAAa,CACX,OAAO,KAAK,WAAW,YAAY,CAGrC,oBAAqB,CACnB,OAAO,KAAK,WAAW,oBAAoB,CAG7C,OAAQ,CACN,KAAK,WAAW,OAAO,CAGzB,MAAM,OAAQ,CACZ,MAAM,KAAK,WAAW,OAAO,CAG/B,MAAM,kBAAkB,EAAgB,CACtC,MAAM,KAAK,WAAW,kBAAkB,EAAK,CAM/C,gBAA8B,CAC5B,OAAO,KAAK,cCtJH,EAAb,KAA6B,CAC3B,YAAY,EAAyD,CAAjD,KAAA,KAAA,EAEpB,SAAU,CACR,OAAO,KAAK,OCSH,EAAb,cAAuC,CAAgB,CACrD,YACE,EACA,EACA,EACA,CACA,MAAM,SAAS,CAJP,KAAA,OAAA,EACA,KAAA,gBAAA,EACA,KAAA,OAAA,EAaV,MAAM,kBACJ,EAAkC,EAAE,CACH,CACjC,GAAI,CACF,GAAI,CAAC,KAAK,OAAO,eACf,OAAO,EAIT,IAAM,EAAc,MAAM,QAAQ,QAAQ,KAAK,OAAO,gBAAgB,CAAC,CAGvE,GAAI,CAAC,GAAe,OAAO,GAAgB,SACzC,MAAU,MACR,qEACD,CAIH,IAAM,EAAiB,CAAE,GAAG,EAAS,CAcrC,OAZI,EAAY,UACd,KAAK,gBAAgB,EAAY,QAAQ,CACzC,OAAO,OAAO,EAAgB,EAAY,QAAQ,EAMhD,EAAY,SACd,KAAK,cAAc,EAAgB,EAAY,QAAQ,CAGlD,QACA,EAAO,CAEd,IAAM,EAAU,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACtE,MAAU,MAAM,sCAAsC,IAAU,EAOpE,gBAAwB,EAAuC,CAC7D,GAAI,OAAO,GAAY,WAAY,EACjC,MAAU,MAAM,4BAA4B,CAG9C,IAAK,GAAM,CAAC,EAAK,KAAU,OAAO,QAAQ,EAAQ,CAChD,GAAI,OAAO,GAAQ,UAAY,OAAO,GAAU,SAC9C,MAAU,MACR,sDAAsD,EAAI,IAAI,OAAO,EAAM,GAC5E,CAUP,cACE,EACA,EACM,CAIN,GAFkB,KAAK,gBAAgB,WAAW,CAOhD,KAAK,OAAO,KACV,2IAED,KACI,CAEL,IAAM,EAAe,OAAO,QAAQ,EAAQ,CACzC,KAAK,CAAC,EAAK,KAAW,GAAG,EAAI,GAAG,IAAQ,CACxC,KAAK,KAAK,CAET,IACF,EAAQ,OAAS,MC5GZ,EAAb,cAAmC,CAAgB,CACjD,aAAc,CACZ,MAAM,UAAU,CASlB,MAAM,kBACJ,EAAkC,EAAE,CACH,CACjC,OAAO,EAMT,WAA
W,EAAsB,EAOjC,gBAAuB,ICFZ,EAAb,cAAgD,CAAgB,CAO9D,YACE,EACA,EACA,CACA,MAAM,mBAAmB,CAHjB,KAAA,OAAA,EACA,KAAA,WAAA,kBAPW,IAAI,qBAGiB,KAe1C,MAAM,YAAqC,CACzC,GAAI,CAEF,IAAM,EAAW,MAAM,KAAK,WAAW,QACrC,GAAG,KAAK,OAAO,cAAc,GAAG,KAAK,OAAO,WAC5C,CACE,OAAQ,MACR,QAAS,CACP,eAAgB,mBACjB,CACF,CACF,CAGD,GAAI,CAAC,EAAS,GAAI,CAChB,IAAM,EAAY,MAAM,EAAS,MAAM,CACnC,EAAe,wBAAwB,EAAS,SAEpD,GAAI,CACF,IAAM,EAAY,KAAK,MAAM,EAAU,CACvC,EAAe,EAAU,OAAS,EAAU,SAAW,OACjD,CAEN,EAAe,GAAa,EAAS,YAAc,EAGrD,MAAU,MAAM,EAAa,CAI/B,IAAM,EAAQ,MAAM,EAAS,MAAM,CAEnC,GAAI,CAAC,EAAK,OAAS,OAAO,EAAK,OAAU,SACvC,MAAU,MACR,sEACD,CAGH,OAAO,QACA,EAAO,CAKd,MAHI,aAAiB,MACT,MAAM,+BAA+B,EAAM,UAAU,CAEvD,MAAM,+BAA+B,OAAO,EAAM,GAAG,EAUnE,MAAc,gBAAgB,EAAiC,CAE7D,GAAI,EAAO,CACT,IAAM,EAAS,KAAK,WAAW,IAAI,EAAM,CACzC,GAAI,GAAU,CAAC,KAAK,eAAe,EAAO,CACxC,OAAO,EAAO,MAKlB,GAAI,CAAC,GAAS,KAAK,aAAe,CAAC,KAAK,eAAe,KAAK,YAAY,CACtE,OAAO,KAAK,YAAY,MAI1B,IAAM,EAAgB,MAAM,KAAK,YAAY,CAGvC,EAAY,EAAc,UAC5B,KAAK,KAAK,CAAG,EAAc,UAAY,IACvC,IAAA,GAEES,EAA2B,CAC/B,MAAO,EAAc,MACrB,YACD,CASD,OANI,EACF,KAAK,WAAW,IAAI,EAAO,EAAY,CAEvC,KAAK,YAAc,EAGd,EAAc,MAOvB,eAAuB,EAA8B,CAQnD,OAPK,EAAO,UAOL,KAAK,KAAK,CAAG,EAAO,UADV,GAAK,IAJb,GAgBX,MAAM,YACJ,EAAkC,EAAE,CACpC,EACiC,CACjC,GAAI,CAEF,IAAM,EAAQ,MAAM,KAAK,gBAAgB,EAAM,CAG/C,MAAO,CACL,GAAG,EACH,cAAe,UAAU,IAC1B,OACM,EAAO,CACd,IAAM,EAAU,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACtE,MAAU,MAAM,gCAAgC,IAAU,EAU9D,WAAW,EAAqB,CAC9B,KAAK,WAAW,OAAO,EAAM,CAO/B,gBAAuB,CACrB,KAAK,WAAW,OAAO,CACvB,KAAK,YAAc,KAMrB,eAGE,CACA,MAAO,CACL,eAAgB,KAAK,WAAW,KAChC,eAAgB,KAAK,cAAgB,KACtC,GCxKL,MAAMC,MAA6B,GAmCnC,SAAgB,EACd,EACA,EAAqB,EACb,CACR,MAAO,CACL,IAAM,GAAoB,CACpB,GACF,EAAM,EAAQ,EAGlB,KAAO,GAAoB,CACrB,GACF,EAAM,EAAQ,EAGlB,MAAQ,GAAoB,CACtB,GACF,EAAM,EAAQ,EAGnB,CC5FH,IAAa,EAAb,KAAiC,CAC/B,YAAY,EAA6C,CAArC,KAAA,aAAA,EAEpB,iBAAyC,CACvC,OAAO,KAAK,aAGd,uBAAuB,EAAmC,CACxD,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,KAAK,aAAa,wBAC3B,IAAK,SACH,MAAO,GACT,QACE,MAAO,MASf,MAAaE,EAAmD,CAC9D,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,qBAAsB,EACtB,aAAc,GAAK,KACnB,aAAc,IAAM,KAAO,KAC3B,SAAU,IACV,iBAAkB,EAAI,KAAO,KAC7B,sBAAuB,GACxB,CCuGD,IAAa,EAAb,KAA4B,CAU1B,YAAY,EAA+B,EAAE,CAAE,cATb,EAAE,CAUlC,KAAK,OAAS,CACZ,WAAY,EAAO,YAAc,IACjC,gBAAiB,EAAO,iBAAmB,GAC3C,uBAAwB,EAAO,wBAA0B,EACzD,cAAe,EAAO,eAAiB,GAAK,KAC5C,cAAe,EAAO,eAAiB,EAAI,KAAO,KAClD,kBAAmB,EAAO,mBAAqB,GAChD,CAED,KAAK,gBAAkB,KAAK,oBAAoB,CAWlD,UAAU,EAA4B,CACpC,KAAK,QAAQ,KAAK,EAAO,CAGrB,KAAK,QAAQ,OAAS,KAAK,OAAO,aACpC,KAAK,QAAU,KAAK,QAAQ,MAAM,CAAC,KAAK,OAAO,WAAW,EAG5D,KAAK,eAAe,CAiCtB,aACE,EACA,EACA,EACA,EACM,CACN,KAAK,UAAU,CACb,OACA,WACA,UACA,UAAW,KAAK,KAAK,CACrB,UACD,CAAC,CAmBJ,mBAAoC,CAClC,MAAO,CAAE,GAAG,KAAK,gBAAiB,CA+BpC,qBAAwC,CACtC,GAAI,KAAK,QAAQ,OAAS,KAAK,OAAO,uBACpC,MAAO,CAAE,KAAM,UAAW,WAAY,EAAG,CAG3C,IAAM,EAAgB,KAAK,4BAA4B,CACvD,GAAI,EAAc,OAAS,KAAK,OAAO,uBACrC,MAAO,CAAE,KAAM,UAAW,WAAY,GAAK,CAG7C,IAAM,EAAS,EAAc,IAC1B,GAAW,EAAO,MAAQ,EAAO,SAAW,KAC9C,CACK,EACJ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OAGnD,EACJ,EAAO,QAAQ,EAAK,IAAU,GAAO,EAAQ,IAAa,EAAG,EAAE,CAC/D,EAAO,OAEH,EADS,KAAK,KAAK,EAAS,CACM,EAGlC,EAAa,KAAK,IACtB,EACA,KAAK,QAAQ,QAAU,KAAK,OAAO,uBAAyB,GAC7D,CAeD,OAbI,EAAyB,KAAK,OAAO,kBAChC,CAAE,KAAM,WAAY,aAAY,CAGrC,EAAW,KAAK,OAAO,cAClB,CAAE,KAAM,OAAQ,aAAY,CAGjC,EAAW,KAAK,OAAO,cAClB,CAAE,KAAM,OAAQ,aAAY,CAI9B,CAAE,KAAM,OAAQ,WAAY,EAAa,GAAK,CAkBvD,sBAA+B,CAC7B,IAAM,EAAgB,KAAK,2BAA2B,GAAG,CACzD,GAAI,EAAc,SAAW,EAAG,MAAO,GAGvC,IAAI,EAAc,EACd,EAAc,EASlB,OAPA,EAAc,SAAS,EAAQ,IAAU,CACvC,IAAM,EAAS,EAAQ,EACjB,EAAa,EAAO,MAAQ,EAAO,SAAW,KACpD,GAAe,EAAa,EAC5B,GAAe,GACf,CAEK,EAAc,EAAI,EAAc,EAAc,EAmBvD,OAAc,CACZ,KAAK,QAAU,EAAE,CACjB,KAAK,
gBAAkB,KAAK,oBAAoB,CAGlD,2BAAmC,EAAgC,CACjE,IAAM,EAAa,KAAK,QAAQ,OAAQ,GAAW,EAAO,QAAQ,CAClE,OAAO,EAAQ,EAAW,MAAM,CAAC,EAAM,CAAG,EAG5C,eAA8B,CAC5B,IAAM,EAAoB,KAAK,QAAQ,OAAQ,GAAW,EAAO,QAAQ,CACnE,EAAgB,KAAK,QAAQ,OAC7B,EAAkB,EAAkB,OAE1C,GAAI,IAAkB,EAAG,CACvB,KAAK,gBAAkB,KAAK,oBAAoB,CAChD,OAGF,IAAM,EAAa,EAAkB,QAClC,EAAK,IAAW,EAAM,EAAO,KAC9B,EACD,CACK,EAAY,EAAkB,QACjC,EAAK,IAAW,EAAM,EAAO,SAC9B,EACD,CAEK,EAAe,EAAY,EAAI,GAAc,EAAY,KAAQ,EACjE,EAAc,EAAkB,EAChC,EAAY,EAAI,EAGhB,EAAqB,KAAK,QAAQ,OACrC,GAAW,EAAO,UAAY,IAAA,GAChC,CASD,KAAK,gBAAkB,CACrB,eACA,QATA,EAAmB,OAAS,EACxB,EAAmB,QAChB,EAAK,IAAW,GAAO,EAAO,SAAW,GAC1C,EACD,CAAG,EAAmB,OACvB,EAKJ,cACA,YACA,gBACA,aACA,YACD,CAGH,oBAA6C,CAC3C,MAAO,CACL,aAAc,EACd,QAAS,EACT,YAAa,EACb,UAAW,EACX,cAAe,EACf,WAAY,EACZ,UAAW,EACZ,GCpZL,MAAMI,EAAuD,CAC3D,aAAc,CACZ,KAAM,eACN,aAAc,GAAK,KACnB,aAAc,EAAI,KAAO,KACzB,iBAAkB,IAAM,KACxB,eAAgB,GACjB,CACD,SAAU,CACR,KAAM,WACN,aAAc,IAAM,KACpB,aAAc,EAAI,KAAO,KACzB,iBAAkB,IAAM,KACxB,eAAgB,GACjB,CACD,WAAY,CACV,KAAM,aACN,aAAc,IAAM,KACpB,aAAc,GAAK,KAAO,KAC1B,iBAAkB,KAAO,KACzB,eAAgB,GACjB,CACF,CAEKC,EAA4D,CAChE,aAAc,CACZ,KAAM,kBACN,aAAc,EAAI,KAAO,KACzB,aAAc,GAAK,KAAO,KAC1B,iBAAkB,EAAI,KAAO,KAC7B,eAAgB,GACjB,CACD,SAAU,CACR,KAAM,cACN,aAAc,EAAI,KAAO,KACzB,aAAc,IAAM,KAAO,KAC3B,iBAAkB,GAAK,KAAO,KAC9B,eAAgB,GACjB,CACD,WAAY,CACV,KAAM,gBACN,aAAc,EAAI,KAAO,KACzB,aAAc,IAAM,KAAO,KAC3B,iBAAkB,GAAK,KAAO,KAC9B,eAAgB,GACjB,CACF,CAED,IAAa,EAAb,KAA0B,CAWxB,YAAY,EAAgC,EAA6B,EAAE,CAAE,mBAL5B,8BACnB,4BACC,yBACuB,KAGpD,KAAK,eAAiB,EACtB,KAAK,OAAS,CACZ,QAAS,EAAO,SAAW,GAC3B,kBAAmB,EAAO,mBAAqB,KAAO,KACtD,aAAc,EAAO,cAAgB,GAAK,KAC1C,aAAc,EAAO,cAAgB,GAAK,KAAO,KACjD,iBAAkB,EAAO,kBAAoB,IAAM,KACnD,kBAAmB,EAAO,mBAAqB,IAC/C,eAAgB,EAAO,gBAAkB,GACzC,iBAAkB,EAAO,kBAAoB,GAC7C,uBAAwB,EAAO,wBAA0B,GACzD,qBAAsB,EAAO,qBAC9B,CAED,KAAK,iBAAmB,KAAK,8BAA8B,CAG7D,8BAA+C,CAO7C,OANI,KAAK,OAAO,qBACP,KAAK,IACV,KAAK,OAAO,iBACZ,KAAK,OAAO,qBAAqB,iBAClC,CAEI,KAAK,OAAO,iBAGrB,0BAAkC,EAAsB,CAOtD,OANI,KAAK,OAAO,qBACP,KAAK,IACV,KAAK,OAAO,qBAAqB,aACjC,KAAK,IAAI,KAAK,OAAO,qBAAqB,aAAc,EAAK,CAC9D,CAEI,EAGT,iBAAiB,EAA4C,CAC3D,GAAI,CAAC,KAAK,OAAO,QACf,MAAO,CACL,KAAM,KAAK,OAAO,kBAClB,SAAU,QACV,OAAQ,0BACR,iBAAkB,CAAE,KAAM,UAAW,WAAY,EAAG,CACrD,CAGH,IAAM,EAAmB,KAAK,eAAe,qBAAqB,CAE9D,EAAU,KAAK,iBACf,EAAW,WACX,EAAS,GAGb,GAAI,EAAiB,OAAS,UAC5B,EAAU,KAAK,OAAO,iBACtB,EAAW,UACX,EAAS,gCACJ,CACL,IAAM,EAAmB,KAAK,eAAe,EAAiB,CAC9D,EAAU,KAAK,0BACb,EACA,EACD,CACD,EAAW,EAAiB,KAC5B,EAAS,sBAAsB,EAAiB,KAAK,gBAAgB,KAAK,MAAM,EAAiB,WAAa,IAAI,CAAC,IA0BrH,OAtBI,GAAkB,EAAiB,IACrC,EAAU,EACV,GAAU,iCAAiC,EAAe,IAI5D,EAAU,KAAK,0BAA0B,EAAQ,CAGjD,EAAU,KAAK,IACb,KAAK,OAAO,aACZ,KAAK,IAAI,KAAK,OAAO,aAAc,EAAQ,CAC5C,CAED,KAAK,iBAAmB,EACxB,KAAK,aAAe,CAClB,KAAM,EACN,WACA,SACA,mBACD,CAEM,KAAK,aAGd,kBAAkB,EAAc,EAAkB,EAAwB,CAExE,KAAK,eAAe,aAAa,EAAM,EAAU,EAAQ,CAGrD,GACF,KAAK,uBACL,KAAK,oBAAsB,IAE3B,KAAK,sBACL,KAAK,qBAAuB,GAI9B,KAAK,eAAe,EAAS,EAAU,EAAK,CAG9C,qBAA8B,CAC5B,OAAO,KAAK,iBAGd,iBAA4C,CAC1C,OAAO,KAAK,aAGd,OAAc,CACZ,KAAK,iBAAmB,KAAK,OAAO,iBACpC,KAAK,oBAAsB,EAC3B,KAAK,qBAAuB,EAC5B,KAAK,aAAe,KACpB,KAAK,kBAAoB,KAM3B,wBAAwB,EAAkC,CACxD,KAAK,kBAAoB,EAM3B,8BAKE,CACA,GAAI,CAAC,KAAK,mBAAqB,CAAC,KAAK,OAAO,uBAC1C,MAAO,CACL,YAAa,GACb,UAAW,EACX,wBAAyB,KAAK,OAAO,aACrC,mBAAoB,EACrB,CAGH,IAAM,EAAY,KAAK,kBAAkB,UACnC,EAAoB,KAAK,kBAAkB,sBAG3C,GAAsB,EAAI,GAAa,EACvC,EAA0B,KAAK,IACnC,KAAK,OAAO,aACZ,KAAK,MAAM,EAAqB,IAAM,CACvC,CAED,MAAO,CACL,YAAa,EAAY,GACzB,YACA,0BACA,qBACD,CAGH,eAAuB,EAAsD,CAC3E,IAAMC,EAAqC,CACzC,KAAM,WACN,aAAc,IAAM,KACpB,aAAc,EAAI,KAAO,KACzB,iBAAkB,IAAM,KACxB,eAAgB,GACjB,CAKK,EADJ,KAAK,OAAO,sBAAsB,eAAiB,EAAI,KAAO,KAE5D,EACA,EAEJ,GAAI,KAAK,OAAO,iBA
Cd,OAAO,EAAiB,cAAgB,EAI1C,IAAIC,EAEJ,OAAQ,EAAiB,KAAzB,CACE,IAAK,OACH,EACE,EAAiB,WAAa,GACzB,EAAiB,YAAc,EAC/B,EAAiB,UAAY,EACpC,MACF,IAAK,OACH,EAAe,EAAiB,cAAgB,EAChD,MACF,IAAK,WACH,EAAe,EAAiB,cAAgB,EAChD,MACF,QACE,EAAe,EAAiB,UAAY,EAQhD,OAJI,KAAK,OAAO,wBAA0B,KAAK,kBACtC,KAAK,qCAAqC,EAAa,CAGzD,EAMT,qCACE,EACkB,CAClB,GAAI,CAAC,KAAK,kBAAmB,OAAO,EAEpC,IAAM,EAAW,KAAK,8BAA8B,CAC9C,EAAY,EAAS,UAuB3B,OApBI,EAAY,GACP,CACL,GAAG,EACH,KAAM,GAAG,EAAS,KAAK,oBACvB,aAAc,KAAK,IAAI,EAAS,aAAe,GAAK,GAAK,KAAK,CAC9D,eAAgB,KAAK,IAAI,EAAS,eAAiB,IAAK,GAAI,CAC7D,CAIC,EAAY,GACP,CACL,GAAG,EACH,KAAM,GAAG,EAAS,KAAK,kBACvB,aAAc,KAAK,IAAI,EAAS,aAAe,IAAM,GAAK,KAAK,CAC/D,eAAgB,KAAK,IAAI,EAAS,eAAiB,IAAK,GAAI,CAC7D,CAII,CACL,GAAG,EACH,KAAM,GAAG,EAAS,KAAK,sBACvB,aAAc,KAAK,IACjB,EAAS,aAAe,IACxB,EAAS,wBACV,CACD,eAAgB,EAAS,eAAiB,GAC3C,CAGH,0BACE,EACA,EACQ,CACR,IAAI,EAAa,KAAK,iBAGhB,EAAoB,KAAK,eAAe,sBAAsB,CAEpE,GAAI,EAAoB,EAAG,CAGzB,IAAM,EACJ,EAFqB,KAAK,uBAAuB,EAAiB,CAE7B,KAAK,OAAO,kBAG7C,EAAc,EAAS,eAC7B,EACE,KAAK,kBAAoB,EAAI,GAC7B,EAAkB,EAUtB,GANA,EAAa,KAAK,IAChB,EAAS,aACT,KAAK,IAAI,EAAS,aAAc,EAAW,CAC5C,CAGG,KAAK,oBAAsB,EAAG,CAEhC,IAAM,EAAkB,KAAK,IAAI,GAAK,KAAK,oBAAsB,GAAI,CACrE,GAAc,EAAI,UACT,KAAK,qBAAuB,EAAG,CAExC,IAAM,EAAiB,KAAK,IAAI,GAAK,KAAK,qBAAuB,IAAK,CACtE,GAAc,EAAI,EAGpB,OAAO,KAAK,MAAM,EAAW,CAG/B,uBAA+B,EAA4C,CACzE,OAAQ,EAAiB,KAAzB,CACE,IAAK,OACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAO,IAIb,eACE,EACA,EACA,EACM,CACN,GAAI,CAAC,EAAS,CAEZ,KAAK,iBAAmB,KAAK,IAC3B,KAAK,OAAO,aACZ,KAAK,iBAAmB,GACzB,CACD,OAIF,IAAM,EAAa,GAAQ,EAAW,KAChC,EAAU,KAAK,eAAe,mBAAmB,CAEvD,GAAI,EAAQ,aAAe,EAAG,CAC5B,IAAM,EAAmB,EAAa,EAAQ,aAE1C,EAAmB,KAAK,OAAO,kBAAoB,GAErD,KAAK,iBAAmB,KAAK,IAC3B,KAAK,OAAO,aACZ,KAAK,iBAAmB,IACzB,CACQ,EAAmB,KAAK,OAAO,kBAAoB,MAE5D,KAAK,iBAAmB,KAAK,IAC3B,KAAK,OAAO,aACZ,KAAK,iBAAmB,IACzB,KC7WI,EAAb,cAAqC,KAAM,CA8BzC,YAAY,CACV,OACA,UACA,QACA,UAMC,CACD,OAAO,CACP,KAAK,KAAO,EACZ,KAAK,MAAQ,EACb,KAAK,QAAU,EACf,KAAK,OAAS,EAmBhB,gBAA0B,CACxB,OACE,KAAK,OAAS,iBACd,KAAK,OAAS,gCC9IpB,SAAgB,EACd,EACQ,CACR,OAAO,OAAO,QAAQ,EAAS,CAC5B,KAAK,CAAC,EAAK,KAAW,GAAG,EAAI,GAAG,EAAO,OAAO,OAAO,EAAM,CAAC,GAAG,CAC/D,KAAK,IAAI,CAOd,SAAgB,EACd,EACA,EACS,CACT,OAAO,GAAU,GAAY,EAAS,EAAW,IAWnD,SAAgB,EACd,EACA,CAAE,uBAAsB,cACT,CAIf,GAAI,EACF,OAAO,KAGT,GAAI,GAAc,KAChB,OAAO,EAGT,IAAM,EAAO,EACb,GAAI,GAAQ,KACV,MAAM,IAAI,EAAgB,CACxB,KAAM,4BACN,QACE,mJACH,CAAC,CAGJ,OAAO,EAMT,SAAgB,EACd,EACA,EACA,EACgE,CAChE,GAAI,GAAmB,EACrB,MAAO,CAAC,CAAE,UAAW,EAAG,QAAS,EAAU,aAAc,EAAG,CAAC,CAI/D,IAAMC,EAIA,EAAE,CAER,GAAI,EAAmB,CAErB,IAAI,EAAc,EACd,EAAe,EAEnB,KAAO,EAAc,GAAU,CAC7B,IAAM,EAAU,KAAK,IAAI,EAAc,EAAmB,EAAS,CACnE,EAAS,KAAK,CACZ,UAAW,EACX,UACA,eACD,CAAC,CACF,EAAc,EACd,SAEG,CAEL,IAAM,EAAc,KAAK,KAAK,EAAW,EAAgB,CAEzD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAiB,IAAK,CACxC,IAAM,EAAY,EAAI,EAChB,EAAU,KAAK,IAAI,EAAY,EAAa,EAAS,CAEvD,EAAY,GACd,EAAS,KAAK,CACZ,YACA,UACA,aAAc,EACf,CAAC,EAKR,OAAO,ECvFT,eAAsB,EAAY,CAChC,WACA,SACA,SACA,uBACA,kBACA,aACA,eACA,gBACA,UAW0B,CAC1B,IAAM,EAAQ,GAAU,EAClB,EAAiB,EAAO,KAAO,EAAO,KAAO,EAAQ,IAAA,GACrD,EAAoB,EAAa,iBAAiB,EAAe,CAEnE,EAAM,EADe,EAAkB,KAOzC,EAAO,OACN,IAAQ,KAA4B,EAAM,EAAO,OAClD,CAAC,IAED,EAAM,EAAO,MAGf,GAAM,CAAE,QAAO,OAAM,QAAS,MAAM,EAAO,MAAM,EAAO,EAAI,CACtD,EAAc,GAAQ,EACtB,EAAiB,KAAK,KAAK,CAK7B,GAAwB,IAC1B,EAAO,KAAO,EAAS,GAQzB,IAAM,EAAU,EAAS,EACzB,GAAI,CAAC,GAAwB,GAAQ,IAAY,EAAO,KACtD,MAAM,IAAI,EAAgB,CACxB,KAAM,oBACN,QAAS,wCAAwC,EAAK,uCAAuC,EAAQ,QACtG,CAAC,CAGJ,IAAM,EAAS,MAAM,EAAc,YAAY,EAAU,EAAO,CAC9D,YAAa,EAAO,IAAU,CAC5B,IAAa,EAAU,EAAO,EAAM,EAEtC,kBACD,CAAC,CAGI,EAAgB,KAAK,KAAK,CAAG,EAC7B,EAAU,EAAO,QAAU,KAAO,EAAO,OAAS,IAQxD,OANA,EAAa,kBAAkB,EAAa,EAAe,EAA
Q,CAEnE,EAAO,IACL,gBAAgB,EAAU,YAAc,SAAS,IAAI,EAAY,YAAY,EAAc,MAAM,EAAkB,SAAS,YAC7H,CAEM,EAUT,SAAgB,EACd,EACA,EACA,EACA,EACA,EACS,CAaT,OAXE,GAAe,MACf,GAAgB,EAAY,QAC5B,CAAC,EAAI,gBAAgB,CAEd,GAGL,EACK,EAAc,EAAK,EAAa,CAGlC,EAAqB,EAAiB,EAAI,CAQnD,SAAgB,EACd,EACA,EACS,CACT,IAAM,EAAS,EAAI,QAAU,EAC7B,OACG,CAAC,EAAiB,EAAQ,IAAI,EAAI,IAAW,KAAO,IAAW,MAChE,EAAgB,UAAU,CClI9B,eAAsB,EAAgB,CACpC,SACA,aACA,gBACA,SACA,kBACA,gBACA,iBACA,GAAG,GAWH,CACA,GAAM,CAAE,SAAQ,aAAc,EAGxB,CAAE,QAAS,MAAM,EAAc,QAAQ,EAAO,CAG9C,EAAY,EAAK,MAAM,KAAM,GAAS,EAAK,OAAS,QAAQ,CAElE,GAAI,CAAC,EAAW,CACd,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,oBACN,QAAS,QAAQ,EAAO,gHACzB,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAGR,IAAM,EAAc,EAAU,GAGxB,EAAW,CACf,aAAc,EAAO,MAAQ,UAC7B,SAAU,EAAO,MAAQ,2BACzB,KAAM,EAAO,MAAQ,EACrB,GAAG,EAAW,SACf,CAED,EAAO,IAAI,iCAAiC,EAAO,SAAS,IAAc,CAE1E,GAAM,CAAE,SAAQ,OAAQ,MAAM,EAAc,QAAQ,EAAQ,EAAW,EACpE,GAAc,CACb,UAAW,OACX,YACA,WACD,CACF,CAAC,CAEI,EAAQ,EAAI,GAElB,GAAI,CAAC,EAAiB,EAAQ,IAAI,EAAI,CAAC,EAAO,CAC5C,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,mBACN,QAAS,mCACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAGR,EAAU,aAAa,EAAM,CAE7B,EAAO,IAAI,YAAY,EAAM,6BAA6B,CAI1D,EAAc,EAAM,CAEpB,EAAO,IAAI,kCAAkC,CAI7C,IAEI,EAAW,EACX,EAAY,MAAM,EAAc,aAAa,EAAM,CAEvD,KAAO,EAAU,SAAW,UAAY,EAAW,IACjD,MAAM,IAAI,QAAe,GACvB,EAAgB,WAAW,EAAS,IAAa,CAClD,CACD,EAAY,MAAM,EAAc,aAAa,EAAM,CACnD,IAGF,GAAI,EAAU,SAAW,SAAU,CACjC,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,eACN,QAAS,0CAA0C,EAAU,OAAO,GACrE,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAOR,IAAM,EAHqB,EAAU,MAAM,KACxC,GAAS,EAAK,SAAW,EAC3B,EACsC,OAEvC,GAAI,CAAC,GAAY,GAAI,CACnB,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,oBACN,QAAS,2CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAUR,OAPA,EAAO,IAAI,uBAAuB,EAAW,KAAK,CAElD,EAAU,UAAU,CAClB,SAAU,EAAW,GACrB,KAAM,EAAO,MAAQ,KACtB,CAAC,CAEK,CAAE,QAAO,aAAY,cAAa,CAO3C,eAAsB,EAAkB,CACtC,QACA,aACA,cACA,SACA,SACA,eAAe,EACf,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,GAAG,GAiByB,CAC5B,IAAI,EAAoB,EACpB,EAAgB,EAEpB,GAAI,CAEF,IAAM,EAAiB,EAAO,KAAO,EAAO,KAAO,EAAS,IAAA,GAEtD,EADoB,EAAa,iBAAiB,EAAe,CACnC,KAC9B,EAAU,KAAK,IAAI,EAAS,EAAW,EAAO,MAAQ,EAAE,CACxD,EAAc,MAAM,EAAO,MAAM,EAAQ,EAAQ,CAEvD,GAAI,CAAC,GAAe,CAAC,EAAY,MAC/B,MAAM,IAAI,EAAgB,CACxB,KAAM,gBACN,QAAS,iCACV,CAAC,CAGJ,IAAM,EAAY,EAAY,MAGxB,EAAY,KAAK,KAAK,CAEtB,EAAM,MAAM,EAAc,YAAY,EAAW,GAAI,EAAW,CACpE,kBACD,CAAC,CAEI,EAAW,KAAK,KAAK,CAAG,EAE9B,GAAI,CAAC,EAAI,OACP,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,4CACV,CAAC,CAaJ,GAVA,EAAgB,EAAI,OAAO,OAE3B,EAAU,aAAa,EAAW,GAAI,EAAe,EAAO,MAAQ,EAAE,CACtE,EAAU,kBACR,EAAgB,EAChB,EACA,EAAO,MAAQ,EAChB,CAGG,GAAe,UAAY,GAAO,CACpC,IAAM,EAAa,KAAK,MAAM,EAAS,EAAU,CAEjD,EAAQ,YAAY,CAClB,aACA,KAAM,EACN,WACA,MAAO,GAAa,EAAW,KAC/B,QAAS,GACT,WAAY,EACZ,iBACE,EAAa,iBAAiB,EAAE,kBAAkB,KACpD,iBAAkB,EAAa,iBAAiB,EAAE,SACnD,CAAC,CAGF,IAAM,EAAoB,EAAc,sBAAsB,CAC9D,EAAa,wBAAwB,EAAkB,CAIzD,GAAI,IAAkB,EAAO,MAAQ,GAAI,CAIvC,GAHI,GAAQ,EAAO,OAAO,CAGtB,GAAe,UAAY,GAAO,CACpC,IAAM,EAAiB,EAAQ,YAAY,CACvC,GACF,EAAO,IACL,0BAA0B,EAAe,UAAU,YAAY,EAAe,cAAc,iBAAiB,KAAK,MAAM,EAAe,aAAe,KAAK,CAAC,MAC7J,CAKL,EAAO,IAAI,kCAAkC,IAAQ,CAErD,GAAI,CACF,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,WACX,SAAU,EAAW,GACtB,CACD,CAAE,YAAa,mBAAoB,CACpC,OACM,EAAK,CAEZ,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,uBACN,QAAS,0CAA0C,IACnD,MAAO,EACR,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAER,OAIF,MAAM,EAAkB,CACtB,QACA,aACA,cACA,OAAQ,EACR,SACA,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,UACA,kBACA,GAAG,EACJ,CAAC,OACK,EAAK,CAEZ,GAAI,GAAe,KAejB,GAbE,GAAU,MAAQ,EAAgB,IAElC,EAAe,GAYf,EACE,EAVgB,aAAe,EAM/B,EALA,IAAI,EAAgB,CAClB,KAAM,gBACN,QAAS,mCACT,MAAO,EACR,CAAC,CAOF,EACA,EACA,EAAU,cACX,CACD,CACA,IAAM,EAAQ,EAAY,GAC1B,EAAoB,EAEpB,IAAM,EAAU,EAAgB,WAAW,SAA
Y,CACrD,MAAM,EAAkB,CACtB,QACA,aACA,cACA,SACA,SACA,aAAc,EAAe,EAC7B,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,kBACA,GAAG,EACJ,CAAC,EACD,EAAM,CACT,IAAU,EAAQ,MAElB,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,kCAAkC,EAAM,aAAa,IAC9D,MAAO,EACR,CAAC,EC5RV,eAAsB,EACpB,EAC8B,CAC9B,GAAM,CACJ,SACA,QACA,SACA,YACA,WAAW,EAAE,CACb,gBACA,SACA,kBACA,aACE,EAGE,EAAgB,CACpB,aAAc,EAAO,MAAQ,UAC7B,SAAU,EAAO,MAAQ,2BACzB,KAAM,EAAO,MAAQ,EACrB,GAAG,EACJ,CAED,EAAO,IAAI,2BAA2B,EAAO,WAAW,IAAQ,CAGhE,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,OACX,YACA,SAAU,EACX,CACD,CAAE,YAAa,mBAAoB,CACpC,CAED,EAAO,IAAI,mCAAmC,IAAS,CAGvD,IAEI,EAAW,EACX,EAAY,MAAM,EAAc,aAAa,EAAM,CAEvD,KAAO,EAAW,IAAa,CAE7B,IAAM,EAAW,EAAU,MAAM,KAAM,GAAS,EAAK,SAAW,EAAO,CAGvE,GACE,GAAU,SAAW,UACrB,EAAS,QACR,EAAS,OAAsB,GAChC,CACA,IAAM,EAAa,EAAS,OAQ5B,OAPA,EAAO,IAAI,+BAA+B,EAAO,IAAI,EAAW,KAAK,CAErE,GAAW,UAAU,CACnB,SAAU,EAAW,GACrB,KAAM,EAAO,MAAQ,KACtB,CAAC,CAEK,CAAE,aAAY,SAAQ,CAI/B,GAAI,GAAU,SAAW,SAAU,CACjC,IAAMC,EAAQ,IAAI,EAAgB,CAChC,KAAM,mBACN,QAAS,cAAc,EAAO,+BAC/B,CAAC,CAEF,MADA,GAAW,UAAUA,EAAM,CACrBA,EAGR,MAAM,IAAI,QAAe,GACvB,EAAgB,WAAW,EAAS,IAAa,CAClD,CACD,EAAY,MAAM,EAAc,aAAa,EAAM,CACnD,IAGF,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,eACN,QAAS,cAAc,EAAO,sCAC/B,CAAC,CAEF,MADA,GAAW,UAAU,EAAM,CACrB,EASR,eAAsB,EACpB,EACe,CACf,GAAM,CACJ,SACA,QACA,aACA,SACA,SAAS,EACT,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,aACE,EAEA,EAAe,EAAQ,cAAgB,EACvC,EAAoB,EACpB,EAAgB,EAEpB,GAAI,CAEF,IAAM,EAAiB,EAAO,KAAO,EAAO,KAAO,EAAS,IAAA,GAEtD,EADoB,EAAa,iBAAiB,EAAe,CACnC,KAC9B,EAAU,KAAK,IAAI,EAAS,EAAW,EAAO,MAAQ,EAAE,CACxD,EAAc,MAAM,EAAO,MAAM,EAAQ,EAAQ,CAEvD,GAAI,CAAC,GAAe,CAAC,EAAY,MAC/B,MAAM,IAAI,EAAgB,CACxB,KAAM,gBACN,QAAS,2CAA2C,IACrD,CAAC,CAGJ,IAAM,EAAY,EAAY,MAGxB,EAAY,KAAK,KAAK,CAEtB,EAAM,MAAM,EAAc,YAAY,EAAW,GAAI,EAAW,CACpE,kBACD,CAAC,CAEI,EAAW,KAAK,KAAK,CAAG,EAE9B,GAAI,CAAC,EAAI,OACP,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,sDAAsD,IAChE,CAAC,CAaJ,GAVA,EAAgB,EAAI,OAAO,OAE3B,GAAW,aAAa,EAAW,GAAI,EAAe,EAAO,MAAQ,EAAE,CACvE,GAAW,kBACT,EAAgB,EAChB,EACA,EAAO,MAAQ,EAChB,CAGG,GAAe,UAAY,GAAO,CACpC,IAAM,EAAa,KAAK,MAAM,EAAS,EAAU,CAEjD,EAAQ,YAAY,CAClB,aACA,KAAM,EACN,WACA,MAAO,GAAa,EAAW,KAC/B,QAAS,GACT,WAAY,EACZ,iBACE,EAAa,iBAAiB,EAAE,kBAAkB,KACpD,iBAAkB,EAAa,iBAAiB,EAAE,SACnD,CAAC,CAGF,IAAM,EAAoB,EAAc,sBAAsB,CAC9D,EAAa,wBAAwB,EAAkB,CAIzD,GAAI,IAAkB,EAAO,MAAQ,GAAI,CAIvC,GAHA,EAAO,OAAO,CAGV,GAAe,UAAY,GAAO,CACpC,IAAM,EAAiB,EAAQ,YAAY,CACvC,GACF,EAAO,IACL,6BAA6B,EAAO,IAAI,EAAe,UAAU,YAAY,EAAe,cAAc,iBAAiB,KAAK,MAAM,EAAe,aAAe,KAAK,CAAC,MAC3K,CAIL,OAIF,MAAM,EAAkB,CACtB,GAAG,EACH,OAAQ,EACR,aAAc,EACf,CAAC,OACK,EAAK,CAEZ,GAAI,GAAe,KAejB,GAd0B,EAAgB,IAGxC,EAAe,GAYf,EACE,EAVgB,aAAe,EAM/B,EALA,IAAI,EAAgB,CAClB,KAAM,gBACN,QAAS,wCAAwC,IACjD,MAAO,EACR,CAAC,CAOF,EACA,EACA,GAAW,cACZ,CACD,CACA,IAAM,EAAQ,EAAY,GAC1B,EAAoB,EAEpB,IAAM,EAAU,EAAgB,WAAW,SAAY,CACrD,MAAM,EAAkB,CACtB,GAAG,EACH,SACA,aAAc,EAAe,EAC9B,CAAC,EACD,EAAM,CACT,IAAU,EAAQ,MAElB,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,mCAAmC,EAAO,aAAa,IAChE,MAAO,EACR,CAAC,MAGJ,MAAM,GAWZ,eAAsB,EACpB,EACe,CACf,GAAM,CAAE,SAAQ,QAAO,WAAU,gBAAe,SAAQ,aAAc,EAEtE,EAAO,IAAI,yBAAyB,EAAO,WAAW,IAAQ,CAE9D,GAAI,CACF,MAAM,EAAc,WAClB,EACA,EACA,CACE,UAAW,WACX,WACD,CACD,CAAE,YAAa,mBAAoB,CACpC,CAED,EAAO,IAAI,cAAc,EAAO,yBAAyB,OAClD,EAAK,CACZ,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,uBACN,QAAS,iCAAiC,EAAO,WAAW,IAC5D,MAAO,EACR,CAAC,CAEF,MADA,GAAW,UAAU,EAAM,CACrB,GCzWV,eAAsB,GACpB,EACA,EAC2B,CAC3B,OAAO,EAAc,yBAAyB,EAAY,CAM5D,SAAgB,GAAyB,EAIvC,CACA,MAAO,CACL,SAAU,EAAe,UAAY,KACrC,mBAAoB,EAAe,mBACnC,iBAAkB,EAAe,iBAClC,CAMH,eAAsB,EAA0B,CAC9C,gBACA,cACA,OACA,WACA,mBACA,8BACA,cAS8B,CAK9B,GACE,CAAC,GACD,CAAC,GACD,GAAoB,KAEpB,OAGF,IAA
MC,EAA+B,CACnC,OACA,WACA,aAAc,IAAI,MAAM,CAAC,UAAU,CACnC,iBAAkB,EACnB,CAQD,OAN4B,MAAM,EAAc,UAC9C,EACA,EACA,CAAE,aAAY,CACf,CAQH,eAAsB,EACpB,EACA,EACe,CACV,GACL,MAAM,EAAc,aAAa,EAAiB,CCnCpD,eAAsB,EAAc,CAClC,WACA,SACA,SACA,uBACA,eAAe,EACf,kBACA,cACA,eACA,gBACA,SACA,gBACA,UACA,kBACA,UACA,GAAG,GAgByB,CAC5B,IAAI,EAAoB,EACpB,EAAgB,EAEpB,GAAI,CACF,IAAM,EAAM,MAAM,EAAY,CAC5B,WACA,SACA,SACA,uBACA,WAAY,EAAU,WACtB,kBACA,eACA,gBACA,SACD,CAAC,CAEF,GAAI,CAAC,EAAiB,EAAI,OAAQ,IAAI,EAAI,EAAI,QAAU,KACtD,MAAM,IAAI,EAAgB,CACxB,KAAM,8BACN,QAAS,4CACV,CAAC,CAaJ,GAVA,EAAgB,EAAI,OAAO,OAE3B,EAAU,aAAa,EAAU,EAAe,EAAI,OAAO,MAAQ,EAAE,CACrE,EAAU,kBACR,EAAgB,EAChB,EACA,EAAI,QAAQ,MAAQ,EACrB,CAGG,GAAe,UAAY,GAAO,CACpC,IAAM,EAAa,KAAK,MAAM,GAAU,EAAgB,GAAU,GAAG,CAC/D,EAAY,EAAgB,EAC5B,EAAgB,KAAK,KAAK,EAAI,KAAK,KAAK,CAAG,KAC3C,EAAe,EAAa,iBAAiB,CAEnD,EAAQ,YAAY,CAClB,aACA,KAAM,EACN,SAAU,EACV,MAAO,GAAa,EAAgB,KACpC,QAAS,GACT,WAAY,EACZ,iBAAkB,GAAc,kBAAkB,KAClD,iBAAkB,GAAc,SACjC,CAAC,CAGF,IAAM,EAAoB,EAAc,sBAAsB,CAC9D,EAAa,wBAAwB,EAAkB,CAGzD,GAAI,IAAkB,EAAO,MAAQ,GAAI,CAIvC,GAHI,GAAQ,EAAO,OAAO,CAGtB,GAAe,UAAY,GAAO,CACpC,IAAM,EAAiB,EAAQ,YAAY,CACvC,GACF,EAAO,IACL,qBAAqB,EAAe,UAAU,YAAY,EAAe,cAAc,iBAAiB,KAAK,MAAM,EAAe,aAAe,KAAK,CAAC,MACxJ,CAIL,EAAU,YAAY,EAAI,OAAO,CACjC,OAGF,MAAM,EAAc,CAClB,WACA,OAAQ,EACR,SACA,uBACA,cACA,eACA,kBACA,gBACA,SACA,gBACA,UACA,UACA,kBACA,GAAG,EACJ,CAAC,OACK,EAAK,CAEZ,GAAI,GAAe,KAkBjB,GAbE,GAAU,MAAQ,EAAgB,IAElC,EAAe,GAYf,EACE,EAVgB,aAAe,EAM/B,EALA,IAAI,EAAgB,CAClB,KAAM,gBACN,QAAS,gBACT,MAAO,EACR,CAAC,CAOF,EACA,EACA,EAAU,cACX,CACD,CACA,IAAM,EAAQ,EAAY,GAE1B,EAAoB,EAEpB,IAAM,EAAU,EAAgB,WAAW,SAAY,CACrD,MAAM,EAAc,CAClB,WACA,SACA,SACA,aAAc,EAAe,EAC7B,uBACA,cACA,eACA,kBACA,gBACA,SACA,gBACA,UACA,UACA,kBACA,GAAG,EACJ,CAAC,EACD,EAAM,CACT,IAAU,EAAQ,MAElB,MAAM,IAAI,EAAgB,CACxB,KAAM,sBACN,QAAS,8BAA8B,EAAS,aAAa,IAC7D,MAAO,EACR,CAAC,EAWV,eAAsB,EAAa,CACjC,cACA,YACA,SACA,uBACA,WACA,gBACA,SACA,kBACA,gBACA,aACA,8BACA,gBACA,iBACA,kBAAkB,GAClB,oBAAoB,SACpB,kBACA,GAAG,GAkBmD,CACtD,GAAI,CAAC,GAAwB,EAAO,MAAQ,KAAM,CAChD,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,4BACN,QAAS,0BACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAIR,IAAIC,EACJ,GAAI,GAAmB,EAAgB,WAAW,EAAO,MAAM,CAC7D,GAAI,CACF,EAAO,IAAI,6BAA6B,CACxC,EAAW,MAAM,EAAgB,gBAC/B,IAAI,WAAW,EAAO,MAAa,CACpC,CACD,EAAO,IAAI,sBAAsB,IAAW,OACrC,EAAO,CACd,EAAO,IACL,wCAAwC,aAAiB,MAAQ,EAAM,QAAU,kBAClF,CAKL,IAAMC,EAA8B,CAClC,uBACA,YACA,KAAM,EAAO,MAAQ,EACrB,SAAU,EAAW,EAAe,EAAS,CAAG,IAAA,GAChD,SAAU,EAAO,MAAQ,IAAA,GACzB,KAAM,EAAO,MAAQ,GACrB,aAAc,EAAO,cAAgB,IAAA,GACrC,WACA,kBAAmB,EAAW,EAAoB,IAAA,GACnD,CAEK,CAAE,SAAQ,UAAW,MAAM,EAAc,aAAa,EAAiB,CAE7E,GAAI,CAAC,EAAiB,EAAQ,IAAI,EAAI,GAAU,KAAM,CACpD,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,8BACN,QAAS,4CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAOR,GAJA,EAAO,IAAI,kBAAkB,EAAO,KAAK,CAEzC,EAAc,EAAO,GAAG,CAEpB,EAAO,OAAS,EAAG,CAErB,EAAU,YAAY,EAAO,CACzB,GAAQ,EAAO,OAAO,CAC1B,EAAe,EAAO,GAAG,CACzB,OAGF,IAAM,EAAqB,MAAM,EAA0B,CACzD,gBACA,cACA,KAAM,EAAO,MAAQ,EACrB,SAAU,EAAO,UAAY,EAAE,CAC/B,iBAAkB,KAClB,8BACA,aACD,CAAC,CAOF,OALA,EAAU,UAAU,CAClB,SAAU,EAAO,GACjB,KAAM,EAAO,MAAQ,KACtB,CAAC,CAEK,CACL,qBACA,SAAU,EAAO,GACjB,OAAQ,EAAO,OAChB,CAQH,eAAsB,EAAa,CACjC,WACA,YACA,qBACA,cACA,SACA,uBACA,gBACA,SACA,kBACA,kBACA,gBACA,aACA,8BACA,gBACA,GAAG,GAgBmD,CACtD,IAAM,EAAM,MAAM,EAAc,UAAU,EAAS,CAC7C,EAAS,EAAI,OAEnB,GAAI,CAAC,EAAiB,EAAQ,IAAI,CAAE,CAMlC,GAAI,IAAW,IAAK,CAClB,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,gBACN,QAAS,0CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAUR,OAPI,EAAiB,EAAQ,IAAI,EAG/B,MAAM,EAAwB,EAAe,EAAmB,CAI3D,MAAM,EAAa,CACxB,kBACA,cACA,YACA,SACA,uBACA,SAAU,EAAE,CACZ,gBACA,SACA,kBACA,gBACA,aACA,8BACA,gBACA,mBAAsB,GACtB,
GAAG,EACJ,CAAC,CAGJ,IAAM,EAAS,EAAI,OACnB,GAAI,GAAU,KAAM,CAClB,IAAM,EAAQ,IAAI,EAAgB,CAChC,KAAM,8BACN,QAAS,4CACV,CAAC,CAEF,MADA,EAAU,UAAU,EAAM,CACpB,EAGR,SAAM,EAA0B,CAC9B,gBACA,cACA,KAAM,EAAO,MAAQ,EACrB,SAAU,EAAO,UAAY,EAAE,CAC/B,iBAAkB,EAClB,8BACA,aACD,CAAC,CAIE,EAAO,SAAW,EAAO,KAM7B,OAFA,EAAc,EAAO,GAAG,CAEjB,CACL,WACA,qBACA,OAAQ,EAAO,OAChB,CAOH,eAAsB,GAAkB,CACtC,SACA,WACA,qBACA,YACA,cACA,kBACA,uBACA,gBACA,kBACA,SACA,gBACA,aACA,8BACA,gBACA,iBACA,GAAG,GAiBmD,CAyBtD,OAvBI,GAAY,MAAQ,GAAsB,MAC5C,EAAO,IAAI,qCAAqC,IAAW,CACpD,MAAM,EAAa,CACxB,WACA,qBACA,YACA,cACA,SACA,kBACA,uBACA,gBACA,SACA,kBACA,gBACA,aACA,8BACA,gBACA,GAAG,EACJ,CAAC,GAIJ,EAAO,IAAI,wBAAwB,CAC5B,MAAM,EAAa,CACxB,cACA,YACA,SACA,uBACA,SAAU,EAAE,CACZ,gBACA,SACA,kBACA,kBACA,gBACA,aACA,8BACA,gBACA,iBACA,GAAG,EACJ,CAAC,ECzfJ,eAAsB,GAAoB,CACxC,SACA,YACA,cACA,uBACA,kBACA,oBACA,cACA,eACA,gBACA,SACA,kBACA,gBACA,UACA,gBACA,aACA,8BACA,gBACA,iBACA,UAAA,EACA,yBACA,kBACA,GAAG,GAuBqD,CACxD,GAAI,CAAC,EAAO,MAAQ,EAAO,OAAS,EAAG,CACrC,EAAU,UACR,IAAI,EAAgB,CAClB,KAAM,4BACN,QAAS,6CACV,CAAC,CACH,CACD,OAIF,IAAM,EAAW,EACf,EAAO,KACP,EACA,EACD,CACD,EAAO,IAAI,iCAAiC,EAAS,OAAO,WAAW,CAGvE,IAAMC,EAAqC,CACzC,SAAU,EAAE,CACZ,cAAe,EACf,UAAW,GACX,OAAQ,GACT,CAGK,EAAkB,IAAI,IACtB,EAAgB,IAAI,IAEpB,MAA4B,CAChC,IAAM,EAAa,MAAM,KAAK,EAAc,QAAQ,CAAC,CAAC,QACnD,EAAK,IAAS,EAAM,EACrB,EACD,CACK,EAAgB,MAAM,KAAK,EAAgB,QAAQ,CAAC,CAAC,QACxD,EAAK,IAAa,EAAM,EACzB,EACD,CACD,EAAc,cACZ,EAAa,EAAI,EAAgB,EAAa,EAG5C,EAAU,YAAc,EAAa,GACvC,EAAU,WAAW,kBAAmB,EAAe,EAAW,EAItE,GAAI,CAEF,IAAM,EAAiB,MAAM,QAAQ,IACnC,EAAS,IAAI,KAAO,IAAY,CAE9B,IAAMC,EAA4B,CAChC,GAAG,EACH,KAAM,EAAQ,QAAU,EAAQ,UAChC,MAAM,MAAM,EAAO,EAAK,CAEtB,IAAM,EAAc,EAAQ,WAAa,GAAS,GAC5C,EAAY,KAAK,IACrB,EAAQ,WAAa,GAAO,EAAQ,QAAU,EAAQ,WACtD,EAAQ,QACT,CACD,OAAO,MAAM,EAAO,MAAM,EAAa,EAAU,EAEpD,CAEK,EAAe,MAAM,EAAa,CACtC,YAAa,GAAG,EAAY,WAAW,EAAQ,eAC/C,YACA,OAAQ,EACR,uBACA,kBACA,SAAU,CACR,eAAgB,OAChB,aAAc,EAAQ,aAAa,UAAU,CAC7C,cAAe,EAAS,OAAO,UAAU,CACzC,kBAAmB,EACpB,CACD,kBACA,gBACA,SACA,gBACA,aACA,8BACA,gBACA,iBACA,cAAiB,GACjB,QAAU,GACR,EAAO,IACL,WAAW,EAAQ,aAAa,mBAAmB,IACpD,CACH,QAAU,GAAS,CACjB,EAAc,IAAI,EAAQ,aAAc,EAAK,MAAQ,EAAE,CACvD,GAAqB,EAExB,CAAC,CAEF,GAAI,CAAC,EACH,MAAM,IAAI,EAAgB,CACxB,KAAM,mCACN,QAAS,mCAAmC,EAAQ,eACrD,CAAC,CAcJ,MAAO,CACL,QAZ6C,CAC7C,SAAU,EAAa,SACvB,mBAAoB,EAAa,mBACjC,aAAc,EAAQ,aACtB,UAAW,EAAQ,UACnB,QAAS,EAAQ,QACjB,OAAQ,EAAa,OACrB,gBAAiB,EAAuB,QAAQ,CAChD,aAAc,KACf,CAIC,OAAQ,EACT,EACD,CACH,CAGD,EAAc,SAAW,EAAe,IAAK,GAAW,EAAO,QAAQ,CAGvE,EAAU,UAAU,CAClB,SAAU,YAAY,EAAc,SAAS,IAAK,GAAM,EAAE,SAAS,CAAC,KAAK,IAAI,GAC7E,KAAM,EAAO,KACd,CAAC,CAGF,IAAM,EAAiB,EAAe,IACpC,MAAO,CAAE,UAAS,OAAQ,KAAoB,CAC5C,GAAI,CACF,MAAM,EAAc,CAClB,SAAU,EAAQ,SAClB,OAAQ,EAAQ,OAChB,OAAQ,EACR,uBACA,gBAAiB,EAAQ,gBACzB,cACA,eACA,gBACA,kBACA,SACA,gBACA,UACA,YAAa,EAAG,EAAO,IAAU,CAC/B,EAAgB,IAAI,EAAQ,aAAc,EAAM,CAC5C,GAAO,EAAc,IAAI,EAAQ,aAAc,EAAM,CACzD,GAAqB,EAEvB,iBAAkB,EAAW,EAAe,IAAe,CACrD,EAAU,iBACZ,EAAU,gBAAgB,EAAW,EAAe,EAAW,EAGnE,UAAY,GAAgB,CAC1B,EAAO,IACL,WAAW,EAAQ,aAAa,yBACjC,CAED,EAAgB,IACd,EAAQ,aACR,EAAc,IAAI,EAAQ,aAAa,EAAI,EAC5C,CACD,GAAqB,EAEvB,eAAgB,EAAO,KACrB,EAAO,IACL,WAAW,EAAQ,aAAa,iBAAiB,EAAa,IAAI,IACnE,CACM,GAAgB,GAAa,QAAU,IAEhD,QAAU,GAAY,CACpB,EAAQ,aAAe,GAEzB,QAAU,GAAU,CAElB,MADA,EAAO,IAAI,WAAW,EAAQ,aAAa,WAAW,IAAQ,CACxD,GAET,CAAC,OACK,EAAO,CAEd,MADA,EAAO,IAAI,WAAW,EAAQ,aAAa,kBAAkB,IAAQ,CAC/D,IAAI,EAAgB,CACxB,KAAM,iCACN,QAAS,WAAW,EAAQ,aAAa,gBACzC,MAAO,EACR,CAAC,GAGP,CAUD,GAPA,MAAM,QAAQ,IAAI,EAAe,CAGjC,EAAc,UAAY,GAC1B,EAAO,IAAI,sDAAsD,CAG7D,EAAU,UAAW,CACvB,IAAMC,EAA+B,CACnC,GAAI,YAAY,EAAc,SAAS,IAAK,GAAM,EAAE,SAAS,CAAC,KAAK,IAAI,GACvE,OAAQ,EAAO,KACf,KAAM,EAAO,KA
Cb,QAAS,CACP,GAAI,EACJ,KAAM,kBACP,CACD,SAAU,CACR,eAAgB,OAChB,cAAe,EAAS,OAAO,UAAU,CACzC,cACD,CACF,CACD,EAAU,UAAU,EAAiB,CAIvC,IAAK,IAAM,KAAU,EACnB,EAAO,OAAO,SAAS,CAGzB,MAAO,CACL,gBACA,MAAO,SAAY,CACjB,MAAM,EACJ,EACA,EACAC,EACA,EACA,EACD,EAEJ,OACM,EAAO,CAcd,KAbA,GAAc,OAAS,GACvB,EAAc,MAAQ,EAGtB,MAAM,EACJ,EACA,EACAA,EACA,EACA,EACD,CAED,EAAU,UAAU,EAAe,CAC7B,GAOV,eAAsB,EACpB,EACA,EACA,EACA,EACA,EACe,CACf,EAAO,IAAI,8BAA8B,CAGzC,IAAK,IAAM,KAAW,EAAM,SAAU,CACpC,EAAQ,gBAAgB,OAAO,CAE/B,AAEE,EAAQ,gBADR,EAAgB,aAAa,EAAQ,aAAa,CAC3B,MAIzB,GAAI,CACF,MAAMA,EAAU,EAAQ,SAAS,OAC1B,EAAO,CACd,EAAO,IACL,+BAA+B,EAAQ,aAAa,IAAI,IACzD,CAIH,EAAe,EAAQ,SAAS,CAGlC,EAAM,UAAY,GAClB,EAAM,OAAS,GACf,EAAO,IAAI,0BAA0B,CCxUvC,eAAsB,EACpB,EACA,EACe,CACf,OAAO,IAAI,QAAe,GACxB,EAAgB,WAAW,EAAS,EAAG,CACxC,CC7CH,eAAsB,EACpB,EACA,EACA,EACA,EACA,EAAe,EACA,CACf,GAAI,CAGF,IAFY,MAAM,EAAc,aAAa,EAAS,EAE9C,SAAW,IACjB,OAGF,MAAM,IAAI,EAAgB,CACxB,KAAM,8BACN,QAAS,+CACV,CAAC,OACK,EAAK,CAGZ,GAAI,CAAC,EAAY,EAFH,EAE2B,EAAc,EAAY,CACjE,MAAM,EAUR,OAFA,MAAM,EAAK,EAFG,IAAc,IAAiB,EAEX,CAE3B,MAAM,EACX,EACA,EACA,EACA,EACA,EAAe,EAChB,EAWL,eAAsB,EAAM,CAC1B,WACA,qBACA,eACA,kBACA,kBACA,gBACA,kBACA,cACA,iBAWgB,CAEhB,KAAgB,OAAO,CAGnB,GAAgB,MAClB,EAAgB,aAAa,EAAa,CAGxC,GAAC,GAAmB,GAAY,QAIpC,MAAM,EAAU,EAAU,EAAe,EAAiB,EAAY,CAElE,GAAsB,MACxB,OAAO,EAAwB,EAAe,EAAmB,CCzFrE,IAAa,EAAb,KAA2B,CAMzB,YAAY,EAA8B,EAAE,CAAE,mBAJP,EAAE,qBACe,EAAE,uBAC/B,EAGzB,KAAK,OAAS,CACZ,gBAAiB,EAAO,iBAAmB,IAC3C,sBAAuB,EAAO,uBAAyB,GACvD,sBAAuB,CACrB,UAAW,IAAM,KACjB,UAAW,EAAI,KAAO,KACtB,cAAe,GACf,GAAG,EAAO,sBACX,CACF,CAGH,aACE,EACA,EACA,EACM,CACN,KAAK,iBAAmB,KAAK,KAAK,CAClC,KAAK,eAAiB,CACpB,WACA,YACA,gBAAiB,EACjB,YAAa,KAAK,KAAK,GAAa,KAAO,MAAM,CACjD,cAAe,EACf,aAAc,EACd,0BACA,UAAW,KAAK,iBACjB,CACD,KAAK,aAAe,EAAE,CAGxB,YAAY,EAAgD,CAC1D,IAAMC,EAA6B,CACjC,GAAG,EACH,UAAW,KAAK,KAAK,CACtB,CAED,KAAK,aAAa,KAAK,EAAa,CAGhC,KAAK,aAAa,OAAS,KAAK,OAAO,kBACzC,KAAK,aAAe,KAAK,aAAa,MAAM,CAAC,KAAK,OAAO,gBAAgB,EAIvE,KAAK,gBAAkB,EAAa,UACtC,KAAK,eAAe,iBACjB,KAAK,eAAe,iBAAmB,GAAK,EAC/C,KAAK,eAAe,eACjB,KAAK,eAAe,eAAiB,GAAK,EAAa,SAC1D,KAAK,eAAe,cACjB,KAAK,eAAe,cAAgB,GAAK,EAAa,YAI7D,YAA0C,CACxC,GAAI,CAAC,KAAK,eAAe,SACvB,OAAO,KAGT,IAAM,EAAU,KAAK,KAAK,CACpB,EAAgB,EAAU,KAAK,iBAC/B,EAAmB,KAAK,aAAa,OAAQ,GAAU,EAAM,QAAQ,CAE3E,GAAI,EAAiB,SAAW,EAC9B,OAAO,KAGT,IAAM,EAAS,EAAiB,IAAK,GAAU,EAAM,MAAM,CACrD,EACJ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OACnD,EAAY,KAAK,IAAI,GAAG,EAAO,CAC/B,EAAW,KAAK,IAAI,GAAG,EAAO,CAC9B,EAAc,EAAiB,OAAS,KAAK,aAAa,OAE1DC,EAAuC,CAC3C,SAAU,KAAK,eAAe,UAAY,GAC1C,UAAW,KAAK,eAAe,WAAa,EAC5C,gBACA,gBAAiB,EAAiB,OAClC,YAAa,KAAK,aAAa,OAC/B,eACA,YACA,WACA,aAAc,KAAK,eAAe,cAAgB,EAClD,cACA,wBACE,KAAK,eAAe,yBAA2B,GACjD,UAAW,KAAK,eAAe,WAAa,EAC5C,UACD,CAKD,MAFA,MAAK,eAAiB,EAAE,CAEjB,EAGT,0BAA0D,CACxD,MAAO,CAAE,GAAG,KAAK,eAAgB,CAGnC,gBAAgB,EAAgC,CAC9C,IAAM,EAAU,KAAK,aAAa,OAAO,CACzC,OAAO,EAAQ,EAAQ,MAAM,CAAC,EAAM,CAAG,EAGzC,wBAA8C,CAC5C,GAAI,KAAK,aAAa,OAAS,EAC7B,MAAO,CACL,kBAAmB,EACnB,sBAAuB,EACvB,iBAAkB,EAClB,gBAAiB,CAAC,iCAAiC,CACnD,sBAAuB,CAAE,IAAK,IAAM,KAAM,IAAK,EAAI,KAAO,KAAM,CACjE,CAGH,IAAM,EAAmB,KAAK,aAAa,OAAQ,GAAU,EAAM,QAAQ,CACrE,EAAS,EAAiB,IAAK,GAAU,EAAM,MAAM,CAGrD,EACJ,EAAO,OAAS,EACZ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OACvD,EACA,EAAgB,KAAK,kBAAkB,EAAO,CAE9C,EADc,KAAK,KAAK,EAAc,CACC,EAGvC,EAAc,EAAiB,OAAS,KAAK,aAAa,OAK1D,EAJa,KAAK,IACtB,EACA,EAAe,KAAK,OAAO,sBAAsB,UAClD,CACsC,GAAM,EAAc,GAGrD,EAAmB,KAAK,IAC5B,EACA,EAAI,KAAK,IAAI,EAAG,EAAuB,CACxC,CAiBD,MAAO,CACL,oBACA,sBAfA,KAAK,+BAA+B,EAAiB,CAgBrD,mBACA,gBAdsB,KAAK,wBAC3B,EACA,EACA,EACD,CAWC,sBAPA,KAAK,+BAA+B,EAAiB,CAQtD,CAGH,eAIE,CACA,MAAO,CACL,QAAS,KAAK,0BAA0B,CAC
xC,OAAQ,KAAK,iBAAiB,CAC9B,SAAU,KAAK,wBAAwB,CACxC,CAGH,OAAc,CACZ,KAAK,aAAe,EAAE,CACtB,KAAK,eAAiB,EAAE,CACxB,KAAK,iBAAmB,EAG1B,kBAA0B,EAA0B,CAClD,GAAI,EAAO,SAAW,EAAG,MAAO,GAEhC,IAAM,EAAO,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OAEpE,OAD2B,EAAO,IAAK,IAAW,EAAQ,IAAS,EAAE,CAEhD,QAAQ,EAAK,IAAS,EAAM,EAAM,EAAE,CAAG,EAAO,OAIrE,+BAAuC,EAAgC,CACrE,GAAI,EAAO,OAAS,EAAG,MAAO,IAI9B,IAAM,EAAa,KAAK,kBAAkB,EAAO,CAEjD,GAAI,OAAO,KAAK,EAAW,CAAC,OAAS,EAAG,MAAO,IAG/C,IAAM,EAAkB,OAAO,OAAO,EAAW,CAAC,IAAK,GAAU,CAC/D,IAAM,EAAS,EAAM,IAAK,GAAU,EAAM,MAAM,CAC1C,EACJ,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAO,EAAE,CAAG,EAAO,OACnD,EAAW,KAAK,kBAAkB,EAAO,CAC/C,OAAO,KAAK,KAAK,EAAS,CAAG,GAC7B,CAGI,EACJ,EAAgB,QAAQ,EAAK,IAAO,EAAM,EAAI,EAAE,CAAG,EAAgB,OACrE,OAAO,KAAK,IAAI,EAAG,EAAI,KAAK,IAAI,EAAG,EAAiB,CAAC,CAGvD,kBACE,EACgC,CAChC,IAAMC,EAAyC,EAAE,CAmBjD,OAjBA,EAAO,QAAS,GAAU,CAExB,IAAIC,EACJ,AAOK,EAPD,EAAM,KAAO,IAAM,KAAkB,OAChC,EAAM,KAAO,IAAM,KAAkB,QACrC,EAAM,KAAO,IAAM,KAAkB,QACrC,EAAM,KAAO,KAAO,KAAkB,QACtC,EAAM,KAAO,EAAI,KAAO,KAAkB,MAC1C,EAAM,KAAO,EAAI,KAAO,KAAkB,MAC1C,EAAM,KAAO,EAAI,KAAO,KAAkB,MAClC,OAEZ,EAAO,KAAY,EAAO,GAAa,EAAE,EAC9C,IAAM,EAAQ,EAAO,GACjB,GAAO,EAAM,KAAK,EAAM,EAC5B,CAEK,EAGT,wBACE,EACA,EACA,EACU,CACV,IAAMC,EAA4B,EAAE,CAyCpC,OAvCI,EAAe,KAAK,OAAO,sBAAsB,WACnD,EAAgB,KACd,gFACD,CAGC,EAAe,KAAK,OAAO,sBAAsB,WACnD,EAAgB,KACd,8DACD,CAGC,EAAc,IAChB,EAAgB,KACd,4EACD,CAGC,EAAyB,IAC3B,EAAgB,KACd,gFACD,CAID,EAAyB,IACzB,EAAe,KAAK,OAAO,sBAAsB,WAEjD,EAAgB,KACd,iEACD,CAGC,EAAgB,SAAW,GAC7B,EAAgB,KACd,yDACD,CAGI,EAGT,+BAAuC,EAGrC,CACA,GAAI,EAAO,OAAS,EAClB,MAAO,CAAE,IAAK,IAAM,KAAM,IAAK,EAAI,KAAO,KAAM,CAUlD,IAAM,EANgB,EAAO,OAAO,CAAC,MAAM,EAAG,IAAM,EAAE,MAAQ,EAAE,MAAM,CAClC,MAClC,EACA,KAAK,KAAK,EAAO,OAAS,GAAI,CAC/B,CAE8B,IAAK,GAAU,EAAM,KAAK,CACnD,EAAa,KAAK,IAAI,GAAG,EAAS,CAClC,EAAa,KAAK,IAAI,GAAG,EAAS,CAExC,MAAO,CACL,IAAK,KAAK,IAAI,GAAK,KAAM,EAAW,CACpC,IAAK,KAAK,IAAI,GAAK,KAAO,KAAM,EAAW,CAC5C,GC3SL,SAAgB,EACd,EAC0B,CAC1B,OAAO,IAAI,EAAyB,EAAU,iBAAiB,CAAG,GAChE,EAAU,uBAAuB,EAAS,CAC3C,CAuCH,SAAgB,GAAwB,CACtC,eACA,WACA,YACA,kBACA,uBACA,iBACA,UASqB,CACrB,GAAI,GAAgB,8BAAgC,GAAO,CAGzD,IAAM,EAAa,EADG,IAAI,EAAoB,EAAa,CACK,CAE1DC,EAA4C,CAChD,SAAU,GAAY,EACtB,kBACE,GAAgB,oBAAsB,OAClC,IAAA,GACA,GAAgB,kBACtB,mBAAoB,EACpB,kBACA,wBACE,GAAgB,wBAA0B,GAAK,KAAO,KACzD,CAEK,EAAqB,EAAW,kBAAkB,EAAmB,CAG3E,EAAO,IAAI,+BAA+B,EAAmB,WAAW,CACxE,IAAK,IAAM,KAAU,EAAmB,UACtC,EAAO,IAAI,OAAO,IAAS,CAE7B,IAAK,IAAM,KAAW,EAAmB,SACvC,EAAO,IAAI,cAAc,IAAU,CAYrC,OARA,GAAgB,qBAAqB,CACnC,OAAQ,EAAmB,SAC3B,UAAW,EAAmB,UAC9B,gBAAiB,EAAmB,gBACpC,UAAW,EAAmB,UAC9B,SAAU,EAAmB,SAC9B,CAAC,CAEK,MACF,CAEL,IAAM,EACJ,EAAkB,GAClB,GACA,GAAY,GAAgB,wBAA0B,GAAK,KAAO,OAClE,CAAC,EAEH,MAAO,CACL,SAAU,EAA0B,WAAa,SACjD,YACA,gBAAiB,EAA0B,EAAkB,EAC7D,UAAW,CACT,8BAA8B,EAA0B,WAAa,WACtE,CACD,SAAU,EAAE,CACb,EAOL,SAAgB,EACd,EACA,EAAsC,EACtC,EAKA,CACA,IAAMC,EAAmB,EAAE,CACrBC,EAAqB,EAAE,CAMvB,EAFa,EADG,IAAI,EAAoB,EAAa,CACK,CAElC,sBAAsB,CAClD,SAAU,EACV,kBACE,EAAQ,gBAAgB,oBAAsB,OAC1C,IAAA,GACA,EAAQ,gBAAgB,kBAC9B,mBAAoB,EAAQ,UAC5B,gBAAiB,EAAQ,gBAC1B,CAAC,CAyBF,GAvBK,EAAW,OACd,EAAO,KAAK,GAAG,EAAW,OAAO,CAI/B,EAAQ,iBAAmB,EAAQ,gBAAkB,GACvD,EAAO,KAAK,qCAAqC,CAG/C,EAAQ,WAAa,EAAQ,UAAY,MAC3C,EAAS,KAAK,8CAA8C,CAI5D,EAAQ,gBAAgB,oBAAsB,YAC9C,CAAC,EAAQ,iBAET,EAAS,KACP,iEACD,CAIC,EAAO,OAAS,EAAG,CACrB,EAAO,IAAI,mCAAmC,CAC9C,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,YAAY,IAAQ,CAInC,GAAI,EAAS,OAAS,EAAG,CACvB,EAAO,IAAI,qCAAqC,CAChD,IAAK,IAAM,KAAW,EACpB,EAAO,IAAI,cAAc,IAAU,CAIvC,MAAO,CACL,MAAO,EAAO,SAAW,EACzB,SACA,WACD,CAkDH,SAAgB,GACd,EACA,EACM,CACN,IAAM,EAAmB,EACvB,EACA,EACA,EACD,CAED,GAAI,CAAC,EAAiB,MAAO,CAC3B,IAAM,EAAe,kDAAkD,EAAiB,OAAO,KAAK,KAAK,GA
EzG,MADA,EAAO,IAAI,EAAa,CAClB,IAAI,EAAgB,CACxB,KAAM,4BACN,QAAS,EACV,CAAC,ECrQN,MAAM,EAAc,kBAuBpB,SAAgB,EAAgB,EAA0B,CAYxD,OAVI,EAAW,EAAK,CACX,OAIL,OAAO,GAAS,UAAY,EAAY,KAAK,EAAK,CAC7C,MAIF,OAqCT,SAAgB,EAAW,EAAiC,CAC1D,GAAI,OAAO,GAAS,WAAY,EAC9B,MAAO,GAIT,GAAI,OAAO,WAAe,MAEpB,WAAW,MAAQ,aAAgB,WAAW,MAI9C,WAAW,MAAQ,aAAgB,WAAW,MAChD,MAAO,GAKX,IAAM,EAAM,EACZ,OACG,SAAU,GAAO,SAAU,GAAO,SAAU,KAC5C,OAAO,EAAI,MAAS,UAAmB,EAAI,OAAS,QC1FzD,MAAM,GACJ,EACA,IAEK,GAGiD,CACpD,eAAgB,mBAChB,oBAAqB,mBACrB,mBAAoB,gBACpB,gBAAiB,kBACjB,qBAAsB,kBACtB,iBAAkB,kBAClB,4BAA6B,kBAC7B,6BAA8B,kBAC9B,iBAAkB,uBAClB,oBAAqB,kBACtB,CAEe,IAhBQ,EAwV1B,SAAgB,GACd,EACA,EACA,CACE,WAAY,EACZ,SACA,cACA,oBAOa,CAIf,IAAM,EAAa,EACf,IAAI,EAAe,EAAgB,EAAY,CAC/C,EAGE,EAAiB,GAAG,EAAQ,GAAG,EAAkB,aACjD,EAAe,GAAG,EAAQ,GAAG,EAAkB,WAC/C,EAAe,GAAG,EAAQ,GAAG,EAAkB,WAG/C,EAAY,EAAQ,QAAQ,OAAQ,KAAK,CACzC,EAAc,GAAG,EAAU,uBAC3B,EAAY,GAAG,EAAU,qBAMzB,EAA2B,MAC/B,EACA,IAC2B,CAI3B,GAHA,GAAQ,IAAI,4CAA4C,EAAM,GAAG,CAG7D,gBAAiB,EAAS,CAC5B,GAAQ,IAAI,2DAA2D,CAEvE,IAAM,GADU,MAAM,EAAQ,YAAY,EAAE,CAAE,EAAM,EACzB,cAC3B,GAAI,GAAY,WAAW,UAAU,CAInC,OAHA,GAAQ,IACN,sEACD,CACM,EAAW,UAAU,EAAE,CAEhC,GAAQ,IACN,kEAAkE,IACnE,CAIH,GAAI,sBAAuB,EAAS,CAClC,GAAQ,IAAI,wDAAwD,CAEpE,IAAM,GADU,MAAM,EAAQ,kBAAkB,EAAE,CAAC,EACxB,cAC3B,GAAI,EAKF,OAJA,GAAQ,IACN,qEACD,CAEM,EAAW,WAAW,UAAU,CACnC,EAAW,UAAU,EAAE,CACvB,EAEN,GAAQ,IAAI,iDAAiD,CAI/D,OADA,GAAQ,IAAI,qDAAqD,CAC1D,MAGT,MAAO,CAEL,UAAW,KAAO,IAAqB,CACrC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAe,GAAG,IAAW,CAErE,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,mBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,UAAU,EAAS,YAErB,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAC9B,MAAO,CAAE,OAAQ,EAAI,OAAQ,OAAQ,EAAM,EAG7C,aAAc,KAAO,IAAqB,CACxC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAe,GAAG,IAAY,CACpE,OAAQ,SACT,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,uBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,2BAA2B,IAE7B,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,MAAO,CAAE,OAAQ,EAAI,OAAQ,EAG/B,aAAc,KAAO,IAAoB,CACvC,GAAQ,IAAI,gBAAgB,KAAK,UAAU,EAAK,GAAG,CACnD,IAAM,EAAM,MAAM,EAAW,QAAQ,EAAgB,CACnD,OAAQ,OACR,QAAS,CACP,eAAgB,mBACjB,CACD,KAAM,KAAK,UAAU,EAAK,CAC3B,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,uBACD,CACK,EACJ,EAAU,OAAS,EAAU,SAAW,0BAE1C,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAgB,MAAM,EAAI,MAAM,CAEtC,OADA,GAAQ,IAAI,KAAK,UAAU,EAAa,CAAC,CAClC,CAAE,OAAQ,EAAI,OAAQ,OAAQ,EAAc,EAGrD,YAAa,MAAO,EAAU,EAAM,CAAE,qBAAsB,CAC1D,GAAI,CACF,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAe,GAAG,IAAY,CACpE,OAAQ,QACR,QAAS,CACP,eAAgB,2BACjB,CACD,KAAM,EACN,OAAQ,GAAiB,OAC1B,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EACtB,MAAM,CACN,WAAa,EAAE,EAAE,CACpB,MAAM,IAAI,EAAgB,CACxB,KAAM,gBACN,QACE,EAAU,OAAS,EAAU,SAAW,wBAC1C,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAgB,MAAM,EAAI,MAAM,CACtC,MAAO,CAAE,OAAQ,EAAI,OAAQ,OAAQ,EAAc,OAC5C,EAAK,CAIZ,MAHI,aAAe,EACX,EAEF,IAAI,EAAgB,CACxB,KAAM,gBACN,QAAS,gBACT,MAAO,EACR,CAAC,GAKN,QAAS,KAAO,IAAmB,CACjC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,IAAS,CAEjE,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,iBACD,CACK,EACJ,EAAU,OAAS,EAAU,SAAW,QAAQ,EAAO,YAEzD,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAA
M,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,YAAY,IAAS,CAC1B,CAAE,OAAQ,EAAI,OAAQ,KAAM,EAAM,EAG3C,QAAS,MACP,EACA,EACA,IACG,CACH,GAAQ,IAAI,YAAY,EAAO,iBAAiB,IAAY,CAC5D,IAAM,EAAM,MAAM,EAAW,QAC3B,GAAG,EAAa,GAAG,EAAO,GAAG,IAC7B,CACE,OAAQ,OACR,QAAS,CACP,eAAgB,mBACjB,CACD,KAAM,KAAK,UAAU,CAAE,SAAQ,CAAC,CACjC,CACF,CAED,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,kBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,sBAAsB,IAExB,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,qBAAqB,KAAK,UAAU,EAAK,GAAG,CACjD,CAAE,OAAQ,EAAI,OAAQ,IAAK,EAAM,EAG1C,WAAY,MACV,EACA,EACA,EACA,IAGG,CACH,IAAM,EAAc,GAAS,aAAe,mBAExCC,EACJ,AAKE,EALE,IAAgB,2BAEX,EAGA,KAAK,UAAU,CAAE,UAAS,CAAC,CAGpC,IAAM,EAAM,MAAM,EAAW,QAC3B,GAAG,EAAa,GAAG,EAAM,UAAU,IACnC,CACE,OAAQ,QACR,QAAS,CACP,eAAgB,EACjB,CACD,OACD,CACF,CAED,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,sBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,iCAAiC,IAEnC,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAIJ,OADc,MAAM,EAAI,MAAM,EAIhC,UAAW,KAAO,IAAkB,CAClC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,EAAM,QAAS,CACrE,OAAQ,OACT,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,oBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,gCAAgC,IAElC,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,gBAAgB,EAAM,YAAY,EAAK,SAAS,CACrD,GAGT,WAAY,KAAO,IAAkB,CACnC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,EAAM,SAAU,CACtE,OAAQ,OACT,CAAC,CAEF,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,qBACD,CACK,EACJ,EAAU,OACV,EAAU,SACV,iCAAiC,IAEnC,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAGJ,IAAM,EAAQ,MAAM,EAAI,MAAM,CAE9B,OADA,GAAQ,IAAI,mBAAmB,EAAM,YAAY,EAAK,SAAS,CACxD,GAIT,aAAc,KAAO,IAAkB,CACrC,IAAM,EAAM,MAAM,EAAW,QAAQ,GAAG,EAAa,GAAG,EAAM,SAAS,CAEvE,GAAI,CAAC,EAAI,GAAI,CACX,IAAM,EAAa,MAAM,EAAI,MAAM,CAAC,WAAa,EAAE,EAAE,CAC/C,EAAY,EAChB,EAAU,KACV,gBACD,CACK,EACJ,EAAU,OAAS,EAAU,SAAW,OAAO,EAAM,YAEvD,MAAM,IAAI,EAAgB,CACxB,KAAM,EACN,QAAS,EAAU,KACf,GAAG,EAAa,IAAI,EAAU,KAAK,GACnC,EACJ,OAAQ,EAAI,OACb,CAAC,CAIJ,OADc,MAAM,EAAI,MAAM,EAKhC,oBAAqB,KAAO,IAAqB,CAC/C,IAAI,EAAQ,GAAG,EAAY,GAAG,IAK9B,GAAI,EACF,GAAI,CACF,IAAM,EAAQ,MAAM,EAAyB,EAAa,EAAS,CAC/D,GACF,GAAS,UAAU,mBAAmB,EAAM,GAC5C,GAAQ,IAAI,wCAAwC,IAAW,EAG/D,GAAQ,IACN,4DAA4D,IAC7D,OAEI,EAAO,CACd,IAAM,EACJ,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACxD,GAAQ,IACN,kDAAkD,IACnD,CAED,GAAQ,IACN,qEAAqE,IACtE,CAIL,IAAM,EAAK,EAAiB,OAAO,EAAM,CAczC,MAZA,GAAG,WAAe,CAChB,GAAQ,IAAI,2CAA2C,IAAW,EAGpE,EAAG,YAAgB,CACjB,GAAQ,IAAI,2CAA2C,IAAW,EAGpE,EAAG,QAAW,GAAU,CACtB,GAAQ,IAAI,8BAA8B,EAAS,IAAI,IAAQ,EAG1D,GAGT,kBAAmB,KAAO,IAAkB,CAC1C,IAAI,EAAQ,GAAG,EAAU,GAAG,IAK5B,GAAI,EACF,GAAI,CACF,IAAM,EAAQ,MAAM,EAAyB,EAAa,EAAM,CAC5D,GACF,GAAS,UAAU,mBAAmB,EAAM,GAC5C,GAAQ,IAAI,0CAA0C,IAAQ,EAG9D,GAAQ,IACN,0DAA0D,IAC3D,OAEI,EAAO,CACd,IAAM,EACJ,aAAiB,MAAQ,EAAM,QAAU,OAAO,EAAM,CACxD,GAAQ,IACN,gDAAgD,IACjD,CAED,GAAQ,IACN,mEAAmE,IACpE,CAIL,IAAM,EAAK,EAAiB,OAAO,EAAM,CAczC,MAZA,GAAG,WAAe,CAChB,GAAQ,IAAI,6CAA6C,IAAQ,EAGnE,EAAG,YAAgB,CACjB,GAAQ,IAAI,6CAA6C,IAAQ,EAGnE,EAAG,QAAW,GAAU,CACtB,GAAQ,IAAI,gCAAgC,EAAM,IAAI,IAAQ,EAGzD,GAGT,eAAiB,GAAsB,CACrC,EAAG,OAAO,EAIZ,yBACS,EAAW,YAAY,CAGhC,iCACS,EAAW,oBAAoB,CAGxC,kBAAmB,KAAO,IACjB,EAAW,kBAAkB,EAAK,CAI3C,gBAAiB
,KAAO,IAAsB,CAC5C,IAAM,EAAkB,GAAG,EAAe,0BAA0B,mBAAmB,EAAU,GAEjG,GAAI,CACF,IAAM,EAAW,MAAM,EAAW,QAAQ,EAAiB,CACzD,OAAQ,MACR,QAAS,CACP,eAAgB,mBACjB,CACF,CAAC,CAUF,OARK,EAAS,IAOD,MAAM,EAAS,MAAM,EACuB,cAPvD,GAAQ,IACN,iCAAiC,EAAS,OAAO,GAAG,EAAS,aAC9D,CACM,SAKF,EAAO,CAId,OAHA,GAAQ,IACN,wDAAwD,IACzD,CACM,IAGZ,CCl5BH,IAAa,GAAb,KAAwC,CAItC,YACE,EACA,EACA,EACA,CAHQ,KAAA,cAAA,EACA,KAAA,OAAA,EACA,KAAA,QAAA,wBANiB,IAAI,wBACN,IAAI,IAW7B,MAAM,oBAAoB,EAA0C,CAElE,KAAK,qBAAqB,EAAS,CAEnC,IAAM,EAAK,MAAM,KAAK,cAAc,oBAAoB,EAAS,CAkCjE,OAjCA,KAAK,iBAAiB,IAAI,EAAU,EAAG,CAEvC,EAAG,UAAa,GAAU,CACxB,GAAI,CACF,IAAM,EAAc,EAAuB,UACzC,KAAK,MAAM,EAAM,KAAK,CACvB,CAEG,EAAY,QACV,EAAY,KAAK,OAAS,gBAC5B,KAAK,UAAU,EAAY,KAAK,QAAQ,CAG1C,KAAK,OAAO,MACV,+BAA+B,EAAY,MAAM,UAClD,OAEI,EAAO,CACd,KAAK,OAAO,MAAM,+BAA+B,IAAQ,GAI7D,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,MAAM,8BAA8B,EAAS,IAAI,IAAQ,EAGvE,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,IACV,+BAA+B,EAAS,aAAa,EAAM,KAAe,YAAY,EAAM,SAC7F,CACD,KAAK,iBAAiB,OAAO,EAAS,EAGjC,EAMT,MAAM,kBAAkB,EAAuC,CAE7D,KAAK,mBAAmB,EAAM,CAE9B,IAAM,EAAK,MAAM,KAAK,cAAc,kBAAkB,EAAM,CAgD5D,OA/CA,KAAK,eAAe,IAAI,EAAO,EAAG,CAElC,EAAG,UAAa,GAAU,CACxB,GAAI,CACF,IAAM,EAAU,KAAK,MAAM,EAAM,KAAK,CAEtC,OAAQ,EAAQ,KAAhB,CACE,IAAK,aACH,KAAK,OAAO,IAAI,qCAAqC,EAAQ,KAAK,CAClE,MACF,IAAK,aACH,KAAK,OAAO,IACV,sCAAsC,EAAQ,QAAQ,QACvD,CACD,MACF,IAAK,QACH,KAAK,OAAO,MACV,yBAAyB,EAAQ,QAAQ,WAAW,EAAM,aAAa,EAAQ,OAChF,CACD,MACF,IAAK,OACH,KAAK,OAAO,IAAI,yCAAyC,IAAQ,CACjE,MACF,IAAK,aACH,KAAK,UAAU,EAAQ,QAAQ,CAC/B,MACF,QACE,KAAK,OAAO,KACV,wCAAwC,EAAQ,OACjD,QAEE,EAAO,CACd,KAAK,OAAO,MAAM,wCAAwC,IAAQ,GAItE,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,MAAM,gCAAgC,EAAM,IAAI,IAAQ,EAGtE,EAAG,QAAW,GAAU,CACtB,KAAK,OAAO,IACV,iCAAiC,EAAM,aAAa,EAAM,KAAe,YAAY,EAAM,SAC5F,CACD,KAAK,eAAe,OAAO,EAAM,EAG5B,EAOT,MAAM,cAAc,EAAoC,CAMtD,OAHI,EAAG,WAAW,UAAU,EAAI,EAAG,SAAS,SAAS,CAC5C,MAAM,KAAK,oBAAoB,EAAG,CAEpC,MAAM,KAAK,kBAAkB,EAAG,CAMzC,qBAAqB,EAAwB,CAC3C,IAAM,EAAK,KAAK,iBAAiB,IAAI,EAAS,CAC1C,IACF,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,iBAAiB,OAAO,EAAS,EAO1C,mBAAmB,EAAqB,CACtC,IAAM,EAAK,KAAK,eAAe,IAAI,EAAM,CACrC,IACF,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,eAAe,OAAO,EAAM,EAOrC,eAAe,EAAkB,CAE/B,KAAK,qBAAqB,EAAG,CAC7B,KAAK,mBAAmB,EAAG,CAM7B,UAAiB,CAEf,IAAK,GAAM,CAAC,EAAU,KAAO,KAAK,iBAAiB,SAAS,CAC1D,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,iBAAiB,OAAO,EAAS,CAIxC,IAAK,GAAM,CAAC,EAAO,KAAO,KAAK,eAAe,SAAS,CACrD,KAAK,cAAc,eAAe,EAAG,CACrC,KAAK,eAAe,OAAO,EAAM,CAOrC,SAAS,EAAwB,CAC/B,IAAM,EAAK,KAAK,eAAe,IAAI,EAAM,CAUzC,OATI,GAAM,EAAG,aAAe,EAAG,MAC7B,EAAG,KACD,KAAK,UAAU,CACb,KAAM,OACN,UAAW,IAAI,MAAM,CAAC,aAAa,CACpC,CAAC,CACH,CACM,IAEF,GAMT,mBAAmB,EAA6C,CAC9D,OAAO,KAAK,iBAAiB,IAAI,EAAS,CAM5C,iBAAiB,EAA0C,CACzD,OAAO,KAAK,eAAe,IAAI,EAAM,CAMvC,kBAAkB,EAA2B,CAC3C,IAAM,EAAK,KAAK,iBAAiB,IAAI,EAAS,CAC9C,OAAO,GAAI,aAAe,GAAI,KAMhC,gBAAgB,EAAwB,CACtC,IAAM,EAAK,KAAK,eAAe,IAAI,EAAM,CACzC,OAAO,GAAI,aAAe,GAAI,KAMhC,YAAY,EAAqB,CAC/B,OAAO,KAAK,kBAAkB,EAAG,EAAI,KAAK,gBAAgB,EAAG,CAM/D,oBAA6B,CAC3B,OAAO,KAAK,iBAAiB,KAAO,KAAK,eAAe,KAM1D,0BAIE,CACA,MAAO,CACL,OAAQ,KAAK,iBAAiB,KAC9B,KAAM,KAAK,eAAe,KAC1B,MAAO,KAAK,iBAAiB,KAAO,KAAK,eAAe,KACzD,GCrDL,MAAaI,EAAuD,CAElE,sBAAuB,EAEvB,kBAAmB,IAEnB,iBAAkB,IAElB,YAAa,GAEb,uBAAwB,GACzB,CAuHD,SAAgB,EAAoC,CAClD,QAAS,EACT,qBAAqB,aACrB,YACA,cAAc,CAAC,IAAM,IAAM,IAAK,CAChC,YACA,kBAAkB,EAClB,oBACA,iBACA,gBACA,oBACA,gBACA,kBACA,UACA,aACA,aACA,SAAS,EAAa,GAAK,CAC3B,aACA,qBACA,gBACA,8BAA8B,GAC9B,mBACA,yBACA,kBACA,QACuC,CACvC,IAAM,EAAU,EAAS,QAAQ,MAAO,GAAG,CAGrCC,EAA2B,EAC7B,EAAK,OAAS,SACZ,IAAI,EAAkB,EAAM,EAAiB,EAAO,CACpD,IAAI,EAA2B,EAAM,EAAW,CAClD,IAAI,EAGJ,GACF,EAAO,IACL,6BAA6B,EAAK,KAAK,OAAO,EAAK,OAAS,mBAAqB,aAAa,EAAK,cAAc,GAAK,KACvH,CAIH,I
[base64-VLQ "mappings" data omitted — the remainder of the single-line index.mjs source map, which maps the minified bundle back to its TypeScript sources and ends with the closing "} of the source-map JSON; the encoded segments are machine-generated and contain no human-readable content]