@uploadista/core 0.0.20-beta.7 → 0.0.20-beta.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/flow/index.cjs +1 -1
  2. package/dist/flow/index.d.cts +2 -2
  3. package/dist/flow/index.d.mts +2 -2
  4. package/dist/flow/index.mjs +1 -1
  5. package/dist/flow-BHVkk_6W.cjs +1 -0
  6. package/dist/{flow-_OmguvZm.mjs → flow-DlhHOlMk.mjs} +2 -2
  7. package/dist/flow-DlhHOlMk.mjs.map +1 -0
  8. package/dist/index-B9V5SSxl.d.mts.map +1 -1
  9. package/dist/{index-DjhpgW08.d.cts → index-DMqaf28W.d.cts} +1483 -1483
  10. package/dist/index-DMqaf28W.d.cts.map +1 -0
  11. package/dist/{index-BY620LiC.d.mts → index-RuQUCROH.d.mts} +1483 -1483
  12. package/dist/index-RuQUCROH.d.mts.map +1 -0
  13. package/dist/index.cjs +1 -1
  14. package/dist/index.d.cts +2 -2
  15. package/dist/index.d.mts +2 -2
  16. package/dist/index.mjs +1 -1
  17. package/dist/testing/index.cjs +2 -2
  18. package/dist/testing/index.d.cts +9 -9
  19. package/dist/testing/index.d.cts.map +1 -1
  20. package/dist/testing/index.d.mts +9 -9
  21. package/dist/testing/index.d.mts.map +1 -1
  22. package/dist/testing/index.mjs +2 -2
  23. package/dist/testing/index.mjs.map +1 -1
  24. package/dist/types/index.d.cts +1 -1
  25. package/dist/types/index.d.mts +1 -1
  26. package/dist/upload/index.cjs +1 -1
  27. package/dist/upload/index.d.cts +2 -2
  28. package/dist/upload/index.d.mts +2 -2
  29. package/dist/upload/index.mjs +1 -1
  30. package/dist/{upload-tLC7uR9U.mjs → upload-C-C7hn1-.mjs} +2 -2
  31. package/dist/{upload-tLC7uR9U.mjs.map → upload-C-C7hn1-.mjs.map} +1 -1
  32. package/dist/{upload-BHDuuJ80.cjs → upload-DWBlRXHh.cjs} +1 -1
  33. package/package.json +7 -7
  34. package/src/flow/{flow-server.ts → flow-engine.ts} +106 -106
  35. package/src/flow/index.ts +10 -10
  36. package/src/flow/nodes/input-node.ts +5 -5
  37. package/src/flow/nodes/transform-node.ts +11 -14
  38. package/src/flow/typed-flow.ts +22 -20
  39. package/src/testing/index.ts +1 -1
  40. package/src/testing/{mock-upload-server.ts → mock-upload-engine.ts} +10 -10
  41. package/src/upload/index.ts +1 -1
  42. package/src/upload/{upload-server.ts → upload-engine.ts} +44 -40
  43. package/dist/flow-Cv8vCBQ2.cjs +0 -1
  44. package/dist/flow-_OmguvZm.mjs.map +0 -1
  45. package/dist/index-BY620LiC.d.mts.map +0 -1
  46. package/dist/index-DjhpgW08.d.cts.map +0 -1
@@ -1 +1 @@
- {"version":3,"file":"upload-tLC7uR9U.mjs","names":["captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n>","parsedMetadata: Record<string, string>","metadataObject: Record<string, string>","file: UploadFile","onAbort","initialUpload: UploadFile","completedUpload: UploadFile","chunks: Uint8Array[]","capabilities: DataStoreCapabilities","validateUploadStrategy: (strategy: UploadStrategy) => boolean","reasoning: string[]","warnings: string[]","strategy: UploadStrategy","errors: string[]"],"sources":["../src/upload/mime.ts","../src/upload/create-upload.ts","../src/upload/convert-to-stream.ts","../src/upload/write-to-store.ts","../src/upload/upload-chunk.ts","../src/upload/upload-url.ts","../src/upload/upload-server.ts","../src/upload/upload-strategy-negotiator.ts"],"sourcesContent":["/**\n * Helper to check if buffer matches a byte pattern at given offset\n */\nfunction checkBytes(\n buffer: Uint8Array,\n pattern: number[],\n offset = 0,\n): boolean {\n if (buffer.length < offset + pattern.length) return false;\n return pattern.every((byte, i) => buffer[offset + i] === byte);\n}\n\n/**\n * Helper to check if buffer matches a string pattern at given offset\n */\nfunction checkString(buffer: Uint8Array, str: string, offset = 0): boolean {\n if (buffer.length < offset + str.length) return false;\n for (let i = 0; i < str.length; i++) {\n if (buffer[offset + i] !== str.charCodeAt(i)) return false;\n }\n return true;\n}\n\n/**\n * Detect MIME type from buffer using magic bytes (file signatures).\n * Supports a wide range of common file types including images, videos, audio, documents, and archives.\n *\n * @param buffer - File content as Uint8Array\n * @param filename - Optional filename for extension-based fallback\n * @returns Detected MIME type or \"application/octet-stream\" if unknown\n */\nexport const detectMimeType = (\n buffer: Uint8Array,\n filename?: string,\n): string => {\n if (buffer.length === 0) {\n return \"application/octet-stream\";\n }\n\n // ===== IMAGES =====\n\n // PNG: 89 50 4E 47 0D 0A 1A 0A\n if (checkBytes(buffer, [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])) {\n return \"image/png\";\n }\n\n // JPEG: FF D8 FF\n if (checkBytes(buffer, [0xff, 0xd8, 0xff])) {\n return \"image/jpeg\";\n }\n\n // GIF87a or GIF89a\n if (checkString(buffer, \"GIF87a\") || checkString(buffer, \"GIF89a\")) {\n return \"image/gif\";\n }\n\n // WebP: RIFF....WEBP\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WEBP\", 8)\n ) {\n return \"image/webp\";\n }\n\n // AVIF: ....ftypavif or ....ftypavis\n if (\n buffer.length >= 12 &&\n checkBytes(buffer, [0x00, 0x00, 0x00], 0) &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"avif\", 8) || checkString(buffer, \"avis\", 8))\n ) {\n return \"image/avif\";\n }\n\n // HEIC/HEIF: ....ftypheic or ....ftypheif or ....ftypmif1\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"heic\", 8) ||\n checkString(buffer, \"heif\", 8) ||\n checkString(buffer, \"mif1\", 8))\n ) {\n return \"image/heic\";\n }\n\n // BMP: 42 4D\n if (checkBytes(buffer, [0x42, 0x4d])) {\n return \"image/bmp\";\n }\n\n // TIFF (little-endian): 49 49 2A 00\n if (checkBytes(buffer, [0x49, 0x49, 0x2a, 0x00])) {\n return \"image/tiff\";\n }\n\n // TIFF (big-endian): 4D 4D 00 2A\n if (checkBytes(buffer, [0x4d, 0x4d, 0x00, 0x2a])) {\n return \"image/tiff\";\n }\n\n // ICO: 00 00 01 00\n if (checkBytes(buffer, [0x00, 0x00, 0x01, 0x00])) {\n return 
\"image/x-icon\";\n }\n\n // SVG (XML-based, check for <svg or <?xml)\n if (buffer.length >= 5) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n if (\n text.includes(\"<svg\") ||\n (text.includes(\"<?xml\") && text.includes(\"<svg\"))\n ) {\n return \"image/svg+xml\";\n }\n }\n\n // ===== VIDEOS =====\n\n // MP4/M4V/M4A: ....ftyp\n if (buffer.length >= 12 && checkString(buffer, \"ftyp\", 4)) {\n const subtype = new TextDecoder().decode(buffer.slice(8, 12));\n if (\n subtype.startsWith(\"mp4\") ||\n subtype.startsWith(\"M4\") ||\n subtype.startsWith(\"isom\")\n ) {\n return \"video/mp4\";\n }\n }\n\n // WebM: 1A 45 DF A3\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3])) {\n return \"video/webm\";\n }\n\n // AVI: RIFF....AVI\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"AVI \", 8)\n ) {\n return \"video/x-msvideo\";\n }\n\n // MOV (QuickTime): ....moov or ....mdat or ....free\n if (\n buffer.length >= 8 &&\n (checkString(buffer, \"moov\", 4) ||\n checkString(buffer, \"mdat\", 4) ||\n checkString(buffer, \"free\", 4))\n ) {\n return \"video/quicktime\";\n }\n\n // MKV: 1A 45 DF A3 (same as WebM but check for Matroska)\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3]) && buffer.length >= 100) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, 100),\n );\n if (text.includes(\"matroska\")) {\n return \"video/x-matroska\";\n }\n }\n\n // ===== AUDIO =====\n\n // MP3: FF FB or FF F3 or FF F2 or ID3\n if (\n checkBytes(buffer, [0xff, 0xfb]) ||\n checkBytes(buffer, [0xff, 0xf3]) ||\n checkBytes(buffer, [0xff, 0xf2]) ||\n checkString(buffer, \"ID3\")\n ) {\n return \"audio/mpeg\";\n }\n\n // WAV: RIFF....WAVE\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WAVE\", 8)\n ) {\n return \"audio/wav\";\n }\n\n // FLAC: 66 4C 61 43 (fLaC)\n if (checkString(buffer, \"fLaC\")) {\n return \"audio/flac\";\n }\n\n // OGG: 4F 67 67 53 (OggS)\n if (checkString(buffer, \"OggS\")) {\n return \"audio/ogg\";\n }\n\n // M4A: ....ftypM4A\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n checkString(buffer, \"M4A\", 8)\n ) {\n return \"audio/mp4\";\n }\n\n // ===== DOCUMENTS =====\n\n // PDF: 25 50 44 46 (%PDF)\n if (checkString(buffer, \"%PDF\")) {\n return \"application/pdf\";\n }\n\n // ===== ARCHIVES =====\n\n // ZIP: 50 4B 03 04 or 50 4B 05 06 (empty archive) or 50 4B 07 08 (spanned archive)\n if (\n checkBytes(buffer, [0x50, 0x4b, 0x03, 0x04]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x05, 0x06]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x07, 0x08])\n ) {\n // Could be ZIP, DOCX, XLSX, PPTX, JAR, APK, etc.\n // Check for Office formats\n if (buffer.length >= 1024) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(buffer);\n if (text.includes(\"word/\"))\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n if (text.includes(\"xl/\"))\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n if (text.includes(\"ppt/\"))\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n }\n return \"application/zip\";\n }\n\n // RAR: 52 61 72 21 1A 07 (Rar!)\n if (checkBytes(buffer, [0x52, 0x61, 0x72, 0x21, 0x1a, 0x07])) {\n return \"application/x-rar-compressed\";\n }\n\n // 7Z: 37 7A BC AF 27 1C\n if (checkBytes(buffer, [0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c])) {\n return 
\"application/x-7z-compressed\";\n }\n\n // GZIP: 1F 8B\n if (checkBytes(buffer, [0x1f, 0x8b])) {\n return \"application/gzip\";\n }\n\n // TAR (ustar): \"ustar\" at offset 257\n if (buffer.length >= 262 && checkString(buffer, \"ustar\", 257)) {\n return \"application/x-tar\";\n }\n\n // ===== FONTS =====\n\n // WOFF: 77 4F 46 46 (wOFF)\n if (checkString(buffer, \"wOFF\")) {\n return \"font/woff\";\n }\n\n // WOFF2: 77 4F 46 32 (wOF2)\n if (checkString(buffer, \"wOF2\")) {\n return \"font/woff2\";\n }\n\n // TTF: 00 01 00 00 00\n if (checkBytes(buffer, [0x00, 0x01, 0x00, 0x00, 0x00])) {\n return \"font/ttf\";\n }\n\n // OTF: 4F 54 54 4F (OTTO)\n if (checkString(buffer, \"OTTO\")) {\n return \"font/otf\";\n }\n\n // ===== TEXT =====\n\n // JSON (basic check for { or [)\n if (buffer.length >= 1) {\n const firstByte = buffer[0];\n if (firstByte === 0x7b || firstByte === 0x5b) {\n // { or [\n try {\n const text = new TextDecoder(\"utf-8\").decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n JSON.parse(text.trim());\n return \"application/json\";\n } catch {\n // Not valid JSON\n }\n }\n }\n\n // Fallback to extension-based detection\n if (filename) {\n const ext = filename.split(\".\").pop()?.toLowerCase();\n switch (ext) {\n // Images\n case \"jpg\":\n case \"jpeg\":\n return \"image/jpeg\";\n case \"png\":\n return \"image/png\";\n case \"gif\":\n return \"image/gif\";\n case \"webp\":\n return \"image/webp\";\n case \"avif\":\n return \"image/avif\";\n case \"heic\":\n case \"heif\":\n return \"image/heic\";\n case \"bmp\":\n return \"image/bmp\";\n case \"tiff\":\n case \"tif\":\n return \"image/tiff\";\n case \"ico\":\n return \"image/x-icon\";\n case \"svg\":\n return \"image/svg+xml\";\n\n // Videos\n case \"mp4\":\n case \"m4v\":\n return \"video/mp4\";\n case \"webm\":\n return \"video/webm\";\n case \"avi\":\n return \"video/x-msvideo\";\n case \"mov\":\n return \"video/quicktime\";\n case \"mkv\":\n return \"video/x-matroska\";\n\n // Audio\n case \"mp3\":\n return \"audio/mpeg\";\n case \"wav\":\n return \"audio/wav\";\n case \"flac\":\n return \"audio/flac\";\n case \"ogg\":\n return \"audio/ogg\";\n case \"m4a\":\n return \"audio/mp4\";\n\n // Documents\n case \"pdf\":\n return \"application/pdf\";\n case \"docx\":\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n case \"xlsx\":\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n case \"pptx\":\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n\n // Archives\n case \"zip\":\n return \"application/zip\";\n case \"rar\":\n return \"application/x-rar-compressed\";\n case \"7z\":\n return \"application/x-7z-compressed\";\n case \"gz\":\n case \"gzip\":\n return \"application/gzip\";\n case \"tar\":\n return \"application/x-tar\";\n\n // Fonts\n case \"woff\":\n return \"font/woff\";\n case \"woff2\":\n return \"font/woff2\";\n case \"ttf\":\n return \"font/ttf\";\n case \"otf\":\n return \"font/otf\";\n\n // Text\n case \"txt\":\n return \"text/plain\";\n case \"json\":\n return \"application/json\";\n case \"xml\":\n return \"application/xml\";\n case \"html\":\n case \"htm\":\n return \"text/html\";\n case \"css\":\n return \"text/css\";\n case \"js\":\n return \"application/javascript\";\n case \"csv\":\n return \"text/csv\";\n\n default:\n return \"application/octet-stream\";\n }\n }\n\n return \"application/octet-stream\";\n};\n\n/**\n * Compare two MIME types with lenient matching.\n * Matches on major type 
(e.g., \"image/*\") to allow for minor variations.\n *\n * @param declared - MIME type provided by client\n * @param detected - MIME type detected from file content\n * @returns true if MIME types are compatible\n *\n * @example\n * compareMimeTypes(\"image/png\", \"image/apng\") // true\n * compareMimeTypes(\"image/jpeg\", \"image/png\") // true (both images)\n * compareMimeTypes(\"image/png\", \"application/pdf\") // false\n */\nexport function compareMimeTypes(declared: string, detected: string): boolean {\n // Exact match\n if (declared === detected) {\n return true;\n }\n\n // Extract major types (e.g., \"image\" from \"image/png\")\n const declaredMajor = declared.split(\"/\")[0];\n const detectedMajor = detected.split(\"/\")[0];\n\n // Compare major types for lenient matching\n return declaredMajor === detectedMajor;\n}\n","import { Effect, Metric, MetricBoundaries, Option } from \"effect\";\nimport {\n type EventEmitter,\n type InputFile,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport type { GenerateIdShape } from \"../utils/generate-id\";\n\n/**\n * Captures the current Effect trace context for distributed tracing.\n *\n * Uses Effect's `currentSpan` to get the active span, which is more reliable\n * than OpenTelemetry's `trace.getActiveSpan()` when using @effect/opentelemetry\n * because Effect manages its own span context that may not be synchronized\n * with OpenTelemetry's global context.\n *\n * @returns Effect that yields TraceContext if there's an active span, undefined otherwise\n */\nconst captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n> = Effect.gen(function* () {\n const spanOption = yield* Effect.currentSpan.pipe(Effect.option);\n return Option.match(spanOption, {\n onNone: () => undefined,\n onSome: (span) => ({\n traceId: span.traceId,\n spanId: span.spanId,\n traceFlags: span.sampled ? 
1 : 0,\n }),\n });\n});\n\n/**\n * Creates a new upload and initializes it in the storage system.\n *\n * This function handles the initial upload creation process including:\n * - Generating a unique upload ID\n * - Routing to appropriate data store based on storage ID\n * - Creating the upload record in the data store\n * - Storing upload metadata in KV store\n * - Emitting upload started events\n * - Parsing and validating metadata\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for upload creation, file sizes, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param inputFile - Input file configuration including storage, size, type, etc.\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for upload lifecycle events\n * @param generateId - ID generator for creating unique upload identifiers\n * @returns Effect that yields the created UploadFile\n *\n * @example\n * ```typescript\n * // Create a new upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\",\n * metadata: JSON.stringify({ category: \"photos\" })\n * };\n *\n * const createEffect = createUpload(\n * inputFile,\n * \"client-123\",\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter,\n * generateId\n * }\n * );\n *\n * // Run with dependencies\n * const upload = await Effect.runPromise(\n * createEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer),\n * Effect.provide(generateIdLayer)\n * )\n * );\n * ```\n */\nexport const createUpload = (\n inputFile: InputFile,\n clientId: string | null,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId: GenerateIdShape;\n },\n) =>\n Effect.gen(function* () {\n // Capture the parent \"upload\" span's trace context FIRST\n // This allows subsequent chunk uploads to be siblings of upload-create\n // under the same parent \"upload\" span\n const traceContext = yield* captureTraceContextEffect;\n const creationDate = new Date().toISOString();\n\n // Now run the actual upload creation inside a child span\n const fileCreated = yield* Effect.gen(function* () {\n // Get datastore using Effect\n const dataStore = yield* dataStoreService.getDataStore(\n inputFile.storageId,\n clientId,\n );\n\n const id = yield* generateId.generateId();\n const { size, type, fileName, lastModified, metadata, flow } = inputFile;\n\n let parsedMetadata: Record<string, string> = {};\n if (metadata) {\n try {\n parsedMetadata = JSON.parse(metadata) as Record<string, string>;\n } catch {\n parsedMetadata = {};\n }\n }\n\n const metadataObject: Record<string, string> = {\n ...parsedMetadata,\n type,\n fileName: fileName ?? 
\"\",\n };\n if (lastModified) {\n metadataObject.lastModified = lastModified.toString();\n }\n\n const file: UploadFile = {\n id,\n size,\n metadata: metadataObject,\n offset: 0,\n creationDate,\n storage: {\n id: inputFile.storageId,\n type,\n path: \"\",\n bucket: dataStore.bucket,\n },\n flow,\n traceContext,\n };\n\n // Create file using Effect\n const created = yield* dataStore.create(file);\n\n // Store in KV store\n yield* kvStore.set(id, created);\n\n // Emit event\n yield* eventEmitter.emit(id, {\n type: UploadEventType.UPLOAD_STARTED,\n data: created,\n flow: created.flow,\n });\n\n return created;\n }).pipe(\n // upload-create is a CHILD span of the parent \"upload\" span\n Effect.withSpan(\"upload-create\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n );\n\n return fileCreated;\n }).pipe(\n // Parent \"upload\" span wraps the entire upload lifecycle\n // upload-create and upload-chunk will be children of this span\n Effect.withSpan(\"upload\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n // Track upload creation metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment upload created counter\n yield* Metric.increment(\n Metric.counter(\"upload_created_total\", {\n description: \"Total number of uploads created\",\n }),\n );\n\n // Record file size\n if (file.size) {\n const fileSizeHistogram = Metric.histogram(\n \"upload_file_size_bytes\",\n MetricBoundaries.exponential({\n start: 1024,\n factor: 2,\n count: 25,\n }),\n );\n yield* Metric.update(fileSizeHistogram, file.size);\n }\n\n // Track active uploads gauge\n const activeUploadsGauge = Metric.gauge(\"active_uploads\");\n yield* Metric.increment(activeUploadsGauge);\n }),\n ),\n // Add structured logging\n Effect.tap((file) =>\n Effect.logInfo(\"Upload created\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Log error\n yield* Effect.logError(\"Upload creation failed\").pipe(\n Effect.annotateLogs({\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.storage_id\": inputFile.storageId,\n error: String(error),\n }),\n );\n\n // Track failed upload metric\n yield* Metric.increment(\n Metric.counter(\"upload_failed_total\", {\n description: \"Total number of uploads that failed\",\n }),\n );\n }),\n ),\n );\n","import { Stream } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Converts a ReadableStream to an Effect Stream.\n *\n * This utility function wraps a ReadableStream in an Effect Stream, providing\n * proper error handling and integration with the Effect ecosystem. 
src/upload/convert-to-stream.ts — convertToStream wraps a ReadableStream in an Effect Stream via Stream.fromReadableStream, mapping stream errors to an UploadistaError with code UNKNOWN_ERROR and status 500.

src/upload/write-to-store.ts — writeToStore({ data, upload, dataStore, maxFileSize, controller, eventEmitter, uploadProgressInterval = 200 }) converts the incoming stream, enforces maxFileSize through StreamLimiterEffect.limit, and writes via dataStore.write with an onProgress callback that emits throttled UPLOAD_PROGRESS events (at most one per uploadProgressInterval milliseconds, tracked in a Ref). Abort handling is wired through Effect.acquireUseRelease so the listener on controller.signal is always removed; AbortError maps to ABORTED and other failures to FILE_WRITE_ERROR. The operation runs under an "upload-write-to-store" span with debug logs of bytes written and error logs on failure.
\"unknown\",\n \"upload.creation_date\": file.creationDate as string,\n \"upload.completion_date\": new Date().toISOString(),\n },\n parent: parentSpan,\n }),\n );\n};\n\n/**\n * Uploads a chunk of data for an existing upload.\n *\n * This function handles the core chunk upload logic including:\n * - Retrieving upload metadata from KV store\n * - Routing to appropriate data store based on storage ID\n * - Writing chunk data to storage with progress tracking\n * - Updating upload offset and metadata\n * - Emitting progress events\n * - Validating upload completion (checksum, MIME type)\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for chunk size, throughput, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param uploadId - Unique identifier for the upload\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param chunk - ReadableStream containing the chunk data to upload\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for progress and validation events\n * @returns Effect that yields the updated UploadFile with new offset\n *\n * @example\n * ```typescript\n * // Upload a chunk for an existing upload\n * const uploadChunkEffect = uploadChunk(\n * \"upload-123\",\n * \"client-456\",\n * chunkStream,\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter\n * }\n * );\n *\n * // Run with dependencies\n * const result = await Effect.runPromise(\n * uploadChunkEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * )\n * );\n * ```\n */\nexport const uploadChunk = (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n },\n) =>\n Effect.gen(function* () {\n // Get file from KV store first to check for trace context\n const file = yield* kvStore.get(uploadId);\n\n // Create external span from stored trace context if available\n // This links chunk uploads to the original upload trace\n const parentSpan = file.traceContext\n ? createExternalSpan(file.traceContext)\n : undefined;\n\n // Core chunk processing logic\n const processChunk = Effect.gen(function* () {\n // Get datastore\n const dataStore = yield* dataStoreService.getDataStore(\n file.storage.id,\n clientId,\n );\n\n // Note: AbortController could be used for cancellation if needed\n\n // Write to store using writeToStore Effect\n const controller = new AbortController();\n\n const chunkSize = yield* writeToStore({\n dataStore,\n data: chunk,\n upload: file,\n maxFileSize: 100_000_000,\n controller,\n uploadProgressInterval: 200,\n eventEmitter,\n });\n\n file.offset = chunkSize;\n\n // Update KV store\n yield* kvStore.set(uploadId, file);\n\n // Emit progress event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: file.id,\n progress: file.offset,\n total: file.size ?? 
0,\n },\n flow: file.flow,\n });\n\n // Check if upload is complete and run validation\n if (file.size && file.offset === file.size) {\n yield* validateUpload({\n file,\n dataStore,\n eventEmitter,\n });\n\n // Create \"upload-complete\" span that captures the full upload duration\n // This span shows the total time from upload creation to completion\n if (file.traceContext) {\n const completeParentSpan = createExternalSpan(file.traceContext);\n yield* createUploadCompleteSpanEffect(file, completeParentSpan);\n }\n }\n\n return file;\n }).pipe(\n // Add tracing span for chunk upload with parent from stored trace context\n Effect.withSpan(\"upload-chunk\", {\n attributes: {\n \"upload.id\": uploadId,\n \"chunk.upload_id\": uploadId,\n \"upload.has_trace_context\": file.traceContext ? \"true\" : \"false\",\n },\n parent: parentSpan,\n }),\n );\n\n return yield* processChunk;\n }).pipe(\n // Track chunk upload metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment chunk uploaded counter\n yield* Metric.increment(\n Metric.counter(\"chunk_uploaded_total\", {\n description: \"Total number of chunks uploaded\",\n }),\n );\n\n // Record chunk size\n const chunkSize = file.offset;\n const chunkSizeHistogram = Metric.histogram(\n \"chunk_size_bytes\",\n MetricBoundaries.linear({\n start: 262_144,\n width: 262_144,\n count: 20,\n }),\n );\n yield* Metric.update(chunkSizeHistogram, chunkSize);\n\n // Update throughput gauge\n if (file.size && file.size > 0) {\n const throughput = chunkSize; // bytes processed\n const throughputGauge = Metric.gauge(\n \"upload_throughput_bytes_per_second\",\n );\n yield* Metric.set(throughputGauge, throughput);\n }\n }),\n ),\n // Add structured logging for chunk progress\n Effect.tap((file) =>\n Effect.logDebug(\"Chunk uploaded\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"chunk.size\": file.offset.toString(),\n \"chunk.progress\":\n file.size && file.size > 0\n ? ((file.offset / file.size) * 100).toFixed(2)\n : \"0\",\n \"upload.total_size\": file.size?.toString() ?? 
\"0\",\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Chunk upload failed\").pipe(\n Effect.annotateLogs({\n \"upload.id\": uploadId,\n error: String(error),\n }),\n ),\n ),\n );\n\n/**\n * Validates an upload after completion.\n *\n * Performs comprehensive validation including:\n * - Checksum validation (if provided) using the specified algorithm\n * - MIME type validation (if required by data store capabilities)\n * - File size validation against data store limits\n *\n * Validation results are emitted as events and failures result in:\n * - Cleanup of uploaded data from storage\n * - Removal of metadata from KV store\n * - Appropriate error responses\n *\n * The function respects data store capabilities for validation limits\n * and provides detailed error information for debugging.\n *\n * @param file - The upload file to validate\n * @param dataStore - Data store containing the uploaded file\n * @param eventEmitter - Event emitter for validation events\n * @returns Effect that completes validation or fails with UploadistaError\n *\n * @example\n * ```typescript\n * // Validate upload after completion\n * const validationEffect = validateUpload({\n * file: completedUpload,\n * dataStore: s3DataStore,\n * eventEmitter: progressEmitter\n * });\n *\n * // Run validation\n * await Effect.runPromise(validationEffect);\n * ```\n */\nconst validateUpload = ({\n file,\n dataStore,\n eventEmitter,\n}: {\n file: UploadFile;\n dataStore: DataStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n}): Effect.Effect<void, UploadistaError, never> =>\n Effect.gen(function* () {\n const capabilities = dataStore.getCapabilities();\n\n // Check if file exceeds max validation size\n if (\n capabilities.maxValidationSize &&\n file.size &&\n file.size > capabilities.maxValidationSize\n ) {\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_WARNING,\n data: {\n id: file.id,\n message: `File size (${file.size} bytes) exceeds max validation size (${capabilities.maxValidationSize} bytes). Validation skipped.`,\n },\n flow: file.flow,\n });\n return;\n }\n\n // Read file from datastore for validation\n const fileBytes = yield* dataStore.read(file.id);\n\n // Validate checksum if provided\n if (file.checksum && file.checksumAlgorithm) {\n const computedChecksum = yield* computeChecksum(\n fileBytes,\n file.checksumAlgorithm,\n );\n\n if (computedChecksum !== file.checksum) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"checksum_mismatch\",\n expected: file.checksum,\n actual: computedChecksum,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with checksum mismatch error\n return yield* UploadistaError.fromCode(\"CHECKSUM_MISMATCH\", {\n body: `Checksum validation failed. 
Expected: ${file.checksum}, Got: ${computedChecksum}`,\n details: {\n uploadId: file.id,\n expected: file.checksum,\n actual: computedChecksum,\n algorithm: file.checksumAlgorithm,\n },\n }).toEffect();\n }\n\n // Emit checksum validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"checksum\",\n algorithm: file.checksumAlgorithm,\n },\n flow: file.flow,\n });\n }\n\n // Validate MIME type if required by capabilities\n if (capabilities.requiresMimeTypeValidation) {\n const detectedMimeType = detectMimeType(fileBytes);\n const declaredMimeType = file.metadata?.type as string | undefined;\n\n if (\n declaredMimeType &&\n !compareMimeTypes(declaredMimeType, detectedMimeType)\n ) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"mimetype_mismatch\",\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with MIME type mismatch error\n return yield* UploadistaError.fromCode(\"MIMETYPE_MISMATCH\", {\n body: `MIME type validation failed. Expected: ${declaredMimeType}, Detected: ${detectedMimeType}`,\n details: {\n uploadId: file.id,\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n }).toEffect();\n }\n\n // Emit MIME type validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"mimetype\",\n },\n flow: file.flow,\n });\n }\n }).pipe(\n Effect.withSpan(\"validate-upload\", {\n attributes: {\n \"upload.id\": file.id,\n \"validation.checksum_provided\": file.checksum ? \"true\" : \"false\",\n \"validation.mime_required\": dataStore.getCapabilities()\n .requiresMimeTypeValidation\n ? 
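The span-linking helper, recovered from the embedded source:

```typescript
import { Tracer } from "effect";
import type { UploadFileTraceContext } from "../types";

/**
 * Creates an ExternalSpan from stored trace context so chunk
 * uploads attach to the original upload trace.
 */
function createExternalSpan(traceContext: UploadFileTraceContext) {
  return Tracer.externalSpan({
    traceId: traceContext.traceId,
    spanId: traceContext.spanId,
    sampled: traceContext.traceFlags === 1,
  });
}
```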
\"true\"\n : \"false\",\n },\n }),\n );\n","import { Effect, Metric } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Fetches a file from a remote URL.\n *\n * This function handles HTTP requests to remote URLs for file uploads,\n * including proper error handling, metrics tracking, and observability.\n *\n * Features:\n * - HTTP request with proper error handling\n * - Effect tracing for performance monitoring\n * - Metrics tracking for URL-based uploads\n * - Structured logging for debugging\n * - Response validation and error reporting\n *\n * @param url - The remote URL to fetch the file from\n * @returns Effect that yields the Response object\n *\n * @example\n * ```typescript\n * // Fetch a file from URL\n * const fetchEffect = fetchFile(\"https://example.com/image.jpg\");\n *\n * // Run with error handling\n * const response = await Effect.runPromise(\n * fetchEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to fetch file\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport const fetchFile = (url: string) => {\n return Effect.tryPromise({\n try: async () => {\n return await fetch(url);\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for URL fetch\n Effect.withSpan(\"upload-fetch-url\", {\n attributes: {\n \"upload.url\": url,\n \"upload.operation\": \"fetch\",\n },\n }),\n // Track URL fetch metrics\n Effect.tap((response) =>\n Effect.gen(function* () {\n // Increment URL upload counter\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_total\", {\n description: \"Total number of URL-based uploads\",\n }),\n );\n\n // Track success/failure\n if (response.ok) {\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_success_total\", {\n description: \"Total number of successful URL-based uploads\",\n }),\n );\n }\n }),\n ),\n // Add structured logging\n Effect.tap((response) =>\n Effect.logInfo(\"URL fetch completed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n \"response.status\": response.status.toString(),\n \"response.ok\": response.ok.toString(),\n \"response.content_length\":\n response.headers.get(\"content-length\") ?? 
\"unknown\",\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Track failed URL upload\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_failed_total\", {\n description: \"Total number of failed URL-based uploads\",\n }),\n );\n\n // Log error\n yield* Effect.logError(\"URL fetch failed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n error: String(error),\n }),\n );\n }),\n ),\n );\n};\n\n/**\n * Converts a Response object to an ArrayBuffer.\n *\n * This function safely converts HTTP response data to binary format\n * for processing and storage, with proper error handling and observability.\n *\n * Features:\n * - Safe conversion from Response to ArrayBuffer\n * - Effect tracing for performance monitoring\n * - Structured logging for debugging\n * - Error handling with proper UploadistaError types\n *\n * @param response - The HTTP Response object to convert\n * @returns Effect that yields the ArrayBuffer data\n *\n * @example\n * ```typescript\n * // Convert response to buffer\n * const bufferEffect = arrayBuffer(response);\n *\n * // Use in upload pipeline\n * const buffer = await Effect.runPromise(\n * bufferEffect.pipe(\n * Effect.tap((buffer) =>\n * Effect.logInfo(`Buffer size: ${buffer.byteLength} bytes`)\n * )\n * )\n * );\n * ```\n */\nexport const arrayBuffer = (response: Response) => {\n return Effect.tryPromise({\n try: async () => {\n return await response.arrayBuffer();\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for buffer conversion\n Effect.withSpan(\"upload-convert-to-buffer\", {\n attributes: {\n \"upload.operation\": \"arrayBuffer\",\n },\n }),\n // Add structured logging\n Effect.tap((buffer) =>\n Effect.logDebug(\"Response converted to array buffer\").pipe(\n Effect.annotateLogs({\n \"buffer.size\": buffer.byteLength.toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to convert response to array buffer\").pipe(\n Effect.annotateLogs({\n error: String(error),\n }),\n ),\n ),\n );\n};\n","import { Context, Effect, Layer, Stream } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n EventEmitter,\n InputFile,\n KvStore,\n Middleware,\n StreamingConfig,\n UploadEvent,\n UploadFile,\n WebSocketConnection,\n} from \"../types\";\nimport {\n UploadEventEmitter,\n UploadEventType,\n UploadFileDataStores,\n UploadFileKVStore,\n} from \"../types\";\nimport { GenerateId, type GenerateIdShape } from \"../utils/generate-id\";\nimport { createUpload } from \"./create-upload\";\nimport { uploadChunk } from \"./upload-chunk\";\nimport { arrayBuffer, fetchFile } from \"./upload-url\";\n\n/**\n * Legacy configuration options for UploadServer.\n *\n * @deprecated Use Effect Layers instead of this configuration object.\n * This type is kept for backward compatibility.\n *\n * @property dataStore - DataStore instance or factory function\n * @property kvStore - KV store for upload metadata\n * @property eventEmitter - Event emitter for upload progress\n * @property generateId - Optional ID generator (defaults to UUID)\n * @property middlewares - Optional request middlewares\n * @property withTracing - Enable Effect tracing for debugging\n */\nexport type UploadServerOptions = {\n dataStore:\n | ((storageId: string) => Promise<DataStore<UploadFile>>)\n | 
DataStore<UploadFile>;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId?: GenerateIdShape;\n middlewares?: Middleware[];\n withTracing?: boolean;\n};\n\n/**\n * UploadServer service interface.\n *\n * This is the core upload handling service that provides all file upload operations.\n * It manages upload lifecycle, resumable uploads, progress tracking, and storage integration.\n *\n * All operations return Effect types for composable, type-safe error handling.\n *\n * @property createUpload - Initiates a new upload and returns metadata\n * @property uploadChunk - Uploads a chunk of data for an existing upload\n * @property getCapabilities - Returns storage backend capabilities\n * @property upload - Complete upload in one operation (create + upload data)\n * @property uploadFromUrl - Uploads a file from a remote URL\n * @property getUpload - Retrieves upload metadata by ID\n * @property read - Reads the complete uploaded file data\n * @property delete - Deletes an upload and its data\n * @property subscribeToUploadEvents - Subscribes WebSocket to upload progress events\n * @property unsubscribeFromUploadEvents - Unsubscribes from upload events\n *\n * @example\n * ```typescript\n * // Basic upload flow\n * const program = Effect.gen(function* () {\n * const server = yield* UploadServer;\n *\n * // 1. Create upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\"\n * };\n * const upload = yield* server.createUpload(inputFile, \"client123\");\n *\n * // 2. Upload chunks\n * const chunk = new ReadableStream(...);\n * const updated = yield* server.uploadChunk(upload.id, \"client123\", chunk);\n *\n * // 3. Read the uploaded file\n * const data = yield* server.read(upload.id, \"client123\");\n *\n * return upload;\n * });\n *\n * // Upload with WebSocket progress tracking\n * const uploadWithProgress = Effect.gen(function* () {\n * const server = yield* UploadServer;\n *\n * // Subscribe to progress events\n * yield* server.subscribeToUploadEvents(uploadId, websocket);\n *\n * // Upload (events will be emitted automatically)\n * const result = yield* server.upload(inputFile, clientId, stream);\n *\n * // Unsubscribe when done\n * yield* server.unsubscribeFromUploadEvents(uploadId);\n *\n * return result;\n * });\n *\n * // Upload from URL\n * const urlUpload = Effect.gen(function* () {\n * const server = yield* UploadServer;\n *\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 0, // Unknown initially\n * type: \"image/png\",\n * fileName: \"remote-image.png\"\n * };\n *\n * const upload = yield* server.uploadFromUrl(\n * inputFile,\n * \"client123\",\n * \"https://example.com/image.png\"\n * );\n *\n * return upload;\n * });\n * ```\n */\nexport type UploadServerShape = {\n createUpload: (\n inputFile: InputFile,\n clientId: string | null,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getCapabilities: (\n storageId: string,\n clientId: string | null,\n ) => Effect.Effect<DataStoreCapabilities, UploadistaError>;\n upload: (\n file: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getUpload: 
(uploadId: string) => Effect.Effect<UploadFile, UploadistaError>;\n /**\n * Reads the complete uploaded file data as bytes (buffered mode).\n * For large files, consider using readStream() for memory efficiency.\n */\n read: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Falls back to buffered read if the underlying DataStore doesn't support streaming.\n *\n * @param uploadId - The unique identifier of the upload to read\n * @param clientId - Client identifier for multi-tenant routing\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n *\n * @example\n * ```typescript\n * const server = yield* UploadServer;\n * const stream = yield* server.readStream(uploadId, clientId, { chunkSize: 65536 });\n * // Process stream chunk by chunk with bounded memory\n * yield* Stream.runForEach(stream, (chunk) => processChunk(chunk));\n * ```\n */\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) => Effect.Effect<Stream.Stream<Uint8Array, UploadistaError>, UploadistaError>;\n /**\n * Uploads file content from a stream with unknown final size.\n * Creates upload with deferred length, streams content to storage,\n * and updates the upload record with final size when complete.\n *\n * Falls back to buffered upload if the underlying DataStore\n * doesn't support streaming writes.\n *\n * @param file - Input file configuration (size is optional)\n * @param clientId - Client identifier for multi-tenant routing\n * @param stream - Effect Stream of byte chunks to upload\n * @returns The completed UploadFile with final size\n *\n * @example\n * ```typescript\n * const server = yield* UploadServer;\n * const result = yield* server.uploadStream(\n * {\n * storageId: \"s3-production\",\n * type: \"image/webp\",\n * uploadLengthDeferred: true,\n * fileName: \"optimized.webp\",\n * },\n * clientId,\n * transformedStream,\n * );\n * console.log(`Uploaded ${result.size} bytes`);\n * ```\n */\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n delete: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<void, UploadistaError>;\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n unsubscribeFromUploadEvents: (\n uploadId: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n/**\n * Effect-TS context tag for the UploadServer service.\n *\n * Use this tag to access the UploadServer in an Effect context.\n * The server must be provided via a Layer or dependency injection.\n *\n * @example\n * ```typescript\n * // Access UploadServer in an Effect\n * const uploadEffect = Effect.gen(function* () {\n * const server = yield* UploadServer;\n * const upload = yield* server.createUpload(inputFile, clientId);\n * return upload;\n * });\n *\n * // Provide UploadServer layer\n * const program = uploadEffect.pipe(\n * Effect.provide(uploadServer),\n * Effect.provide(uploadFileKvStore),\n * Effect.provide(dataStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * );\n * ```\n */\nexport class UploadServer extends Context.Tag(\"UploadServer\")<\n UploadServer,\n UploadServerShape\n>() {}\n\n/**\n * Creates 
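The JSDoc on UploadServerShape sketches a basic flow; condensed below with a stub stream, and with an assumed subpath import (running it requires providing the UploadServer layer and its dependencies, as the source's examples show):

```typescript
import { Effect } from "effect";
// Import path assumed from the dist layout above.
import { UploadServer } from "@uploadista/core/upload";

// A trivial one-chunk stream standing in for real file data.
const chunk = new ReadableStream<Uint8Array>({
  start(controller) {
    controller.enqueue(new Uint8Array([1, 2, 3]));
    controller.close();
  },
});

// Basic upload flow, per the UploadServerShape JSDoc.
const program = Effect.gen(function* () {
  const server = yield* UploadServer;

  // 1. Create the upload record
  const upload = yield* server.createUpload(
    {
      storageId: "s3-production",
      size: 3,
      type: "image/jpeg",
      fileName: "photo.jpg",
    },
    "client123",
  );

  // 2. Upload the data
  yield* server.uploadChunk(upload.id, "client123", chunk);

  // 3. Read the uploaded file back
  const data = yield* server.read(upload.id, "client123");
  return data;
});
```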
createUploadServer() composes the implementation from UploadFileKVStore, UploadEventEmitter, GenerateId, and UploadFileDataStores. upload chains createUpload and uploadChunk; uploadFromUrl fetches the remote file, buffers it, and replays it as a single-chunk ReadableStream with the buffer's byteLength as the size. readStream consults dataStore.getCapabilities(): with supportsStreamingRead it delegates to dataStore.readStream, otherwise it buffers via dataStore.read and returns Stream.succeed(bytes). uploadStream likewise prefers dataStore.writeStream when supportsStreamingWrite, deliberately skipping createUpload so no unused S3 multipart upload is opened; it writes an initial record to the KV store, emits UPLOAD_STARTED, streams the body, then persists the completed record (final size, path, optional url) and emits UPLOAD_COMPLETE. Without streaming support it buffers all chunks into one Uint8Array and falls back to createUpload + uploadChunk. delete removes the data and the KV entry; getCapabilities, subscribeToUploadEvents, and unsubscribeFromUploadEvents delegate to the data store and event emitter. The file closes with the pre-built UploadServer Effect Layer and its required dependencies (the hunk ends mid-sentence here).
events\n * - GenerateId: For creating upload IDs\n *\n * @example\n * ```typescript\n * // Compose a complete upload system\n * const fullUploadSystem = Layer.mergeAll(\n * uploadServer,\n * uploadFileKvStore,\n * dataStoreLayer,\n * uploadEventEmitter,\n * generateIdLayer\n * );\n *\n * // Use in application\n * const app = Effect.gen(function* () {\n * const server = yield* UploadServer;\n * // Perform uploads...\n * }).pipe(Effect.provide(fullUploadSystem));\n * ```\n */\nexport const uploadServer = Layer.effect(UploadServer, createUploadServer());\n","import type { DataStoreCapabilities, UploadStrategy } from \"../types\";\n\n/**\n * Configuration options for upload strategy negotiation.\n *\n * @property fileSize - Size of the file to be uploaded in bytes\n * @property preferredStrategy - Preferred upload strategy (single, parallel, resumable)\n * @property preferredChunkSize - Preferred chunk size in bytes\n * @property parallelUploads - Number of parallel upload connections\n * @property minChunkSizeForParallel - Minimum file size to consider parallel uploads\n */\nexport type UploadStrategyOptions = {\n fileSize: number;\n preferredStrategy?: UploadStrategy;\n preferredChunkSize?: number;\n parallelUploads?: number;\n minChunkSizeForParallel?: number;\n};\n\n/**\n * Result of upload strategy negotiation.\n *\n * @property strategy - The negotiated upload strategy\n * @property chunkSize - The negotiated chunk size in bytes\n * @property parallelUploads - The negotiated number of parallel uploads\n * @property reasoning - Array of reasoning strings explaining the decisions\n * @property warnings - Array of warning messages about adjustments made\n */\nexport type NegotiatedStrategy = {\n strategy: UploadStrategy;\n chunkSize: number;\n parallelUploads: number;\n reasoning: string[];\n warnings: string[];\n};\n\n/**\n * Negotiates the optimal upload strategy based on data store capabilities and file characteristics.\n *\n * This class analyzes data store capabilities, file size, and user preferences to determine\n * the best upload strategy (single, parallel, resumable) and optimal parameters like chunk size\n * and parallel connection count.\n *\n * The negotiator considers:\n * - Data store capabilities (parallel uploads, resumable uploads, concatenation)\n * - File size and chunk size constraints\n * - User preferences and requirements\n * - Performance optimization opportunities\n *\n * @example\n * ```typescript\n * // Create negotiator for S3 data store\n * const negotiator = new UploadStrategyNegotiator(\n * s3Capabilities,\n * (strategy) => s3Capabilities.supportsStrategy(strategy)\n * );\n *\n * // Negotiate strategy for large file\n * const result = negotiator.negotiateStrategy({\n * fileSize: 100_000_000, // 100MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB chunks\n * parallelUploads: 4\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * ```\n */\nexport class UploadStrategyNegotiator {\n /**\n * Creates a new upload strategy negotiator.\n *\n * @param capabilities - Data store capabilities and constraints\n * @param validateUploadStrategy - Function to validate if a strategy is supported\n */\n constructor(\n private capabilities: DataStoreCapabilities,\n private validateUploadStrategy: (strategy: UploadStrategy) => boolean,\n ) {}\n\n /**\n * Negotiates the optimal upload strategy based on 
options and data store capabilities.\n *\n * This method analyzes the provided options and data store capabilities to determine\n * the best upload strategy, chunk size, and parallel upload settings. It considers\n * user preferences, file size, and data store constraints to make optimal decisions.\n *\n * The negotiation process:\n * 1. Validates preferred strategy against data store capabilities\n * 2. Automatically selects strategy based on file size and capabilities\n * 3. Adjusts chunk size to fit within data store constraints\n * 4. Validates parallel upload settings\n * 5. Ensures final strategy is supported by the data store\n *\n * @param options - Upload strategy options including file size and preferences\n * @returns Negotiated strategy with reasoning and warnings\n *\n * @example\n * ```typescript\n * const result = negotiator.negotiateStrategy({\n * fileSize: 50_000_000, // 50MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB\n * parallelUploads: 3\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.parallelUploads); // 3\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * console.log(result.warnings); // [] (no warnings)\n * ```\n */\n negotiateStrategy(options: UploadStrategyOptions): NegotiatedStrategy {\n const reasoning: string[] = [];\n const warnings: string[] = [];\n\n let strategy: UploadStrategy = \"single\";\n let chunkSize =\n options.preferredChunkSize ??\n this.capabilities.optimalChunkSize ??\n 1024 * 1024;\n let parallelUploads = options.parallelUploads ?? 1;\n\n // Check if data store supports the preferred strategy\n if (options.preferredStrategy) {\n if (!this.validateUploadStrategy(options.preferredStrategy)) {\n warnings.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store, falling back`,\n );\n } else {\n strategy = options.preferredStrategy;\n reasoning.push(`Using preferred strategy: ${strategy}`);\n }\n }\n\n // Automatic strategy selection based on capabilities and file size\n if (\n !options.preferredStrategy ||\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n if (\n this.capabilities.supportsParallelUploads &&\n options.fileSize > (options.minChunkSizeForParallel ?? 10 * 1024 * 1024)\n ) {\n strategy = \"parallel\";\n reasoning.push(\n `Selected parallel upload for large file (${options.fileSize} bytes)`,\n );\n } else {\n strategy = \"single\";\n reasoning.push(\n this.capabilities.supportsParallelUploads\n ? 
`Selected single upload for small file (${options.fileSize} bytes)`\n : \"Selected single upload (parallel not supported by data store)\",\n );\n }\n }\n\n // Validate and adjust chunk size based on data store constraints\n if (\n this.capabilities.minChunkSize &&\n chunkSize < this.capabilities.minChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} below minimum ${this.capabilities.minChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.minChunkSize;\n }\n\n if (\n this.capabilities.maxChunkSize &&\n chunkSize > this.capabilities.maxChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} above maximum ${this.capabilities.maxChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.maxChunkSize;\n }\n\n // Validate parallel upload settings\n if (strategy === \"parallel\") {\n if (\n this.capabilities.maxConcurrentUploads &&\n parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n warnings.push(\n `Parallel uploads ${parallelUploads} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`,\n );\n parallelUploads = this.capabilities.maxConcurrentUploads;\n }\n\n // Check if file would exceed max parts limit\n if (this.capabilities.maxParts) {\n const estimatedParts = Math.ceil(options.fileSize / chunkSize);\n if (estimatedParts > this.capabilities.maxParts) {\n const minChunkForParts = Math.ceil(\n options.fileSize / this.capabilities.maxParts,\n );\n warnings.push(\n `Estimated parts ${estimatedParts} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`,\n );\n chunkSize = Math.max(chunkSize, minChunkForParts);\n }\n }\n }\n\n // Final validation - ensure strategy is still valid after adjustments\n if (!this.validateUploadStrategy(strategy)) {\n warnings.push(\n `Final strategy validation failed, falling back to single upload`,\n );\n strategy = \"single\";\n parallelUploads = 1;\n }\n\n // Add capability information to reasoning\n reasoning.push(\n `Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`,\n );\n\n return {\n strategy,\n chunkSize,\n parallelUploads: strategy === \"parallel\" ? parallelUploads : 1,\n reasoning,\n warnings,\n };\n }\n\n /**\n * Gets the data store capabilities used by this negotiator.\n *\n * @returns The data store capabilities and constraints\n */\n getDataStoreCapabilities(): DataStoreCapabilities {\n return this.capabilities;\n }\n\n /**\n * Validates upload strategy configuration against data store capabilities.\n *\n * This method checks if the provided configuration is valid for the current\n * data store capabilities without performing the actual negotiation. 
It's\n * useful for pre-validation before attempting to negotiate a strategy.\n *\n * @param options - Upload strategy options to validate\n * @returns Validation result with validity flag and error messages\n *\n * @example\n * ```typescript\n * const validation = negotiator.validateConfiguration({\n * fileSize: 10_000_000,\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 1_000_000,\n * parallelUploads: 5\n * });\n *\n * if (!validation.valid) {\n * console.log(\"Configuration errors:\", validation.errors);\n * // Handle validation errors\n * }\n * ```\n */\n validateConfiguration(options: UploadStrategyOptions): {\n valid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n\n if (\n options.preferredStrategy &&\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n errors.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store`,\n );\n }\n\n if (options.preferredChunkSize) {\n if (\n this.capabilities.minChunkSize &&\n options.preferredChunkSize < this.capabilities.minChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`,\n );\n }\n if (\n this.capabilities.maxChunkSize &&\n options.preferredChunkSize > this.capabilities.maxChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`,\n );\n }\n }\n\n if (\n options.parallelUploads &&\n this.capabilities.maxConcurrentUploads &&\n options.parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n errors.push(\n `Parallel uploads ${options.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`,\n );\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n 
}\n}\n"],"mappings":"8VAGA,SAAS,EACP,EACA,EACA,EAAS,EACA,CAET,OADI,EAAO,OAAS,EAAS,EAAQ,OAAe,GAC7C,EAAQ,OAAO,EAAM,IAAM,EAAO,EAAS,KAAO,EAAK,CAMhE,SAAS,EAAY,EAAoB,EAAa,EAAS,EAAY,CACzE,GAAI,EAAO,OAAS,EAAS,EAAI,OAAQ,MAAO,GAChD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAI,OAAQ,IAC9B,GAAI,EAAO,EAAS,KAAO,EAAI,WAAW,EAAE,CAAE,MAAO,GAEvD,MAAO,GAWT,MAAa,GACX,EACA,IACW,CACX,GAAI,EAAO,SAAW,EACpB,MAAO,2BAMT,GAAI,EAAW,EAAQ,CAAC,IAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAK,CAAC,CACtE,MAAO,YAIT,GAAI,EAAW,EAAQ,CAAC,IAAM,IAAM,IAAK,CAAC,CACxC,MAAO,aAIT,GAAI,EAAY,EAAQ,SAAS,EAAI,EAAY,EAAQ,SAAS,CAChE,MAAO,YAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAK,CAAE,EAAE,EACzC,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAAI,EAAY,EAAQ,OAAQ,EAAE,EAEjE,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAK,CAAC,CAClC,MAAO,YAST,GALI,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,EAAK,CAAC,EAK5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,GAAK,CAAC,CAC9C,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAK,CAAC,CAC9C,MAAO,eAIT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CACD,GACE,EAAK,SAAS,OAAO,EACpB,EAAK,SAAS,QAAQ,EAAI,EAAK,SAAS,OAAO,CAEhD,MAAO,gBAOX,GAAI,EAAO,QAAU,IAAM,EAAY,EAAQ,OAAQ,EAAE,CAAE,CACzD,IAAM,EAAU,IAAI,aAAa,CAAC,OAAO,EAAO,MAAM,EAAG,GAAG,CAAC,CAC7D,GACE,EAAQ,WAAW,MAAM,EACzB,EAAQ,WAAW,KAAK,EACxB,EAAQ,WAAW,OAAO,CAE1B,MAAO,YAKX,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,CAC9C,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,kBAIT,GACE,EAAO,QAAU,IAChB,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,EAAI,EAAO,QAAU,KACtD,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,IAAI,CACrB,CACQ,SAAS,WAAW,CAC3B,MAAO,mBAOX,GACE,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAY,EAAQ,MAAM,CAE1B,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,MAAO,EAAE,CAE7B,MAAO,YAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,kBAMT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,CAC5C,CAGA,GAAI,EAAO,QAAU,KAAM,CACzB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OAAO,EAAO,CACtE,GAAI,EAAK,SAAS,QAAQ,CACxB,MAAO,0EACT,GAAI,EAAK,SAAS,MAAM,CACtB,MAAO,oEACT,GAAI,EAAK,SAAS,OAAO,CACvB,MAAO,4EAEX,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,GAAM,GAAM,EAAK,CAAC,CAC1D,MAAO,+BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAM,IAAM,IAAM,GAAM,GAAK,CAAC,CAC1D,MAAO,8BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAK,CAAC,CAClC,MAAO,mBAIT,GAAI,EAAO,QAAU,KAAO,EAAY,EAAQ,QAAS,IAAI,CAC3D,MAAO,oBAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAM,EAAK,CAAC,CACpD,MAAO,WAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,WAMT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAY,EAAO,GACzB,GAAI,IAAc,KAAQ,IAAc,GAEtC,GAAI,CACF,IAAM,EAAO,IAAI,YAAY,QAAQ,CAAC,OACpC,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CAED,OADA,KAAK,MAAM,EAAK,MAAM,CAAC,CAChB,wBACD,GAOZ,GAAI,EAEF,OADY,EAAS,MAAM,IAAI,CAAC,KAAK,EAAE,aAAa,CACpD,CAEE,IAAK,MAC
L,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,OACH,MAAO,aACT,IAAK,OACL,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACL,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,eACT,IAAK,MACH,MAAO,gBAGT,IAAK,MACL,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,mBAGT,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YAGT,IAAK,MACH,MAAO,kBACT,IAAK,OACH,MAAO,0EACT,IAAK,OACH,MAAO,oEACT,IAAK,OACH,MAAO,4EAGT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,+BACT,IAAK,KACH,MAAO,8BACT,IAAK,KACL,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,oBAGT,IAAK,OACH,MAAO,YACT,IAAK,QACH,MAAO,aACT,IAAK,MACH,MAAO,WACT,IAAK,MACH,MAAO,WAGT,IAAK,MACH,MAAO,aACT,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,kBACT,IAAK,OACL,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,WACT,IAAK,KACH,MAAO,yBACT,IAAK,MACH,MAAO,WAET,QACE,MAAO,2BAIb,MAAO,4BAgBT,SAAgB,EAAiB,EAAkB,EAA2B,CAW5E,OATI,IAAa,EACR,GAIa,EAAS,MAAM,IAAI,CAAC,KACpB,EAAS,MAAM,IAAI,CAAC,GCvZ5C,MAAMA,EAEF,EAAO,IAAI,WAAa,CAC1B,IAAM,EAAa,MAAO,EAAO,YAAY,KAAK,EAAO,OAAO,CAChE,OAAO,EAAO,MAAM,EAAY,CAC9B,WAAc,IAAA,GACd,OAAS,IAAU,CACjB,QAAS,EAAK,QACd,OAAQ,EAAK,OACb,WAAY,EAAK,QAAU,EAAI,EAChC,EACF,CAAC,EACF,CA4DW,GACX,EACA,EACA,CACE,mBACA,UACA,eACA,gBAQF,EAAO,IAAI,WAAa,CAItB,IAAM,EAAe,MAAO,EACtB,EAAe,IAAI,MAAM,CAAC,aAAa,CA0E7C,OAvEoB,MAAO,EAAO,IAAI,WAAa,CAEjD,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAU,UACV,EACD,CAEK,EAAK,MAAO,EAAW,YAAY,CACnC,CAAE,OAAM,OAAM,WAAU,eAAc,WAAU,QAAS,EAE3DC,EAAyC,EAAE,CAC/C,GAAI,EACF,GAAI,CACF,EAAiB,KAAK,MAAM,EAAS,MAC/B,CACN,EAAiB,EAAE,CAIvB,IAAMC,EAAyC,CAC7C,GAAG,EACH,OACA,SAAU,GAAY,GACvB,CACG,IACF,EAAe,aAAe,EAAa,UAAU,EAGvD,IAAMC,EAAmB,CACvB,KACA,OACA,SAAU,EACV,OAAQ,EACR,eACA,QAAS,CACP,GAAI,EAAU,UACd,OACA,KAAM,GACN,OAAQ,EAAU,OACnB,CACD,OACA,eACD,CAGK,EAAU,MAAO,EAAU,OAAO,EAAK,CAY7C,OATA,MAAO,EAAQ,IAAI,EAAI,EAAQ,CAG/B,MAAO,EAAa,KAAK,EAAI,CAC3B,KAAM,EAAgB,eACtB,KAAM,EACN,KAAM,EAAQ,KACf,CAAC,CAEK,GACP,CAAC,KAED,EAAO,SAAS,gBAAiB,CAC/B,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CACH,EAGD,CAAC,KAGD,EAAO,SAAS,SAAU,CACxB,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAStB,GAPA,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGG,EAAK,KAAM,CACb,IAAM,EAAoB,EAAO,UAC/B,yBACA,EAAiB,YAAY,CAC3B,MAAO,KACP,OAAQ,EACR,MAAO,GACR,CAAC,CACH,CACD,MAAO,EAAO,OAAO,EAAmB,EAAK,KAAK,CAIpD,IAAM,EAAqB,EAAO,MAAM,iBAAiB,CACzD,MAAO,EAAO,UAAU,EAAmB,EAC3C,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,iBAAiB,CAAC,KAC/B,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAChC,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,SAAS,yBAAyB,CAAC,KAC/C,EAAO,aAAa,CAClB,mBAAoB,EAAU,UAAY,UAC1C,oBAAqB,EAAU,UAC/B,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CAGD,MAAO,EAAO,UACZ,EAAO,QAAQ,sBAAuB,CACpC,YAAa,sCACd,CAAC,CACH,EACD,CACH,CACF,CChOH,SAAgB,EAAmB,EAAyB,CAC1D,OAAO,EAAO,uBACN,EACL,GACC,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,OAAO,EAAM,CACpB,CAAC,CACL,CCgCH,SAAgB,EAAa,CAC3B,OACA,SACA,YACA,cACA,aACA,eACA,yBAAyB,KACH,CACtB,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAS,EAAgB,EAAK,CAEpC,GAAI,EAAW,OAAO,QACpB,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAIhE,IAAM,EAAkB,IAAI,gBACtB,CAAE,UAAW,EAGb,MAAgB,CAEpB,EAAgB,OAAO,EAKzB,OAFA,EAAW,OAAO,iBAAiB,QAAS,EAAS,CAAE,KAAM,GAAM,CAAC,CAE7D,MAAO,EAAO,kBACnB,EAAO,UAAY,CAAE,SAAQ,UAAS,EAAE,EACvC,CAAE,OAAQ,KACT,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAe,MAAO,EA
AI,KAAK,EAAE,CAQjC,EALU,EAAoB,MAAM,CACxC,QAAS,EACV,CAAC,CAG4B,EAAO,CAyCrC,OAtCe,MAAO,EAAU,MAC9B,CACE,OAAQ,EACR,QAAS,EAAO,GAChB,OAAQ,EAAO,OAChB,CACD,CACE,WAAa,GAAsB,CAEjC,IAAM,EAAM,KAAK,KAAK,CACtB,EAAI,IAAI,EAAa,CAClB,KACC,EAAO,QAAS,GACV,EAAM,GAAY,EACb,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAI,IAAI,EAAc,EAAI,CACjC,MAAO,EAAa,KAAK,EAAO,GAAI,CAClC,KAAM,EAAgB,gBACtB,KAAM,CACJ,GAAI,EAAO,GACX,SAAU,EACV,MAAO,EAAO,MAAQ,EACvB,CACD,KAAM,EAAO,KACd,CAAC,EACF,CAEG,EAAO,KACd,CACF,EAAO,WACR,CACA,UAAY,GAEX,EAEP,CACF,EAGD,CAAC,KACD,EAAO,SAAU,GACX,aAAiB,OAAS,EAAM,OAAS,aACpC,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAErD,aAAiB,EACZ,EAAO,KAAK,EAAM,CAEpB,EAAO,KACZ,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAC/D,CACD,CACH,EACF,CAAE,QAAA,KACD,EAAO,SAAW,CAChB,EAAW,OAAO,oBAAoB,QAASC,EAAQ,EACvD,CACL,EACD,CAAC,KAED,EAAO,SAAS,wBAAyB,CACvC,WAAY,CACV,YAAa,EAAO,GACpB,gBAAiB,EAAO,OAAO,UAAU,CACzC,uBAAwB,EAAY,UAAU,CAC9C,mBAAoB,EAAO,MAAM,UAAU,EAAI,IAChD,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,SAAS,wBAAwB,CAAC,KACvC,EAAO,aAAa,CAClB,YAAa,EAAO,GACpB,eAAgB,EAAO,UAAU,CACjC,uBAAwB,EAAS,EAAO,QAAQ,UAAU,CAC3D,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,2BAA2B,CAAC,KAC1C,EAAO,aAAa,CAClB,YAAa,EAAO,GACpB,gBAAiB,EAAO,OAAO,UAAU,CACzC,MAAO,aAAiB,EAAkB,EAAM,KAAO,OAAO,EAAM,CACrE,CAAC,CACH,CACF,CACF,CC7LH,SAAS,EAAmB,EAAsC,CAChE,OAAO,EAAO,aAAa,CACzB,QAAS,EAAa,QACtB,OAAQ,EAAa,OACrB,QAAS,EAAa,aAAe,EACtC,CAAC,CAcJ,MAAM,GACJ,EACA,IACwB,CACxB,IAAM,EAAe,IAAI,KAAK,EAAK,aAAuB,CAAC,SAAS,CAC9D,EAAkB,KAAK,KAAK,CAAG,EAErC,OAAO,EAAO,KAAK,KACjB,EAAO,SAAS,kBAAmB,CACjC,WAAY,CACV,YAAa,EAAK,GAClB,cAAe,EAAK,MAAQ,EAC5B,2BAA4B,EAC5B,oBAAqB,EAAK,QAAQ,GAClC,mBAAoB,EAAK,UAAU,UAAY,UAC/C,uBAAwB,EAAK,aAC7B,yBAA0B,IAAI,MAAM,CAAC,aAAa,CACnD,CACD,OAAQ,EACT,CAAC,CACH,EAoDU,GACX,EACA,EACA,EACA,CACE,mBACA,UACA,kBAOF,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAO,MAAO,EAAQ,IAAI,EAAS,CAInC,EAAa,EAAK,aACpB,EAAmB,EAAK,aAAa,CACrC,IAAA,GAsEJ,OAAO,MAnEc,EAAO,IAAI,WAAa,CAE3C,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAK,QAAQ,GACb,EACD,CAiDD,MAhCA,GAAK,OAVa,MAAO,EAAa,CACpC,YACA,KAAM,EACN,OAAQ,EACR,YAAa,IACb,WAPiB,IAAI,gBAQrB,uBAAwB,IACxB,eACD,CAAC,CAKF,MAAO,EAAQ,IAAI,EAAU,EAAK,CAGlC,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,gBACtB,KAAM,CACJ,GAAI,EAAK,GACT,SAAU,EAAK,OACf,MAAO,EAAK,MAAQ,EACrB,CACD,KAAM,EAAK,KACZ,CAAC,CAGE,EAAK,MAAQ,EAAK,SAAW,EAAK,OACpC,MAAO,EAAe,CACpB,OACA,YACA,eACD,CAAC,CAIE,EAAK,eAEP,MAAO,EAA+B,EADX,EAAmB,EAAK,aAAa,CACD,GAI5D,GACP,CAAC,KAED,EAAO,SAAS,eAAgB,CAC9B,WAAY,CACV,YAAa,EACb,kBAAmB,EACnB,2BAA4B,EAAK,aAAe,OAAS,QAC1D,CACD,OAAQ,EACT,CAAC,CACH,EAGD,CAAC,KAED,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGD,IAAM,EAAY,EAAK,OACjB,EAAqB,EAAO,UAChC,mBACA,EAAiB,OAAO,CACtB,MAAO,OACP,MAAO,OACP,MAAO,GACR,CAAC,CACH,CAID,GAHA,MAAO,EAAO,OAAO,EAAoB,EAAU,CAG/C,EAAK,MAAQ,EAAK,KAAO,EAAG,CAC9B,IAAM,EAAa,EACb,EAAkB,EAAO,MAC7B,qCACD,CACD,MAAO,EAAO,IAAI,EAAiB,EAAW,GAEhD,CACH,CAED,EAAO,IAAK,GACV,EAAO,SAAS,iBAAiB,CAAC,KAChC,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,aAAc,EAAK,OAAO,UAAU,CACpC,iBACE,EAAK,MAAQ,EAAK,KAAO,GACnB,EAAK,OAAS,EAAK,KAAQ,KAAK,QAAQ,EAAE,CAC5C,IACN,oBAAqB,EAAK,MAAM,UAAU,EAAI,IAC/C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,sBAAsB,CAAC,KACrC,EAAO,aAAa,CAClB,YAAa,EACb,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CACF,CACF,CAoCG,GAAkB,CACtB,OACA,YACA,kBAMA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAe,EAAU,iBAAiB,CAGhD,GACE,EAAa,mBACb,EAAK,MACL,EAAK,KAAO,EAAa,kBACzB,CACA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,QAAS,cAAc,EAAK,KAAK,uCAAuC,EAAa,kBAAkB,8BACxG,CACD,KAAM,EAAK,KACZ,CAAC,CACF,OAIF,IAAM,EAAY,MAAO,EAAU,KAAK,EAAK,GAAG,CAGhD,GAAI,EAAK,UAAY,EAAK,kBAAmB,CAC3C,IAAM,EAAmB,MAAO,EAC9
B,EACA,EAAK,kBACN,CAED,GAAI,IAAqB,EAAK,SAiB5B,OAfA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,yBACtB,KAAM,CACJ,GAAI,EAAK,GACT,OAAQ,oBACR,SAAU,EAAK,SACf,OAAQ,EACT,CACD,KAAM,EAAK,KACZ,CAAC,CAGF,MAAO,EAAU,OAAO,EAAK,GAAG,CAGzB,MAAO,EAAgB,SAAS,oBAAqB,CAC1D,KAAM,yCAAyC,EAAK,SAAS,SAAS,IACtE,QAAS,CACP,SAAU,EAAK,GACf,SAAU,EAAK,SACf,OAAQ,EACR,UAAW,EAAK,kBACjB,CACF,CAAC,CAAC,UAAU,CAIf,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,eAAgB,WAChB,UAAW,EAAK,kBACjB,CACD,KAAM,EAAK,KACZ,CAAC,CAIJ,GAAI,EAAa,2BAA4B,CAC3C,IAAM,EAAmB,EAAe,EAAU,CAC5C,EAAmB,EAAK,UAAU,KAExC,GACE,GACA,CAAC,EAAiB,EAAkB,EAAiB,CAkBrD,OAfA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,yBACtB,KAAM,CACJ,GAAI,EAAK,GACT,OAAQ,oBACR,SAAU,EACV,OAAQ,EACT,CACD,KAAM,EAAK,KACZ,CAAC,CAGF,MAAO,EAAU,OAAO,EAAK,GAAG,CAGzB,MAAO,EAAgB,SAAS,oBAAqB,CAC1D,KAAM,0CAA0C,EAAiB,cAAc,IAC/E,QAAS,CACP,SAAU,EAAK,GACf,SAAU,EACV,OAAQ,EACT,CACF,CAAC,CAAC,UAAU,CAIf,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,eAAgB,WACjB,CACD,KAAM,EAAK,KACZ,CAAC,GAEJ,CAAC,KACD,EAAO,SAAS,kBAAmB,CACjC,WAAY,CACV,YAAa,EAAK,GAClB,+BAAgC,EAAK,SAAW,OAAS,QACzD,2BAA4B,EAAU,iBAAiB,CACpD,2BACC,OACA,QACL,CACF,CAAC,CACH,CC3YU,EAAa,GACjB,EAAO,WAAW,CACvB,IAAK,SACI,MAAM,MAAM,EAAI,CAEzB,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAC/C,MAAO,EACR,CAAC,CAEL,CAAC,CAAC,KAED,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,aAAc,EACd,mBAAoB,QACrB,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,wBAAyB,CACtC,YAAa,oCACd,CAAC,CACH,CAGG,EAAS,KACX,MAAO,EAAO,UACZ,EAAO,QAAQ,gCAAiC,CAC9C,YAAa,+CACd,CAAC,CACH,GAEH,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,sBAAsB,CAAC,KACpC,EAAO,aAAa,CAClB,aAAc,EACd,kBAAmB,EAAS,OAAO,UAAU,CAC7C,cAAe,EAAS,GAAG,UAAU,CACrC,0BACE,EAAS,QAAQ,IAAI,iBAAiB,EAAI,UAC7C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,+BAAgC,CAC7C,YAAa,2CACd,CAAC,CACH,CAGD,MAAO,EAAO,SAAS,mBAAmB,CAAC,KACzC,EAAO,aAAa,CAClB,aAAc,EACd,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,EACD,CACH,CACF,CAiCU,EAAe,GACnB,EAAO,WAAW,CACvB,IAAK,SACI,MAAM,EAAS,aAAa,CAErC,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAC/C,MAAO,EACR,CAAC,CAEL,CAAC,CAAC,KAED,EAAO,SAAS,2BAA4B,CAC1C,WAAY,CACV,mBAAoB,cACrB,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,SAAS,qCAAqC,CAAC,KACpD,EAAO,aAAa,CAClB,cAAe,EAAO,WAAW,UAAU,CAC5C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,6CAA6C,CAAC,KAC5D,EAAO,aAAa,CAClB,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CACF,CACF,CCqFH,IAAa,EAAb,cAAkC,EAAQ,IAAI,eAAe,EAG1D,AAAC,GAgCJ,SAAgB,GAAqB,CACnC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAU,MAAO,EACjB,EAAe,MAAO,EACtB,EAAa,MAAO,EACpB,EAAmB,MAAO,EAEhC,MAAO,CACL,QACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAOtB,OAAO,MAAO,GANM,MAAO,EAAa,EAAW,EAAU,CAC3D,mBACA,UACA,eACA,aACD,CAAC,EACoC,GAAI,EAAU,EAAQ,CAC1D,mBACA,UACA,eACD,CAAC,EACF,CACJ,eACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAS,MAAO,EADL,MAAO,EAAU,EAAI,CACK,CAGrC,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,IAAI,WAAW,EAAO,CAAC,CAC1C,EAAW,OAAO,EAErB,CAAC,CAYF,OAAO,MAAO,GAVM,MAAO,EACzB,CAAE,GAAG,EAAW,KAAM,EAAO,WAAY,CACzC,EACA,CACE,mBACA,UACA,eACA,aACD,CACF,EACqC,GAAI,EAAU,EAAQ,CAC1D,mBACA,UACA,eACD,CAAC,EACF,CACJ,cAAe,EAAsB,IACnC,EAAO,IAAI,WAAa,CAOtB,OANoB,MAAO,EAAa,EAAW,EAAU,CAC3D,mBACA,UACA,eACA,aACD,CAAC,EAEF,CACJ,aACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAMtB,OALa,MAAO,EAAY,EAAU,EAAU,EAAO,CACzD,mBACA,UACA,eACD,CAAC,EAEF,CACJ,UAAY,GACV,EAAO,IAAI,WAAa,CAEtB,OADa,MAAO,EAAQ,IAAI,EAAS,EAEzC,CACJ,MAAO,EAAkB,IACvB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CAK3C,OAAO,OAJW,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,EACuB,KAAK,EAAS,EACtC,CACJ,YACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CACrC,EAAY,MAAO,
EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,CAID,GADqB,EAAU,iBAAiB,CAC/B,uBAAyB,EAAU,WAKlD,OAHA,MAAO,EAAO,SACZ,iCAAiC,IAClC,CACM,MAAO,EAAU,WAAW,EAAU,EAAO,CAItD,MAAO,EAAO,SACZ,0CAA0C,EAAS,4BACpD,CACD,IAAM,EAAQ,MAAO,EAAU,KAAK,EAAS,CAG7C,OAAO,EAAO,QAAQ,EAAM,EAC5B,CACJ,cACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAK,UACL,EACD,CAGK,EAAe,EAAU,iBAAiB,CAG1C,EAAW,MAAO,EAAW,YAAY,CAE/C,GAAI,EAAa,wBAA0B,EAAU,YAAa,CAGhE,MAAO,EAAO,SACZ,kCAAkC,IACnC,CAGD,IAAM,EACJ,OAAO,EAAK,UAAa,SACrB,KAAK,MAAM,EAAK,SAAS,CACzB,EAAK,UAAY,EAAE,CAGnB,EAAiB,OAAO,YAC5B,OAAO,QAAQ,EAAS,CAAC,KAAK,CAAC,EAAG,KAAO,CAAC,EAAG,OAAO,EAAE,CAAC,CAAC,CACzD,CAGKC,EAA4B,CAChC,GAAI,EACJ,OAAQ,EACR,KAAM,EAAK,MAAQ,EACnB,QAAS,CACP,GAAI,EAAK,UACT,KAAM,EAAU,iBAAiB,CAAC,uBAC9B,YACA,UACL,CACD,WACA,aAAc,IAAI,MAAM,CAAC,aAAa,CACvC,CACD,MAAO,EAAQ,IAAI,EAAU,EAAc,CAG3C,MAAO,EAAa,KAAK,EAAU,CACjC,KAAM,EAAgB,eACtB,KAAM,EACP,CAAC,CAEF,IAAM,EAAS,MAAO,EAAU,YAAY,EAAU,CACpD,SACA,YAAa,EAAK,KAClB,SAAU,EAAK,SACf,SAAU,EACX,CAAC,CAGIC,EAA8B,CAClC,GAAG,EACH,KAAM,EAAO,KACb,OAAQ,EAAO,KACf,QAAS,CACP,GAAG,EAAc,QACjB,KAAM,EAAO,KACd,CACD,GAAI,EAAO,KAAO,CAAE,IAAK,EAAO,IAAK,CACtC,CAUD,OARA,MAAO,EAAQ,IAAI,EAAU,EAAgB,CAG7C,MAAO,EAAa,KAAK,EAAU,CACjC,KAAM,EAAgB,gBACtB,KAAM,EACP,CAAC,CAEK,EAIT,MAAO,EAAO,WACZ,4CAA4C,EAAS,kCACtD,CAGD,IAAMC,EAAuB,EAAE,CAC/B,MAAO,EAAO,WAAW,EAAS,GAChC,EAAO,SAAW,CAChB,EAAO,KAAK,EAAM,EAClB,CACH,CAGD,IAAM,EAAY,EAAO,QAAQ,EAAK,IAAU,EAAM,EAAM,OAAQ,EAAE,CAGhE,EAAS,IAAI,WAAW,EAAU,CACpC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,EAAO,EAAO,CACzB,GAAU,EAAM,OAIlB,IAAM,EAAiB,IAAI,eAAe,CACxC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAO,CAC1B,EAAW,OAAO,EAErB,CAAC,CAgBF,OARmB,MAAO,EALG,CAC3B,GAAG,EACH,KAAM,EACP,CAEiD,EAAU,CAC1D,mBACA,UACA,eACA,WAAY,CAAE,eAAkB,EAAO,QAAQ,EAAS,CAAE,CAC3D,CAAC,CAGK,MAAO,EAAY,EAAU,EAAU,EAAgB,CAC5D,mBACA,UACA,eACD,CAAC,EACF,CACJ,QAAS,EAAkB,IACzB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CAK3C,OAJkB,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,EACgB,OAAO,EAAS,CACjC,MAAO,EAAQ,OAAO,EAAS,EAE/B,CACJ,iBAAkB,EAAmB,IACnC,EAAO,IAAI,WAAa,CAKtB,OAJkB,MAAO,EAAiB,aACxC,EACA,EACD,EACgB,iBAAiB,EAClC,CACJ,yBACE,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,UAAU,EAAU,EAAW,EACnD,CACJ,4BAA8B,GAC5B,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,YAAY,EAAS,EACzC,CACL,EACD,CAiCJ,MAAa,EAAe,EAAM,OAAO,EAAc,GAAoB,CAAC,CCxiB5E,IAAa,EAAb,KAAsC,CAOpC,YACE,EACA,EACA,CAFQ,KAAA,aAAA,EACA,KAAA,uBAAA,EAoCV,kBAAkB,EAAoD,CACpE,IAAMG,EAAsB,EAAE,CACxBC,EAAqB,EAAE,CAEzBC,EAA2B,SAC3B,EACF,EAAQ,oBACR,KAAK,aAAa,kBAClB,KAAO,KACL,EAAkB,EAAQ,iBAAmB,EA2DjD,GAxDI,EAAQ,oBACL,KAAK,uBAAuB,EAAQ,kBAAkB,EAKzD,EAAW,EAAQ,kBACnB,EAAU,KAAK,6BAA6B,IAAW,EALvD,EAAS,KACP,uBAAuB,EAAQ,kBAAkB,6CAClD,GASH,CAAC,EAAQ,mBACT,CAAC,KAAK,uBAAuB,EAAQ,kBAAkB,IAGrD,KAAK,aAAa,yBAClB,EAAQ,UAAY,EAAQ,yBAA2B,GAAK,KAAO,OAEnE,EAAW,WACX,EAAU,KACR,4CAA4C,EAAQ,SAAS,SAC9D,GAED,EAAW,SACX,EAAU,KACR,KAAK,aAAa,wBACd,0CAA0C,EAAQ,SAAS,SAC3D,gEACL,GAMH,KAAK,aAAa,cAClB,EAAY,KAAK,aAAa,eAE9B,EAAS,KACP,cAAc,EAAU,iBAAiB,KAAK,aAAa,aAAa,aACzE,CACD,EAAY,KAAK,aAAa,cAI9B,KAAK,aAAa,cAClB,EAAY,KAAK,aAAa,eAE9B,EAAS,KACP,cAAc,EAAU,iBAAiB,KAAK,aAAa,aAAa,aACzE,CACD,EAAY,KAAK,aAAa,cAI5B,IAAa,aAEb,KAAK,aAAa,sBAClB,EAAkB,KAAK,aAAa,uBAEpC,EAAS,KACP,oBAAoB,EAAgB,mBAAmB,KAAK,aAAa,qBAAqB,aAC/F,CACD,EAAkB,KAAK,aAAa,sBAIlC,KAAK,aAAa,UAAU,CAC9B,IAAM,EAAiB,KAAK,KAAK,EAAQ,SAAW,EAAU,CAC9D,GAAI,EAAiB,KAAK,aAAa,SAAU,CAC/C,IAAM,EAAmB,KAAK,KAC5B,EAAQ,SAAW,KAAK,aAAa,SACtC,CACD,EAAS,KACP,mBAAmB,EAAe,mBAAmB,KAAK,aAAa,SAAS,yBACjF,CACD,EAAY,KAAK,IAAI,EAAW,EAAiB,EAmBvD,OAbK,KAAK,uBAAuB,EAAS,GACxC,EAAS,KACP,kEACD,CACD,EAAW,SACX,EAAkB,GAIpB,EAAU,KACR,qCAAqC,KAAK,aAAa,wBAAwB,kBAAkB,KAAK,aAAa,sBAAsB
,cAAc,KAAK,aAAa,2BAC1K,CAEM,CACL,WACA,YACA,gBAAiB,IAAa,WAAa,EAAkB,EAC7D,YACA,WACD,CAQH,0BAAkD,CAChD,OAAO,KAAK,aA4Bd,sBAAsB,EAGpB,CACA,IAAMC,EAAmB,EAAE,CAwC3B,OArCE,EAAQ,mBACR,CAAC,KAAK,uBAAuB,EAAQ,kBAAkB,EAEvD,EAAO,KACL,uBAAuB,EAAQ,kBAAkB,+BAClD,CAGC,EAAQ,qBAER,KAAK,aAAa,cAClB,EAAQ,mBAAqB,KAAK,aAAa,cAE/C,EAAO,KACL,cAAc,EAAQ,mBAAmB,4BAA4B,KAAK,aAAa,eACxF,CAGD,KAAK,aAAa,cAClB,EAAQ,mBAAqB,KAAK,aAAa,cAE/C,EAAO,KACL,cAAc,EAAQ,mBAAmB,4BAA4B,KAAK,aAAa,eACxF,EAKH,EAAQ,iBACR,KAAK,aAAa,sBAClB,EAAQ,gBAAkB,KAAK,aAAa,sBAE5C,EAAO,KACL,oBAAoB,EAAQ,gBAAgB,8BAA8B,KAAK,aAAa,uBAC7F,CAGI,CACL,MAAO,EAAO,SAAW,EACzB,SACD"}
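The buffered fallback in `uploadStream` above (taken when a data store's capabilities lack `supportsStreamingWrite`) is the one piece of the removed `upload-server.ts` worth isolating. Below is a minimal standalone sketch of that collect-and-replay step, using plain Web Streams instead of Effect's `Stream`; the function name is invented for illustration:

```typescript
// Sketch of the buffered fallback in `uploadStream`: collect every chunk,
// concatenate into one contiguous buffer, and replay it as a single-chunk
// ReadableStream so the regular createUpload + uploadChunk path can be reused.
async function bufferToSingleChunkStream(
  chunks: AsyncIterable<Uint8Array>,
): Promise<{ stream: ReadableStream<Uint8Array>; size: number }> {
  const collected: Uint8Array[] = [];
  for await (const chunk of chunks) {
    collected.push(chunk);
  }

  // The total size becomes the `size` recorded on the upload before replay.
  const size = collected.reduce((acc, c) => acc + c.length, 0);

  // Copy each chunk into one contiguous buffer, as the fallback does.
  const buffer = new Uint8Array(size);
  let offset = 0;
  for (const c of collected) {
    buffer.set(c, offset);
    offset += c.length;
  }

  const stream = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(buffer);
      controller.close();
    },
  });

  return { stream, size };
}
```

The trade-off is explicit in the source's `logWarning`: the fallback holds the entire file in memory, which is why native streaming writes skip `createUpload` entirely and let the store's own `writeStream` manage its multipart upload.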
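Similarly, the `maxParts` guard in `negotiateStrategy` is easiest to follow with concrete numbers. A dependency-free sketch with an illustrative helper name; the 10,000-part ceiling mirrors common S3-style limits and is not something this diff pins down:

```typescript
// Re-derivation of the maxParts adjustment in negotiateStrategy: if the
// estimated part count exceeds the store's limit, raise the chunk size to
// ceil(fileSize / maxParts) so the upload still fits within maxParts parts.
function adjustChunkSizeForMaxParts(
  fileSize: number,
  chunkSize: number,
  maxParts: number,
): number {
  const estimatedParts = Math.ceil(fileSize / chunkSize);
  if (estimatedParts <= maxParts) return chunkSize;
  const minChunkForParts = Math.ceil(fileSize / maxParts);
  return Math.max(chunkSize, minChunkForParts);
}

// A 100 GiB file in 5 MiB chunks needs 20,480 parts, over a 10,000-part
// limit, so the chunk size is raised to ceil(fileSize / 10_000) bytes.
const adjusted = adjustChunkSizeForMaxParts(
  100 * 1024 ** 3, // 107_374_182_400 bytes
  5 * 1024 ** 2, // 5_242_880 bytes
  10_000,
);
console.log(adjusted); // 10737419 (≈ 10.24 MiB)
```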
1
+ {"version":3,"file":"upload-C-C7hn1-.mjs","names":["captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n>","parsedMetadata: Record<string, string>","metadataObject: Record<string, string>","file: UploadFile","onAbort","initialUpload: UploadFile","completedUpload: UploadFile","chunks: Uint8Array[]","capabilities: DataStoreCapabilities","validateUploadStrategy: (strategy: UploadStrategy) => boolean","reasoning: string[]","warnings: string[]","strategy: UploadStrategy","errors: string[]"],"sources":["../src/upload/mime.ts","../src/upload/create-upload.ts","../src/upload/convert-to-stream.ts","../src/upload/write-to-store.ts","../src/upload/upload-chunk.ts","../src/upload/upload-url.ts","../src/upload/upload-engine.ts","../src/upload/upload-strategy-negotiator.ts"],"sourcesContent":["/**\n * Helper to check if buffer matches a byte pattern at given offset\n */\nfunction checkBytes(\n buffer: Uint8Array,\n pattern: number[],\n offset = 0,\n): boolean {\n if (buffer.length < offset + pattern.length) return false;\n return pattern.every((byte, i) => buffer[offset + i] === byte);\n}\n\n/**\n * Helper to check if buffer matches a string pattern at given offset\n */\nfunction checkString(buffer: Uint8Array, str: string, offset = 0): boolean {\n if (buffer.length < offset + str.length) return false;\n for (let i = 0; i < str.length; i++) {\n if (buffer[offset + i] !== str.charCodeAt(i)) return false;\n }\n return true;\n}\n\n/**\n * Detect MIME type from buffer using magic bytes (file signatures).\n * Supports a wide range of common file types including images, videos, audio, documents, and archives.\n *\n * @param buffer - File content as Uint8Array\n * @param filename - Optional filename for extension-based fallback\n * @returns Detected MIME type or \"application/octet-stream\" if unknown\n */\nexport const detectMimeType = (\n buffer: Uint8Array,\n filename?: string,\n): string => {\n if (buffer.length === 0) {\n return \"application/octet-stream\";\n }\n\n // ===== IMAGES =====\n\n // PNG: 89 50 4E 47 0D 0A 1A 0A\n if (checkBytes(buffer, [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])) {\n return \"image/png\";\n }\n\n // JPEG: FF D8 FF\n if (checkBytes(buffer, [0xff, 0xd8, 0xff])) {\n return \"image/jpeg\";\n }\n\n // GIF87a or GIF89a\n if (checkString(buffer, \"GIF87a\") || checkString(buffer, \"GIF89a\")) {\n return \"image/gif\";\n }\n\n // WebP: RIFF....WEBP\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WEBP\", 8)\n ) {\n return \"image/webp\";\n }\n\n // AVIF: ....ftypavif or ....ftypavis\n if (\n buffer.length >= 12 &&\n checkBytes(buffer, [0x00, 0x00, 0x00], 0) &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"avif\", 8) || checkString(buffer, \"avis\", 8))\n ) {\n return \"image/avif\";\n }\n\n // HEIC/HEIF: ....ftypheic or ....ftypheif or ....ftypmif1\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"heic\", 8) ||\n checkString(buffer, \"heif\", 8) ||\n checkString(buffer, \"mif1\", 8))\n ) {\n return \"image/heic\";\n }\n\n // BMP: 42 4D\n if (checkBytes(buffer, [0x42, 0x4d])) {\n return \"image/bmp\";\n }\n\n // TIFF (little-endian): 49 49 2A 00\n if (checkBytes(buffer, [0x49, 0x49, 0x2a, 0x00])) {\n return \"image/tiff\";\n }\n\n // TIFF (big-endian): 4D 4D 00 2A\n if (checkBytes(buffer, [0x4d, 0x4d, 0x00, 0x2a])) {\n return \"image/tiff\";\n }\n\n // ICO: 00 00 01 00\n if (checkBytes(buffer, [0x00, 0x00, 0x01, 0x00])) {\n return 
\"image/x-icon\";\n }\n\n // SVG (XML-based, check for <svg or <?xml)\n if (buffer.length >= 5) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n if (\n text.includes(\"<svg\") ||\n (text.includes(\"<?xml\") && text.includes(\"<svg\"))\n ) {\n return \"image/svg+xml\";\n }\n }\n\n // ===== VIDEOS =====\n\n // MP4/M4V/M4A: ....ftyp\n if (buffer.length >= 12 && checkString(buffer, \"ftyp\", 4)) {\n const subtype = new TextDecoder().decode(buffer.slice(8, 12));\n if (\n subtype.startsWith(\"mp4\") ||\n subtype.startsWith(\"M4\") ||\n subtype.startsWith(\"isom\")\n ) {\n return \"video/mp4\";\n }\n }\n\n // WebM: 1A 45 DF A3\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3])) {\n return \"video/webm\";\n }\n\n // AVI: RIFF....AVI\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"AVI \", 8)\n ) {\n return \"video/x-msvideo\";\n }\n\n // MOV (QuickTime): ....moov or ....mdat or ....free\n if (\n buffer.length >= 8 &&\n (checkString(buffer, \"moov\", 4) ||\n checkString(buffer, \"mdat\", 4) ||\n checkString(buffer, \"free\", 4))\n ) {\n return \"video/quicktime\";\n }\n\n // MKV: 1A 45 DF A3 (same as WebM but check for Matroska)\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3]) && buffer.length >= 100) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, 100),\n );\n if (text.includes(\"matroska\")) {\n return \"video/x-matroska\";\n }\n }\n\n // ===== AUDIO =====\n\n // MP3: FF FB or FF F3 or FF F2 or ID3\n if (\n checkBytes(buffer, [0xff, 0xfb]) ||\n checkBytes(buffer, [0xff, 0xf3]) ||\n checkBytes(buffer, [0xff, 0xf2]) ||\n checkString(buffer, \"ID3\")\n ) {\n return \"audio/mpeg\";\n }\n\n // WAV: RIFF....WAVE\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WAVE\", 8)\n ) {\n return \"audio/wav\";\n }\n\n // FLAC: 66 4C 61 43 (fLaC)\n if (checkString(buffer, \"fLaC\")) {\n return \"audio/flac\";\n }\n\n // OGG: 4F 67 67 53 (OggS)\n if (checkString(buffer, \"OggS\")) {\n return \"audio/ogg\";\n }\n\n // M4A: ....ftypM4A\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n checkString(buffer, \"M4A\", 8)\n ) {\n return \"audio/mp4\";\n }\n\n // ===== DOCUMENTS =====\n\n // PDF: 25 50 44 46 (%PDF)\n if (checkString(buffer, \"%PDF\")) {\n return \"application/pdf\";\n }\n\n // ===== ARCHIVES =====\n\n // ZIP: 50 4B 03 04 or 50 4B 05 06 (empty archive) or 50 4B 07 08 (spanned archive)\n if (\n checkBytes(buffer, [0x50, 0x4b, 0x03, 0x04]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x05, 0x06]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x07, 0x08])\n ) {\n // Could be ZIP, DOCX, XLSX, PPTX, JAR, APK, etc.\n // Check for Office formats\n if (buffer.length >= 1024) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(buffer);\n if (text.includes(\"word/\"))\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n if (text.includes(\"xl/\"))\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n if (text.includes(\"ppt/\"))\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n }\n return \"application/zip\";\n }\n\n // RAR: 52 61 72 21 1A 07 (Rar!)\n if (checkBytes(buffer, [0x52, 0x61, 0x72, 0x21, 0x1a, 0x07])) {\n return \"application/x-rar-compressed\";\n }\n\n // 7Z: 37 7A BC AF 27 1C\n if (checkBytes(buffer, [0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c])) {\n return 
\"application/x-7z-compressed\";\n }\n\n // GZIP: 1F 8B\n if (checkBytes(buffer, [0x1f, 0x8b])) {\n return \"application/gzip\";\n }\n\n // TAR (ustar): \"ustar\" at offset 257\n if (buffer.length >= 262 && checkString(buffer, \"ustar\", 257)) {\n return \"application/x-tar\";\n }\n\n // ===== FONTS =====\n\n // WOFF: 77 4F 46 46 (wOFF)\n if (checkString(buffer, \"wOFF\")) {\n return \"font/woff\";\n }\n\n // WOFF2: 77 4F 46 32 (wOF2)\n if (checkString(buffer, \"wOF2\")) {\n return \"font/woff2\";\n }\n\n // TTF: 00 01 00 00 00\n if (checkBytes(buffer, [0x00, 0x01, 0x00, 0x00, 0x00])) {\n return \"font/ttf\";\n }\n\n // OTF: 4F 54 54 4F (OTTO)\n if (checkString(buffer, \"OTTO\")) {\n return \"font/otf\";\n }\n\n // ===== TEXT =====\n\n // JSON (basic check for { or [)\n if (buffer.length >= 1) {\n const firstByte = buffer[0];\n if (firstByte === 0x7b || firstByte === 0x5b) {\n // { or [\n try {\n const text = new TextDecoder(\"utf-8\").decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n JSON.parse(text.trim());\n return \"application/json\";\n } catch {\n // Not valid JSON\n }\n }\n }\n\n // Fallback to extension-based detection\n if (filename) {\n const ext = filename.split(\".\").pop()?.toLowerCase();\n switch (ext) {\n // Images\n case \"jpg\":\n case \"jpeg\":\n return \"image/jpeg\";\n case \"png\":\n return \"image/png\";\n case \"gif\":\n return \"image/gif\";\n case \"webp\":\n return \"image/webp\";\n case \"avif\":\n return \"image/avif\";\n case \"heic\":\n case \"heif\":\n return \"image/heic\";\n case \"bmp\":\n return \"image/bmp\";\n case \"tiff\":\n case \"tif\":\n return \"image/tiff\";\n case \"ico\":\n return \"image/x-icon\";\n case \"svg\":\n return \"image/svg+xml\";\n\n // Videos\n case \"mp4\":\n case \"m4v\":\n return \"video/mp4\";\n case \"webm\":\n return \"video/webm\";\n case \"avi\":\n return \"video/x-msvideo\";\n case \"mov\":\n return \"video/quicktime\";\n case \"mkv\":\n return \"video/x-matroska\";\n\n // Audio\n case \"mp3\":\n return \"audio/mpeg\";\n case \"wav\":\n return \"audio/wav\";\n case \"flac\":\n return \"audio/flac\";\n case \"ogg\":\n return \"audio/ogg\";\n case \"m4a\":\n return \"audio/mp4\";\n\n // Documents\n case \"pdf\":\n return \"application/pdf\";\n case \"docx\":\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n case \"xlsx\":\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n case \"pptx\":\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n\n // Archives\n case \"zip\":\n return \"application/zip\";\n case \"rar\":\n return \"application/x-rar-compressed\";\n case \"7z\":\n return \"application/x-7z-compressed\";\n case \"gz\":\n case \"gzip\":\n return \"application/gzip\";\n case \"tar\":\n return \"application/x-tar\";\n\n // Fonts\n case \"woff\":\n return \"font/woff\";\n case \"woff2\":\n return \"font/woff2\";\n case \"ttf\":\n return \"font/ttf\";\n case \"otf\":\n return \"font/otf\";\n\n // Text\n case \"txt\":\n return \"text/plain\";\n case \"json\":\n return \"application/json\";\n case \"xml\":\n return \"application/xml\";\n case \"html\":\n case \"htm\":\n return \"text/html\";\n case \"css\":\n return \"text/css\";\n case \"js\":\n return \"application/javascript\";\n case \"csv\":\n return \"text/csv\";\n\n default:\n return \"application/octet-stream\";\n }\n }\n\n return \"application/octet-stream\";\n};\n\n/**\n * Compare two MIME types with lenient matching.\n * Matches on major type 
(e.g., \"image/*\") to allow for minor variations.\n *\n * @param declared - MIME type provided by client\n * @param detected - MIME type detected from file content\n * @returns true if MIME types are compatible\n *\n * @example\n * compareMimeTypes(\"image/png\", \"image/apng\") // true\n * compareMimeTypes(\"image/jpeg\", \"image/png\") // true (both images)\n * compareMimeTypes(\"image/png\", \"application/pdf\") // false\n */\nexport function compareMimeTypes(declared: string, detected: string): boolean {\n // Exact match\n if (declared === detected) {\n return true;\n }\n\n // Extract major types (e.g., \"image\" from \"image/png\")\n const declaredMajor = declared.split(\"/\")[0];\n const detectedMajor = detected.split(\"/\")[0];\n\n // Compare major types for lenient matching\n return declaredMajor === detectedMajor;\n}\n","import { Effect, Metric, MetricBoundaries, Option } from \"effect\";\nimport {\n type EventEmitter,\n type InputFile,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport type { GenerateIdShape } from \"../utils/generate-id\";\n\n/**\n * Captures the current Effect trace context for distributed tracing.\n *\n * Uses Effect's `currentSpan` to get the active span, which is more reliable\n * than OpenTelemetry's `trace.getActiveSpan()` when using @effect/opentelemetry\n * because Effect manages its own span context that may not be synchronized\n * with OpenTelemetry's global context.\n *\n * @returns Effect that yields TraceContext if there's an active span, undefined otherwise\n */\nconst captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n> = Effect.gen(function* () {\n const spanOption = yield* Effect.currentSpan.pipe(Effect.option);\n return Option.match(spanOption, {\n onNone: () => undefined,\n onSome: (span) => ({\n traceId: span.traceId,\n spanId: span.spanId,\n traceFlags: span.sampled ? 
1 : 0,\n }),\n });\n});\n\n/**\n * Creates a new upload and initializes it in the storage system.\n *\n * This function handles the initial upload creation process including:\n * - Generating a unique upload ID\n * - Routing to appropriate data store based on storage ID\n * - Creating the upload record in the data store\n * - Storing upload metadata in KV store\n * - Emitting upload started events\n * - Parsing and validating metadata\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for upload creation, file sizes, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param inputFile - Input file configuration including storage, size, type, etc.\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for upload lifecycle events\n * @param generateId - ID generator for creating unique upload identifiers\n * @returns Effect that yields the created UploadFile\n *\n * @example\n * ```typescript\n * // Create a new upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\",\n * metadata: JSON.stringify({ category: \"photos\" })\n * };\n *\n * const createEffect = createUpload(\n * inputFile,\n * \"client-123\",\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter,\n * generateId\n * }\n * );\n *\n * // Run with dependencies\n * const upload = await Effect.runPromise(\n * createEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer),\n * Effect.provide(generateIdLayer)\n * )\n * );\n * ```\n */\nexport const createUpload = (\n inputFile: InputFile,\n clientId: string | null,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId: GenerateIdShape;\n },\n) =>\n Effect.gen(function* () {\n // Capture the parent \"upload\" span's trace context FIRST\n // This allows subsequent chunk uploads to be siblings of upload-create\n // under the same parent \"upload\" span\n const traceContext = yield* captureTraceContextEffect;\n const creationDate = new Date().toISOString();\n\n // Now run the actual upload creation inside a child span\n const fileCreated = yield* Effect.gen(function* () {\n // Get datastore using Effect\n const dataStore = yield* dataStoreService.getDataStore(\n inputFile.storageId,\n clientId,\n );\n\n const id = yield* generateId.generateId();\n const { size, type, fileName, lastModified, metadata, flow } = inputFile;\n\n let parsedMetadata: Record<string, string> = {};\n if (metadata) {\n try {\n parsedMetadata = JSON.parse(metadata) as Record<string, string>;\n } catch {\n parsedMetadata = {};\n }\n }\n\n const metadataObject: Record<string, string> = {\n ...parsedMetadata,\n type,\n fileName: fileName ?? 
\"\",\n };\n if (lastModified) {\n metadataObject.lastModified = lastModified.toString();\n }\n\n const file: UploadFile = {\n id,\n size,\n metadata: metadataObject,\n offset: 0,\n creationDate,\n storage: {\n id: inputFile.storageId,\n type,\n path: \"\",\n bucket: dataStore.bucket,\n },\n flow,\n traceContext,\n };\n\n // Create file using Effect\n const created = yield* dataStore.create(file);\n\n // Store in KV store\n yield* kvStore.set(id, created);\n\n // Emit event\n yield* eventEmitter.emit(id, {\n type: UploadEventType.UPLOAD_STARTED,\n data: created,\n flow: created.flow,\n });\n\n return created;\n }).pipe(\n // upload-create is a CHILD span of the parent \"upload\" span\n Effect.withSpan(\"upload-create\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n );\n\n return fileCreated;\n }).pipe(\n // Parent \"upload\" span wraps the entire upload lifecycle\n // upload-create and upload-chunk will be children of this span\n Effect.withSpan(\"upload\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n // Track upload creation metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment upload created counter\n yield* Metric.increment(\n Metric.counter(\"upload_created_total\", {\n description: \"Total number of uploads created\",\n }),\n );\n\n // Record file size\n if (file.size) {\n const fileSizeHistogram = Metric.histogram(\n \"upload_file_size_bytes\",\n MetricBoundaries.exponential({\n start: 1024,\n factor: 2,\n count: 25,\n }),\n );\n yield* Metric.update(fileSizeHistogram, file.size);\n }\n\n // Track active uploads gauge\n const activeUploadsGauge = Metric.gauge(\"active_uploads\");\n yield* Metric.increment(activeUploadsGauge);\n }),\n ),\n // Add structured logging\n Effect.tap((file) =>\n Effect.logInfo(\"Upload created\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Log error\n yield* Effect.logError(\"Upload creation failed\").pipe(\n Effect.annotateLogs({\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.storage_id\": inputFile.storageId,\n error: String(error),\n }),\n );\n\n // Track failed upload metric\n yield* Metric.increment(\n Metric.counter(\"upload_failed_total\", {\n description: \"Total number of uploads that failed\",\n }),\n );\n }),\n ),\n );\n","import { Stream } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Converts a ReadableStream to an Effect Stream.\n *\n * This utility function wraps a ReadableStream in an Effect Stream, providing\n * proper error handling and integration with the Effect ecosystem. 
It's used\n * throughout the upload system to convert raw streams into Effect-compatible\n * streams for processing.\n *\n * The function handles:\n * - Stream conversion with proper error mapping\n * - UploadistaError creation for stream errors\n * - Integration with Effect Stream processing\n *\n * @param data - The ReadableStream to convert\n * @returns Effect Stream that can be processed with Effect operations\n *\n * @example\n * ```typescript\n * // Convert a file stream to Effect Stream\n * const fileStream = new ReadableStream(...);\n * const effectStream = convertToStream(fileStream);\n *\n * // Process with Effect operations\n * const processedStream = effectStream.pipe(\n * Stream.map((chunk) => processChunk(chunk)),\n * Stream.filter((chunk) => chunk.length > 0)\n * );\n *\n * // Run the stream\n * await Stream.runForEach(processedStream, (chunk) =>\n * Effect.logInfo(`Processed chunk: ${chunk.length} bytes`)\n * );\n * ```\n */\nexport function convertToStream<T>(data: ReadableStream<T>) {\n return Stream.fromReadableStream(\n () => data,\n (error) =>\n new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: String(error),\n }),\n );\n}\n","import { Effect, Ref } from \"effect\";\nimport { UploadistaError } from \"../errors\";\nimport { StreamLimiterEffect } from \"../streams/stream-limiter\";\nimport type { DataStore, UploadEvent, UploadFile } from \"../types\";\nimport { type EventEmitter, UploadEventType } from \"../types\";\nimport { convertToStream } from \"./convert-to-stream\";\n\n/**\n * Configuration options for writing data to a data store.\n *\n * @property data - The stream of data to write\n * @property upload - Upload file metadata\n * @property dataStore - Target data store for writing\n * @property maxFileSize - Maximum allowed file size in bytes\n * @property controller - AbortController for cancellation\n * @property eventEmitter - Event emitter for progress tracking\n * @property uploadProgressInterval - Progress emission interval in milliseconds (default: 200)\n */\ntype WriteToStoreOptions = {\n data: ReadableStream<Uint8Array>;\n upload: UploadFile;\n dataStore: DataStore<UploadFile>;\n maxFileSize: number;\n controller: AbortController;\n eventEmitter: EventEmitter<UploadEvent>;\n uploadProgressInterval?: number;\n};\n\n/**\n * Writes data stream to a data store with progress tracking and size limits.\n *\n * This function handles the core data writing logic including:\n * - Stream conversion and processing\n * - File size validation and limiting\n * - Progress tracking with throttled events\n * - Abort signal handling for cancellation\n * - Error handling and cleanup\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Structured logging for debugging and monitoring\n * - Progress event emission with throttling\n * - Error handling with proper UploadistaError types\n *\n * @param data - The stream of data to write to storage\n * @param upload - Upload file metadata containing ID, offset, etc.\n * @param dataStore - Target data store for writing the data\n * @param maxFileSize - Maximum allowed file size in bytes\n * @param controller - AbortController for handling cancellation\n * @param eventEmitter - Event emitter for progress tracking\n * @param uploadProgressInterval - Progress emission interval in milliseconds (default: 200)\n * @returns Effect that yields the number of bytes written\n *\n * @example\n * ```typescript\n * // Write data to store with progress 
tracking\n * const writeEffect = writeToStore({\n * data: fileStream,\n * upload: uploadMetadata,\n * dataStore: s3DataStore,\n * maxFileSize: 100_000_000, // 100MB\n * controller: abortController,\n * eventEmitter: progressEmitter,\n * uploadProgressInterval: 500 // Emit progress every 500ms\n * });\n *\n * // Run with error handling\n * const bytesWritten = await Effect.runPromise(\n * writeEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to write to store\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport function writeToStore({\n data,\n upload,\n dataStore,\n maxFileSize,\n controller,\n eventEmitter,\n uploadProgressInterval = 200,\n}: WriteToStoreOptions) {\n return Effect.gen(function* () {\n const stream = convertToStream(data);\n // Check if already aborted\n if (controller.signal.aborted) {\n return yield* Effect.fail(UploadistaError.fromCode(\"ABORTED\"));\n }\n\n // Create an AbortController to manage the stream pipeline\n const abortController = new AbortController();\n const { signal } = abortController;\n\n // Set up abort handling\n const onAbort = () => {\n // stream.cancel();\n abortController.abort();\n };\n\n controller.signal.addEventListener(\"abort\", onAbort, { once: true });\n\n return yield* Effect.acquireUseRelease(\n Effect.sync(() => ({ signal, onAbort })),\n ({ signal: _signal }) =>\n Effect.gen(function* () {\n // Create a ref to track the last progress emission time for throttling\n const lastEmitTime = yield* Ref.make(0);\n\n // Create the stream limiter\n const limiter = StreamLimiterEffect.limit({\n maxSize: maxFileSize,\n });\n\n // Pipe the data through the limiter\n const limitedStream = limiter(stream);\n\n // Write to the data store with progress tracking\n const offset = yield* dataStore.write(\n {\n stream: limitedStream,\n file_id: upload.id,\n offset: upload.offset,\n },\n {\n onProgress: (newOffset: number) => {\n // Simple throttling using timestamp check\n const now = Date.now();\n Ref.get(lastEmitTime)\n .pipe(\n Effect.flatMap((lastTime) => {\n if (now - lastTime >= uploadProgressInterval) {\n return Effect.gen(function* () {\n yield* Ref.set(lastEmitTime, now);\n yield* eventEmitter.emit(upload.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: upload.id,\n progress: newOffset,\n total: upload.size ?? 0,\n },\n flow: upload.flow,\n });\n });\n }\n return Effect.void;\n }),\n Effect.runPromise,\n )\n .catch(() => {\n // Ignore errors during progress emission\n });\n },\n },\n );\n\n return offset;\n }).pipe(\n Effect.catchAll((error) => {\n if (error instanceof Error && error.name === \"AbortError\") {\n return Effect.fail(UploadistaError.fromCode(\"ABORTED\"));\n }\n if (error instanceof UploadistaError) {\n return Effect.fail(error);\n }\n return Effect.fail(\n UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error }),\n );\n }),\n ),\n ({ onAbort }) =>\n Effect.sync(() => {\n controller.signal.removeEventListener(\"abort\", onAbort);\n }),\n );\n }).pipe(\n // Add tracing span for write operation\n Effect.withSpan(\"upload-write-to-store\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.offset\": upload.offset.toString(),\n \"upload.max_file_size\": maxFileSize.toString(),\n \"upload.file_size\": upload.size?.toString() ?? 
\"0\",\n },\n }),\n // Add structured logging for write operation\n Effect.tap((offset) =>\n Effect.logDebug(\"Data written to store\").pipe(\n Effect.annotateLogs({\n \"upload.id\": upload.id,\n \"write.offset\": offset.toString(),\n \"write.bytes_written\": (offset - upload.offset).toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to write to store\").pipe(\n Effect.annotateLogs({\n \"upload.id\": upload.id,\n \"upload.offset\": upload.offset.toString(),\n error: error instanceof UploadistaError ? error.code : String(error),\n }),\n ),\n ),\n );\n}\n","import { Effect, Metric, MetricBoundaries, Tracer } from \"effect\";\nimport { UploadistaError } from \"../errors/uploadista-error\";\nimport {\n type DataStore,\n type EventEmitter,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport { computeChecksum } from \"../utils/checksum\";\nimport { compareMimeTypes, detectMimeType } from \"./mime\";\nimport { writeToStore } from \"./write-to-store\";\n\n/**\n * Creates an ExternalSpan from stored trace context.\n * Used for linking chunk uploads to the original upload trace.\n */\nfunction createExternalSpan(traceContext: UploadFileTraceContext) {\n return Tracer.externalSpan({\n traceId: traceContext.traceId,\n spanId: traceContext.spanId,\n sampled: traceContext.traceFlags === 1,\n });\n}\n\n/**\n * Creates an \"upload-complete\" span Effect that captures the full upload duration.\n * This span is a sibling of upload-create and upload-chunk under the parent \"upload\" span.\n *\n * Note: The span's visual duration in tracing UIs will be short (instant), but the\n * actual upload duration is captured in the \"upload.total_duration_ms\" attribute.\n *\n * @param file - The completed upload file\n * @param parentSpan - The parent span to link to\n * @returns Effect that creates and completes the span\n */\nconst createUploadCompleteSpanEffect = (\n file: UploadFile,\n parentSpan: Tracer.ExternalSpan,\n): Effect.Effect<void> => {\n const creationTime = new Date(file.creationDate as string).getTime();\n const totalDurationMs = Date.now() - creationTime;\n\n return Effect.void.pipe(\n Effect.withSpan(\"upload-complete\", {\n attributes: {\n \"upload.id\": file.id,\n \"upload.size\": file.size ?? 0,\n \"upload.total_duration_ms\": totalDurationMs,\n \"upload.storage_id\": file.storage.id,\n \"upload.file_name\": file.metadata?.fileName ?? 
\"unknown\",\n \"upload.creation_date\": file.creationDate as string,\n \"upload.completion_date\": new Date().toISOString(),\n },\n parent: parentSpan,\n }),\n );\n};\n\n/**\n * Uploads a chunk of data for an existing upload.\n *\n * This function handles the core chunk upload logic including:\n * - Retrieving upload metadata from KV store\n * - Routing to appropriate data store based on storage ID\n * - Writing chunk data to storage with progress tracking\n * - Updating upload offset and metadata\n * - Emitting progress events\n * - Validating upload completion (checksum, MIME type)\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for chunk size, throughput, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param uploadId - Unique identifier for the upload\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param chunk - ReadableStream containing the chunk data to upload\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for progress and validation events\n * @returns Effect that yields the updated UploadFile with new offset\n *\n * @example\n * ```typescript\n * // Upload a chunk for an existing upload\n * const uploadChunkEffect = uploadChunk(\n * \"upload-123\",\n * \"client-456\",\n * chunkStream,\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter\n * }\n * );\n *\n * // Run with dependencies\n * const result = await Effect.runPromise(\n * uploadChunkEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * )\n * );\n * ```\n */\nexport const uploadChunk = (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n },\n) =>\n Effect.gen(function* () {\n // Get file from KV store first to check for trace context\n const file = yield* kvStore.get(uploadId);\n\n // Create external span from stored trace context if available\n // This links chunk uploads to the original upload trace\n const parentSpan = file.traceContext\n ? createExternalSpan(file.traceContext)\n : undefined;\n\n // Core chunk processing logic\n const processChunk = Effect.gen(function* () {\n // Get datastore\n const dataStore = yield* dataStoreService.getDataStore(\n file.storage.id,\n clientId,\n );\n\n // Note: AbortController could be used for cancellation if needed\n\n // Write to store using writeToStore Effect\n const controller = new AbortController();\n\n const chunkSize = yield* writeToStore({\n dataStore,\n data: chunk,\n upload: file,\n maxFileSize: 100_000_000,\n controller,\n uploadProgressInterval: 200,\n eventEmitter,\n });\n\n file.offset = chunkSize;\n\n // Update KV store\n yield* kvStore.set(uploadId, file);\n\n // Emit progress event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: file.id,\n progress: file.offset,\n total: file.size ?? 
0,\n },\n flow: file.flow,\n });\n\n // Check if upload is complete and run validation\n if (file.size && file.offset === file.size) {\n yield* validateUpload({\n file,\n dataStore,\n eventEmitter,\n });\n\n // Create \"upload-complete\" span that captures the full upload duration\n // This span shows the total time from upload creation to completion\n if (file.traceContext) {\n const completeParentSpan = createExternalSpan(file.traceContext);\n yield* createUploadCompleteSpanEffect(file, completeParentSpan);\n }\n }\n\n return file;\n }).pipe(\n // Add tracing span for chunk upload with parent from stored trace context\n Effect.withSpan(\"upload-chunk\", {\n attributes: {\n \"upload.id\": uploadId,\n \"chunk.upload_id\": uploadId,\n \"upload.has_trace_context\": file.traceContext ? \"true\" : \"false\",\n },\n parent: parentSpan,\n }),\n );\n\n return yield* processChunk;\n }).pipe(\n // Track chunk upload metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment chunk uploaded counter\n yield* Metric.increment(\n Metric.counter(\"chunk_uploaded_total\", {\n description: \"Total number of chunks uploaded\",\n }),\n );\n\n // Record chunk size\n const chunkSize = file.offset;\n const chunkSizeHistogram = Metric.histogram(\n \"chunk_size_bytes\",\n MetricBoundaries.linear({\n start: 262_144,\n width: 262_144,\n count: 20,\n }),\n );\n yield* Metric.update(chunkSizeHistogram, chunkSize);\n\n // Update throughput gauge\n if (file.size && file.size > 0) {\n const throughput = chunkSize; // bytes processed\n const throughputGauge = Metric.gauge(\n \"upload_throughput_bytes_per_second\",\n );\n yield* Metric.set(throughputGauge, throughput);\n }\n }),\n ),\n // Add structured logging for chunk progress\n Effect.tap((file) =>\n Effect.logDebug(\"Chunk uploaded\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"chunk.size\": file.offset.toString(),\n \"chunk.progress\":\n file.size && file.size > 0\n ? ((file.offset / file.size) * 100).toFixed(2)\n : \"0\",\n \"upload.total_size\": file.size?.toString() ?? 
\"0\",\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Chunk upload failed\").pipe(\n Effect.annotateLogs({\n \"upload.id\": uploadId,\n error: String(error),\n }),\n ),\n ),\n );\n\n/**\n * Validates an upload after completion.\n *\n * Performs comprehensive validation including:\n * - Checksum validation (if provided) using the specified algorithm\n * - MIME type validation (if required by data store capabilities)\n * - File size validation against data store limits\n *\n * Validation results are emitted as events and failures result in:\n * - Cleanup of uploaded data from storage\n * - Removal of metadata from KV store\n * - Appropriate error responses\n *\n * The function respects data store capabilities for validation limits\n * and provides detailed error information for debugging.\n *\n * @param file - The upload file to validate\n * @param dataStore - Data store containing the uploaded file\n * @param eventEmitter - Event emitter for validation events\n * @returns Effect that completes validation or fails with UploadistaError\n *\n * @example\n * ```typescript\n * // Validate upload after completion\n * const validationEffect = validateUpload({\n * file: completedUpload,\n * dataStore: s3DataStore,\n * eventEmitter: progressEmitter\n * });\n *\n * // Run validation\n * await Effect.runPromise(validationEffect);\n * ```\n */\nconst validateUpload = ({\n file,\n dataStore,\n eventEmitter,\n}: {\n file: UploadFile;\n dataStore: DataStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n}): Effect.Effect<void, UploadistaError, never> =>\n Effect.gen(function* () {\n const capabilities = dataStore.getCapabilities();\n\n // Check if file exceeds max validation size\n if (\n capabilities.maxValidationSize &&\n file.size &&\n file.size > capabilities.maxValidationSize\n ) {\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_WARNING,\n data: {\n id: file.id,\n message: `File size (${file.size} bytes) exceeds max validation size (${capabilities.maxValidationSize} bytes). Validation skipped.`,\n },\n flow: file.flow,\n });\n return;\n }\n\n // Read file from datastore for validation\n const fileBytes = yield* dataStore.read(file.id);\n\n // Validate checksum if provided\n if (file.checksum && file.checksumAlgorithm) {\n const computedChecksum = yield* computeChecksum(\n fileBytes,\n file.checksumAlgorithm,\n );\n\n if (computedChecksum !== file.checksum) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"checksum_mismatch\",\n expected: file.checksum,\n actual: computedChecksum,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with checksum mismatch error\n return yield* UploadistaError.fromCode(\"CHECKSUM_MISMATCH\", {\n body: `Checksum validation failed. 
Expected: ${file.checksum}, Got: ${computedChecksum}`,\n details: {\n uploadId: file.id,\n expected: file.checksum,\n actual: computedChecksum,\n algorithm: file.checksumAlgorithm,\n },\n }).toEffect();\n }\n\n // Emit checksum validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"checksum\",\n algorithm: file.checksumAlgorithm,\n },\n flow: file.flow,\n });\n }\n\n // Validate MIME type if required by capabilities\n if (capabilities.requiresMimeTypeValidation) {\n const detectedMimeType = detectMimeType(fileBytes);\n const declaredMimeType = file.metadata?.type as string | undefined;\n\n if (\n declaredMimeType &&\n !compareMimeTypes(declaredMimeType, detectedMimeType)\n ) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"mimetype_mismatch\",\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with MIME type mismatch error\n return yield* UploadistaError.fromCode(\"MIMETYPE_MISMATCH\", {\n body: `MIME type validation failed. Expected: ${declaredMimeType}, Detected: ${detectedMimeType}`,\n details: {\n uploadId: file.id,\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n }).toEffect();\n }\n\n // Emit MIME type validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"mimetype\",\n },\n flow: file.flow,\n });\n }\n }).pipe(\n Effect.withSpan(\"validate-upload\", {\n attributes: {\n \"upload.id\": file.id,\n \"validation.checksum_provided\": file.checksum ? \"true\" : \"false\",\n \"validation.mime_required\": dataStore.getCapabilities()\n .requiresMimeTypeValidation\n ? 
\"true\"\n : \"false\",\n },\n }),\n );\n","import { Effect, Metric } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Fetches a file from a remote URL.\n *\n * This function handles HTTP requests to remote URLs for file uploads,\n * including proper error handling, metrics tracking, and observability.\n *\n * Features:\n * - HTTP request with proper error handling\n * - Effect tracing for performance monitoring\n * - Metrics tracking for URL-based uploads\n * - Structured logging for debugging\n * - Response validation and error reporting\n *\n * @param url - The remote URL to fetch the file from\n * @returns Effect that yields the Response object\n *\n * @example\n * ```typescript\n * // Fetch a file from URL\n * const fetchEffect = fetchFile(\"https://example.com/image.jpg\");\n *\n * // Run with error handling\n * const response = await Effect.runPromise(\n * fetchEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to fetch file\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport const fetchFile = (url: string) => {\n return Effect.tryPromise({\n try: async () => {\n return await fetch(url);\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for URL fetch\n Effect.withSpan(\"upload-fetch-url\", {\n attributes: {\n \"upload.url\": url,\n \"upload.operation\": \"fetch\",\n },\n }),\n // Track URL fetch metrics\n Effect.tap((response) =>\n Effect.gen(function* () {\n // Increment URL upload counter\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_total\", {\n description: \"Total number of URL-based uploads\",\n }),\n );\n\n // Track success/failure\n if (response.ok) {\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_success_total\", {\n description: \"Total number of successful URL-based uploads\",\n }),\n );\n }\n }),\n ),\n // Add structured logging\n Effect.tap((response) =>\n Effect.logInfo(\"URL fetch completed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n \"response.status\": response.status.toString(),\n \"response.ok\": response.ok.toString(),\n \"response.content_length\":\n response.headers.get(\"content-length\") ?? 
\"unknown\",\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Track failed URL upload\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_failed_total\", {\n description: \"Total number of failed URL-based uploads\",\n }),\n );\n\n // Log error\n yield* Effect.logError(\"URL fetch failed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n error: String(error),\n }),\n );\n }),\n ),\n );\n};\n\n/**\n * Converts a Response object to an ArrayBuffer.\n *\n * This function safely converts HTTP response data to binary format\n * for processing and storage, with proper error handling and observability.\n *\n * Features:\n * - Safe conversion from Response to ArrayBuffer\n * - Effect tracing for performance monitoring\n * - Structured logging for debugging\n * - Error handling with proper UploadistaError types\n *\n * @param response - The HTTP Response object to convert\n * @returns Effect that yields the ArrayBuffer data\n *\n * @example\n * ```typescript\n * // Convert response to buffer\n * const bufferEffect = arrayBuffer(response);\n *\n * // Use in upload pipeline\n * const buffer = await Effect.runPromise(\n * bufferEffect.pipe(\n * Effect.tap((buffer) =>\n * Effect.logInfo(`Buffer size: ${buffer.byteLength} bytes`)\n * )\n * )\n * );\n * ```\n */\nexport const arrayBuffer = (response: Response) => {\n return Effect.tryPromise({\n try: async () => {\n return await response.arrayBuffer();\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for buffer conversion\n Effect.withSpan(\"upload-convert-to-buffer\", {\n attributes: {\n \"upload.operation\": \"arrayBuffer\",\n },\n }),\n // Add structured logging\n Effect.tap((buffer) =>\n Effect.logDebug(\"Response converted to array buffer\").pipe(\n Effect.annotateLogs({\n \"buffer.size\": buffer.byteLength.toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to convert response to array buffer\").pipe(\n Effect.annotateLogs({\n error: String(error),\n }),\n ),\n ),\n );\n};\n","import { Context, Effect, Layer, Stream } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n EventEmitter,\n InputFile,\n KvStore,\n Middleware,\n StreamingConfig,\n UploadEvent,\n UploadFile,\n WebSocketConnection,\n} from \"../types\";\nimport {\n UploadEventEmitter,\n UploadEventType,\n UploadFileDataStores,\n UploadFileKVStore,\n} from \"../types\";\nimport { GenerateId, type GenerateIdShape } from \"../utils/generate-id\";\nimport { createUpload } from \"./create-upload\";\nimport { uploadChunk } from \"./upload-chunk\";\nimport { arrayBuffer, fetchFile } from \"./upload-url\";\n\n/**\n * Legacy configuration options for UploadEngine.\n *\n * @deprecated Use Effect Layers instead of this configuration object.\n * This type is kept for backward compatibility.\n *\n * @property dataStore - DataStore instance or factory function\n * @property kvStore - KV store for upload metadata\n * @property eventEmitter - Event emitter for upload progress\n * @property generateId - Optional ID generator (defaults to UUID)\n * @property middlewares - Optional request middlewares\n * @property withTracing - Enable Effect tracing for debugging\n */\nexport type UploadEngineOptions = {\n dataStore:\n | ((storageId: string) => Promise<DataStore<UploadFile>>)\n | 
DataStore<UploadFile>;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId?: GenerateIdShape;\n middlewares?: Middleware[];\n withTracing?: boolean;\n};\n\n/**\n * UploadEngine service interface.\n *\n * This is the core upload handling service that provides all file upload operations.\n * It manages upload lifecycle, resumable uploads, progress tracking, and storage integration.\n *\n * All operations return Effect types for composable, type-safe error handling.\n *\n * @property createUpload - Initiates a new upload and returns metadata\n * @property uploadChunk - Uploads a chunk of data for an existing upload\n * @property getCapabilities - Returns storage backend capabilities\n * @property upload - Complete upload in one operation (create + upload data)\n * @property uploadFromUrl - Uploads a file from a remote URL\n * @property getUpload - Retrieves upload metadata by ID\n * @property read - Reads the complete uploaded file data\n * @property delete - Deletes an upload and its data\n * @property subscribeToUploadEvents - Subscribes WebSocket to upload progress events\n * @property unsubscribeFromUploadEvents - Unsubscribes from upload events\n *\n * @example\n * ```typescript\n * // Basic upload flow\n * const program = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * // 1. Create upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\"\n * };\n * const upload = yield* server.createUpload(inputFile, \"client123\");\n *\n * // 2. Upload chunks\n * const chunk = new ReadableStream(...);\n * const updated = yield* server.uploadChunk(upload.id, \"client123\", chunk);\n *\n * // 3. Read the uploaded file\n * const data = yield* server.read(upload.id, \"client123\");\n *\n * return upload;\n * });\n *\n * // Upload with WebSocket progress tracking\n * const uploadWithProgress = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * // Subscribe to progress events\n * yield* server.subscribeToUploadEvents(uploadId, websocket);\n *\n * // Upload (events will be emitted automatically)\n * const result = yield* server.upload(inputFile, clientId, stream);\n *\n * // Unsubscribe when done\n * yield* server.unsubscribeFromUploadEvents(uploadId);\n *\n * return result;\n * });\n *\n * // Upload from URL\n * const urlUpload = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 0, // Unknown initially\n * type: \"image/png\",\n * fileName: \"remote-image.png\"\n * };\n *\n * const upload = yield* server.uploadFromUrl(\n * inputFile,\n * \"client123\",\n * \"https://example.com/image.png\"\n * );\n *\n * return upload;\n * });\n * ```\n */\nexport type UploadEngineShape = {\n createUpload: (\n inputFile: InputFile,\n clientId: string | null,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getCapabilities: (\n storageId: string,\n clientId: string | null,\n ) => Effect.Effect<DataStoreCapabilities, UploadistaError>;\n upload: (\n file: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getUpload: 
(uploadId: string) => Effect.Effect<UploadFile, UploadistaError>;\n /**\n * Reads the complete uploaded file data as bytes (buffered mode).\n * For large files, consider using readStream() for memory efficiency.\n */\n read: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Falls back to buffered read if the underlying DataStore doesn't support streaming.\n *\n * @param uploadId - The unique identifier of the upload to read\n * @param clientId - Client identifier for multi-tenant routing\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n *\n * @example\n * ```typescript\n * const server = yield* UploadEngine;\n * const stream = yield* server.readStream(uploadId, clientId, { chunkSize: 65536 });\n * // Process stream chunk by chunk with bounded memory\n * yield* Stream.runForEach(stream, (chunk) => processChunk(chunk));\n * ```\n */\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) => Effect.Effect<\n Stream.Stream<Uint8Array, UploadistaError>,\n UploadistaError\n >;\n /**\n * Uploads file content from a stream with unknown final size.\n * Creates upload with deferred length, streams content to storage,\n * and updates the upload record with final size when complete.\n *\n * Falls back to buffered upload if the underlying DataStore\n * doesn't support streaming writes.\n *\n * @param file - Input file configuration (size is optional)\n * @param clientId - Client identifier for multi-tenant routing\n * @param stream - Effect Stream of byte chunks to upload\n * @returns The completed UploadFile with final size\n *\n * @example\n * ```typescript\n * const server = yield* UploadEngine;\n * const result = yield* server.uploadStream(\n * {\n * storageId: \"s3-production\",\n * type: \"image/webp\",\n * uploadLengthDeferred: true,\n * fileName: \"optimized.webp\",\n * },\n * clientId,\n * transformedStream,\n * );\n * console.log(`Uploaded ${result.size} bytes`);\n * ```\n */\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n delete: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<void, UploadistaError>;\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n unsubscribeFromUploadEvents: (\n uploadId: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n/**\n * Effect-TS context tag for the UploadEngine service.\n *\n * Use this tag to access the UploadEngine in an Effect context.\n * The server must be provided via a Layer or dependency injection.\n *\n * @example\n * ```typescript\n * // Access UploadEngine in an Effect\n * const uploadEffect = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * const upload = yield* server.createUpload(inputFile, clientId);\n * return upload;\n * });\n *\n * // Provide UploadEngine layer\n * const program = uploadEffect.pipe(\n * Effect.provide(uploadEngine),\n * Effect.provide(uploadFileKvStore),\n * Effect.provide(dataStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * );\n * ```\n */\nexport class UploadEngine extends Context.Tag(\"UploadEngine\")<\n UploadEngine,\n UploadEngineShape\n>() {}\n\n/**\n * 
Creates the UploadEngine implementation.\n *\n * This function constructs the UploadEngine service by composing all required\n * dependencies (KV store, data stores, event emitter, ID generator). It implements\n * all upload operations defined in UploadEngineShape.\n *\n * The server automatically handles:\n * - Upload lifecycle management (create, resume, complete)\n * - Progress tracking and event emission\n * - Storage backend routing based on storageId\n * - Error handling with proper UploadistaError types\n *\n * @returns An Effect that yields the UploadEngineShape implementation\n *\n * @example\n * ```typescript\n * // Create a custom UploadEngine layer\n * const myUploadEngine = Layer.effect(\n * UploadEngine,\n * createUploadEngine()\n * );\n *\n * // Use in a program\n * const program = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * // Use server operations...\n * }).pipe(Effect.provide(myUploadEngine));\n * ```\n */\nexport function createUploadEngine() {\n return Effect.gen(function* () {\n const kvStore = yield* UploadFileKVStore;\n const eventEmitter = yield* UploadEventEmitter;\n const generateId = yield* GenerateId;\n const dataStoreService = yield* UploadFileDataStores;\n\n return {\n upload: (\n inputFile: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) =>\n Effect.gen(function* () {\n const fileCreated = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n });\n return yield* uploadChunk(fileCreated.id, clientId, stream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) =>\n Effect.gen(function* () {\n const response = yield* fetchFile(url);\n const buffer = yield* arrayBuffer(response);\n\n // Create a readable stream from the buffer\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n },\n });\n\n const fileCreated = yield* createUpload(\n { ...inputFile, size: buffer.byteLength },\n clientId,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n },\n );\n return yield* uploadChunk(fileCreated.id, clientId, stream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n createUpload: (inputFile: InputFile, clientId: string | null) =>\n Effect.gen(function* () {\n const fileCreated = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n });\n return fileCreated;\n }),\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) =>\n Effect.gen(function* () {\n const file = yield* uploadChunk(uploadId, clientId, chunk, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n return file;\n }),\n getUpload: (uploadId: string) =>\n Effect.gen(function* () {\n const file = yield* kvStore.get(uploadId);\n return file;\n }),\n read: (uploadId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n return yield* dataStore.read(uploadId);\n }),\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n\n // Check if the DataStore supports streaming 
reads\n const capabilities = dataStore.getCapabilities();\n if (capabilities.supportsStreamingRead && dataStore.readStream) {\n // Use native streaming\n yield* Effect.logDebug(`Using streaming read for file ${uploadId}`);\n return yield* dataStore.readStream(uploadId, config);\n }\n\n // Fallback: read entire file and convert to stream\n yield* Effect.logDebug(\n `Falling back to buffered read for file ${uploadId} (streaming not supported)`,\n );\n const bytes = yield* dataStore.read(uploadId);\n\n // Convert buffered bytes to a single-chunk stream\n return Stream.succeed(bytes);\n }),\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) =>\n Effect.gen(function* () {\n // Get the data store for this storage\n const dataStore = yield* dataStoreService.getDataStore(\n file.storageId,\n clientId,\n );\n\n // Check if the DataStore supports streaming writes\n const capabilities = dataStore.getCapabilities();\n\n // Generate upload ID\n const uploadId = yield* generateId.generateId();\n\n if (capabilities.supportsStreamingWrite && dataStore.writeStream) {\n // Use native streaming write - DO NOT call createUpload as it would\n // create an S3 multipart upload that we won't use (writeStream creates its own)\n yield* Effect.logDebug(\n `Using streaming write for file ${uploadId}`,\n );\n\n // Parse metadata\n const metadata =\n typeof file.metadata === \"string\"\n ? JSON.parse(file.metadata)\n : file.metadata || {};\n\n // Convert metadata to Record<string, string> if present\n const stringMetadata = Object.fromEntries(\n Object.entries(metadata).map(([k, v]) => [k, String(v)]),\n );\n\n // Create initial upload record in KV store (without creating S3 multipart upload)\n const initialUpload: UploadFile = {\n id: uploadId,\n offset: 0,\n size: file.size ?? 0,\n storage: {\n id: file.storageId,\n type: dataStore.getCapabilities().supportsStreamingWrite\n ? 
\"streaming\"\n : \"default\",\n },\n metadata,\n creationDate: new Date().toISOString(),\n };\n yield* kvStore.set(uploadId, initialUpload);\n\n // Emit started event\n yield* eventEmitter.emit(uploadId, {\n type: UploadEventType.UPLOAD_STARTED,\n data: initialUpload,\n });\n\n const result = yield* dataStore.writeStream(uploadId, {\n stream,\n contentType: file.type,\n sizeHint: file.sizeHint,\n metadata: stringMetadata,\n });\n\n // Update the upload record with the final size and URL\n const completedUpload: UploadFile = {\n ...initialUpload,\n size: result.size,\n offset: result.size,\n storage: {\n ...initialUpload.storage,\n path: result.path,\n },\n ...(result.url && { url: result.url }),\n };\n\n yield* kvStore.set(uploadId, completedUpload);\n\n // Emit completion event\n yield* eventEmitter.emit(uploadId, {\n type: UploadEventType.UPLOAD_COMPLETE,\n data: completedUpload,\n });\n\n return completedUpload;\n }\n\n // Fallback: buffer the stream and use regular upload (which calls createUpload + uploadChunk)\n yield* Effect.logWarning(\n `Falling back to buffered upload for file ${uploadId} (streaming write not supported)`,\n );\n\n // Collect stream into a buffer\n const chunks: Uint8Array[] = [];\n yield* Stream.runForEach(stream, (chunk) =>\n Effect.sync(() => {\n chunks.push(chunk);\n }),\n );\n\n // Calculate total size\n const totalSize = chunks.reduce(\n (acc, chunk) => acc + chunk.length,\n 0,\n );\n\n // Create a combined buffer\n const buffer = new Uint8Array(totalSize);\n let offset = 0;\n for (const chunk of chunks) {\n buffer.set(chunk, offset);\n offset += chunk.length;\n }\n\n // Create a readable stream from the buffer\n const readableStream = new ReadableStream({\n start(controller) {\n controller.enqueue(buffer);\n controller.close();\n },\n });\n\n // For fallback, use the regular flow with createUpload + uploadChunk\n const inputFile: InputFile = {\n ...file,\n size: totalSize,\n };\n\n const uploadFile = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId: { generateId: () => Effect.succeed(uploadId) },\n });\n\n // Use regular uploadChunk\n return yield* uploadChunk(uploadId, clientId, readableStream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n delete: (uploadId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n yield* dataStore.remove(uploadId);\n yield* kvStore.delete(uploadId);\n return;\n }),\n getCapabilities: (storageId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const dataStore = yield* dataStoreService.getDataStore(\n storageId,\n clientId,\n );\n return dataStore.getCapabilities();\n }),\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) =>\n Effect.gen(function* () {\n yield* eventEmitter.subscribe(uploadId, connection);\n }),\n unsubscribeFromUploadEvents: (uploadId: string) =>\n Effect.gen(function* () {\n yield* eventEmitter.unsubscribe(uploadId);\n }),\n } satisfies UploadEngineShape;\n });\n}\n\n/**\n * Pre-built UploadEngine Effect Layer.\n *\n * This layer provides a ready-to-use UploadEngine implementation that can be\n * composed with other layers to build a complete upload system.\n *\n * Required dependencies:\n * - UploadFileKVStore: For storing upload metadata\n * - UploadFileDataStores: For routing to storage backends\n * - UploadEventEmitter: For 
progress events\n * - GenerateId: For creating upload IDs\n *\n * @example\n * ```typescript\n * // Compose a complete upload system\n * const fullUploadSystem = Layer.mergeAll(\n * uploadEngine,\n * uploadFileKvStore,\n * dataStoreLayer,\n * uploadEventEmitter,\n * generateIdLayer\n * );\n *\n * // Use in application\n * const app = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * // Perform uploads...\n * }).pipe(Effect.provide(fullUploadSystem));\n * ```\n */\nexport const uploadEngine = Layer.effect(UploadEngine, createUploadEngine());\n","import type { DataStoreCapabilities, UploadStrategy } from \"../types\";\n\n/**\n * Configuration options for upload strategy negotiation.\n *\n * @property fileSize - Size of the file to be uploaded in bytes\n * @property preferredStrategy - Preferred upload strategy (single, parallel, resumable)\n * @property preferredChunkSize - Preferred chunk size in bytes\n * @property parallelUploads - Number of parallel upload connections\n * @property minChunkSizeForParallel - Minimum file size to consider parallel uploads\n */\nexport type UploadStrategyOptions = {\n fileSize: number;\n preferredStrategy?: UploadStrategy;\n preferredChunkSize?: number;\n parallelUploads?: number;\n minChunkSizeForParallel?: number;\n};\n\n/**\n * Result of upload strategy negotiation.\n *\n * @property strategy - The negotiated upload strategy\n * @property chunkSize - The negotiated chunk size in bytes\n * @property parallelUploads - The negotiated number of parallel uploads\n * @property reasoning - Array of reasoning strings explaining the decisions\n * @property warnings - Array of warning messages about adjustments made\n */\nexport type NegotiatedStrategy = {\n strategy: UploadStrategy;\n chunkSize: number;\n parallelUploads: number;\n reasoning: string[];\n warnings: string[];\n};\n\n/**\n * Negotiates the optimal upload strategy based on data store capabilities and file characteristics.\n *\n * This class analyzes data store capabilities, file size, and user preferences to determine\n * the best upload strategy (single, parallel, resumable) and optimal parameters like chunk size\n * and parallel connection count.\n *\n * The negotiator considers:\n * - Data store capabilities (parallel uploads, resumable uploads, concatenation)\n * - File size and chunk size constraints\n * - User preferences and requirements\n * - Performance optimization opportunities\n *\n * @example\n * ```typescript\n * // Create negotiator for S3 data store\n * const negotiator = new UploadStrategyNegotiator(\n * s3Capabilities,\n * (strategy) => s3Capabilities.supportsStrategy(strategy)\n * );\n *\n * // Negotiate strategy for large file\n * const result = negotiator.negotiateStrategy({\n * fileSize: 100_000_000, // 100MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB chunks\n * parallelUploads: 4\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * ```\n */\nexport class UploadStrategyNegotiator {\n /**\n * Creates a new upload strategy negotiator.\n *\n * @param capabilities - Data store capabilities and constraints\n * @param validateUploadStrategy - Function to validate if a strategy is supported\n */\n constructor(\n private capabilities: DataStoreCapabilities,\n private validateUploadStrategy: (strategy: UploadStrategy) => boolean,\n ) {}\n\n /**\n * Negotiates the optimal upload strategy 
based on options and data store capabilities.\n *\n * This method analyzes the provided options and data store capabilities to determine\n * the best upload strategy, chunk size, and parallel upload settings. It considers\n * user preferences, file size, and data store constraints to make optimal decisions.\n *\n * The negotiation process:\n * 1. Validates preferred strategy against data store capabilities\n * 2. Automatically selects strategy based on file size and capabilities\n * 3. Adjusts chunk size to fit within data store constraints\n * 4. Validates parallel upload settings\n * 5. Ensures final strategy is supported by the data store\n *\n * @param options - Upload strategy options including file size and preferences\n * @returns Negotiated strategy with reasoning and warnings\n *\n * @example\n * ```typescript\n * const result = negotiator.negotiateStrategy({\n * fileSize: 50_000_000, // 50MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB\n * parallelUploads: 3\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.parallelUploads); // 3\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * console.log(result.warnings); // [] (no warnings)\n * ```\n */\n negotiateStrategy(options: UploadStrategyOptions): NegotiatedStrategy {\n const reasoning: string[] = [];\n const warnings: string[] = [];\n\n let strategy: UploadStrategy = \"single\";\n let chunkSize =\n options.preferredChunkSize ??\n this.capabilities.optimalChunkSize ??\n 1024 * 1024;\n let parallelUploads = options.parallelUploads ?? 1;\n\n // Check if data store supports the preferred strategy\n if (options.preferredStrategy) {\n if (!this.validateUploadStrategy(options.preferredStrategy)) {\n warnings.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store, falling back`,\n );\n } else {\n strategy = options.preferredStrategy;\n reasoning.push(`Using preferred strategy: ${strategy}`);\n }\n }\n\n // Automatic strategy selection based on capabilities and file size\n if (\n !options.preferredStrategy ||\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n if (\n this.capabilities.supportsParallelUploads &&\n options.fileSize > (options.minChunkSizeForParallel ?? 10 * 1024 * 1024)\n ) {\n strategy = \"parallel\";\n reasoning.push(\n `Selected parallel upload for large file (${options.fileSize} bytes)`,\n );\n } else {\n strategy = \"single\";\n reasoning.push(\n this.capabilities.supportsParallelUploads\n ? 
`Selected single upload for small file (${options.fileSize} bytes)`\n : \"Selected single upload (parallel not supported by data store)\",\n );\n }\n }\n\n // Validate and adjust chunk size based on data store constraints\n if (\n this.capabilities.minChunkSize &&\n chunkSize < this.capabilities.minChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} below minimum ${this.capabilities.minChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.minChunkSize;\n }\n\n if (\n this.capabilities.maxChunkSize &&\n chunkSize > this.capabilities.maxChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} above maximum ${this.capabilities.maxChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.maxChunkSize;\n }\n\n // Validate parallel upload settings\n if (strategy === \"parallel\") {\n if (\n this.capabilities.maxConcurrentUploads &&\n parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n warnings.push(\n `Parallel uploads ${parallelUploads} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`,\n );\n parallelUploads = this.capabilities.maxConcurrentUploads;\n }\n\n // Check if file would exceed max parts limit\n if (this.capabilities.maxParts) {\n const estimatedParts = Math.ceil(options.fileSize / chunkSize);\n if (estimatedParts > this.capabilities.maxParts) {\n const minChunkForParts = Math.ceil(\n options.fileSize / this.capabilities.maxParts,\n );\n warnings.push(\n `Estimated parts ${estimatedParts} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`,\n );\n chunkSize = Math.max(chunkSize, minChunkForParts);\n }\n }\n }\n\n // Final validation - ensure strategy is still valid after adjustments\n if (!this.validateUploadStrategy(strategy)) {\n warnings.push(\n `Final strategy validation failed, falling back to single upload`,\n );\n strategy = \"single\";\n parallelUploads = 1;\n }\n\n // Add capability information to reasoning\n reasoning.push(\n `Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`,\n );\n\n return {\n strategy,\n chunkSize,\n parallelUploads: strategy === \"parallel\" ? parallelUploads : 1,\n reasoning,\n warnings,\n };\n }\n\n /**\n * Gets the data store capabilities used by this negotiator.\n *\n * @returns The data store capabilities and constraints\n */\n getDataStoreCapabilities(): DataStoreCapabilities {\n return this.capabilities;\n }\n\n /**\n * Validates upload strategy configuration against data store capabilities.\n *\n * This method checks if the provided configuration is valid for the current\n * data store capabilities without performing the actual negotiation. 
It's\n * useful for pre-validation before attempting to negotiate a strategy.\n *\n * @param options - Upload strategy options to validate\n * @returns Validation result with validity flag and error messages\n *\n * @example\n * ```typescript\n * const validation = negotiator.validateConfiguration({\n * fileSize: 10_000_000,\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 1_000_000,\n * parallelUploads: 5\n * });\n *\n * if (!validation.valid) {\n * console.log(\"Configuration errors:\", validation.errors);\n * // Handle validation errors\n * }\n * ```\n */\n validateConfiguration(options: UploadStrategyOptions): {\n valid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n\n if (\n options.preferredStrategy &&\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n errors.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store`,\n );\n }\n\n if (options.preferredChunkSize) {\n if (\n this.capabilities.minChunkSize &&\n options.preferredChunkSize < this.capabilities.minChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`,\n );\n }\n if (\n this.capabilities.maxChunkSize &&\n options.preferredChunkSize > this.capabilities.maxChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`,\n );\n }\n }\n\n if (\n options.parallelUploads &&\n this.capabilities.maxConcurrentUploads &&\n options.parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n errors.push(\n `Parallel uploads ${options.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`,\n );\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n 
}\n}\n"],"mappings":"8VAGA,SAAS,EACP,EACA,EACA,EAAS,EACA,CAET,OADI,EAAO,OAAS,EAAS,EAAQ,OAAe,GAC7C,EAAQ,OAAO,EAAM,IAAM,EAAO,EAAS,KAAO,EAAK,CAMhE,SAAS,EAAY,EAAoB,EAAa,EAAS,EAAY,CACzE,GAAI,EAAO,OAAS,EAAS,EAAI,OAAQ,MAAO,GAChD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAI,OAAQ,IAC9B,GAAI,EAAO,EAAS,KAAO,EAAI,WAAW,EAAE,CAAE,MAAO,GAEvD,MAAO,GAWT,MAAa,GACX,EACA,IACW,CACX,GAAI,EAAO,SAAW,EACpB,MAAO,2BAMT,GAAI,EAAW,EAAQ,CAAC,IAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAK,CAAC,CACtE,MAAO,YAIT,GAAI,EAAW,EAAQ,CAAC,IAAM,IAAM,IAAK,CAAC,CACxC,MAAO,aAIT,GAAI,EAAY,EAAQ,SAAS,EAAI,EAAY,EAAQ,SAAS,CAChE,MAAO,YAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAK,CAAE,EAAE,EACzC,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAAI,EAAY,EAAQ,OAAQ,EAAE,EAEjE,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAK,CAAC,CAClC,MAAO,YAST,GALI,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,EAAK,CAAC,EAK5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,GAAK,CAAC,CAC9C,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAK,CAAC,CAC9C,MAAO,eAIT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CACD,GACE,EAAK,SAAS,OAAO,EACpB,EAAK,SAAS,QAAQ,EAAI,EAAK,SAAS,OAAO,CAEhD,MAAO,gBAOX,GAAI,EAAO,QAAU,IAAM,EAAY,EAAQ,OAAQ,EAAE,CAAE,CACzD,IAAM,EAAU,IAAI,aAAa,CAAC,OAAO,EAAO,MAAM,EAAG,GAAG,CAAC,CAC7D,GACE,EAAQ,WAAW,MAAM,EACzB,EAAQ,WAAW,KAAK,EACxB,EAAQ,WAAW,OAAO,CAE1B,MAAO,YAKX,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,CAC9C,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,kBAIT,GACE,EAAO,QAAU,IAChB,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,EAAI,EAAO,QAAU,KACtD,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,IAAI,CACrB,CACQ,SAAS,WAAW,CAC3B,MAAO,mBAOX,GACE,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAY,EAAQ,MAAM,CAE1B,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,MAAO,EAAE,CAE7B,MAAO,YAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,kBAMT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,CAC5C,CAGA,GAAI,EAAO,QAAU,KAAM,CACzB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OAAO,EAAO,CACtE,GAAI,EAAK,SAAS,QAAQ,CACxB,MAAO,0EACT,GAAI,EAAK,SAAS,MAAM,CACtB,MAAO,oEACT,GAAI,EAAK,SAAS,OAAO,CACvB,MAAO,4EAEX,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,GAAM,GAAM,EAAK,CAAC,CAC1D,MAAO,+BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAM,IAAM,IAAM,GAAM,GAAK,CAAC,CAC1D,MAAO,8BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAK,CAAC,CAClC,MAAO,mBAIT,GAAI,EAAO,QAAU,KAAO,EAAY,EAAQ,QAAS,IAAI,CAC3D,MAAO,oBAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAM,EAAK,CAAC,CACpD,MAAO,WAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,WAMT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAY,EAAO,GACzB,GAAI,IAAc,KAAQ,IAAc,GAEtC,GAAI,CACF,IAAM,EAAO,IAAI,YAAY,QAAQ,CAAC,OACpC,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CAED,OADA,KAAK,MAAM,EAAK,MAAM,CAAC,CAChB,wBACD,GAOZ,GAAI,EAEF,OADY,EAAS,MAAM,IAAI,CAAC,KAAK,EAAE,aAAa,CACpD,CAEE,IAAK,MAC
L,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,OACH,MAAO,aACT,IAAK,OACL,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACL,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,eACT,IAAK,MACH,MAAO,gBAGT,IAAK,MACL,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,mBAGT,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YAGT,IAAK,MACH,MAAO,kBACT,IAAK,OACH,MAAO,0EACT,IAAK,OACH,MAAO,oEACT,IAAK,OACH,MAAO,4EAGT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,+BACT,IAAK,KACH,MAAO,8BACT,IAAK,KACL,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,oBAGT,IAAK,OACH,MAAO,YACT,IAAK,QACH,MAAO,aACT,IAAK,MACH,MAAO,WACT,IAAK,MACH,MAAO,WAGT,IAAK,MACH,MAAO,aACT,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,kBACT,IAAK,OACL,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,WACT,IAAK,KACH,MAAO,yBACT,IAAK,MACH,MAAO,WAET,QACE,MAAO,2BAIb,MAAO,4BAgBT,SAAgB,EAAiB,EAAkB,EAA2B,CAW5E,OATI,IAAa,EACR,GAIa,EAAS,MAAM,IAAI,CAAC,KACpB,EAAS,MAAM,IAAI,CAAC,GCvZ5C,MAAMA,EAEF,EAAO,IAAI,WAAa,CAC1B,IAAM,EAAa,MAAO,EAAO,YAAY,KAAK,EAAO,OAAO,CAChE,OAAO,EAAO,MAAM,EAAY,CAC9B,WAAc,IAAA,GACd,OAAS,IAAU,CACjB,QAAS,EAAK,QACd,OAAQ,EAAK,OACb,WAAY,EAAK,QAAU,EAAI,EAChC,EACF,CAAC,EACF,CA4DW,GACX,EACA,EACA,CACE,mBACA,UACA,eACA,gBAQF,EAAO,IAAI,WAAa,CAItB,IAAM,EAAe,MAAO,EACtB,EAAe,IAAI,MAAM,CAAC,aAAa,CA0E7C,OAvEoB,MAAO,EAAO,IAAI,WAAa,CAEjD,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAU,UACV,EACD,CAEK,EAAK,MAAO,EAAW,YAAY,CACnC,CAAE,OAAM,OAAM,WAAU,eAAc,WAAU,QAAS,EAE3DC,EAAyC,EAAE,CAC/C,GAAI,EACF,GAAI,CACF,EAAiB,KAAK,MAAM,EAAS,MAC/B,CACN,EAAiB,EAAE,CAIvB,IAAMC,EAAyC,CAC7C,GAAG,EACH,OACA,SAAU,GAAY,GACvB,CACG,IACF,EAAe,aAAe,EAAa,UAAU,EAGvD,IAAMC,EAAmB,CACvB,KACA,OACA,SAAU,EACV,OAAQ,EACR,eACA,QAAS,CACP,GAAI,EAAU,UACd,OACA,KAAM,GACN,OAAQ,EAAU,OACnB,CACD,OACA,eACD,CAGK,EAAU,MAAO,EAAU,OAAO,EAAK,CAY7C,OATA,MAAO,EAAQ,IAAI,EAAI,EAAQ,CAG/B,MAAO,EAAa,KAAK,EAAI,CAC3B,KAAM,EAAgB,eACtB,KAAM,EACN,KAAM,EAAQ,KACf,CAAC,CAEK,GACP,CAAC,KAED,EAAO,SAAS,gBAAiB,CAC/B,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CACH,EAGD,CAAC,KAGD,EAAO,SAAS,SAAU,CACxB,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAStB,GAPA,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGG,EAAK,KAAM,CACb,IAAM,EAAoB,EAAO,UAC/B,yBACA,EAAiB,YAAY,CAC3B,MAAO,KACP,OAAQ,EACR,MAAO,GACR,CAAC,CACH,CACD,MAAO,EAAO,OAAO,EAAmB,EAAK,KAAK,CAIpD,IAAM,EAAqB,EAAO,MAAM,iBAAiB,CACzD,MAAO,EAAO,UAAU,EAAmB,EAC3C,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,iBAAiB,CAAC,KAC/B,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAChC,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,SAAS,yBAAyB,CAAC,KAC/C,EAAO,aAAa,CAClB,mBAAoB,EAAU,UAAY,UAC1C,oBAAqB,EAAU,UAC/B,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CAGD,MAAO,EAAO,UACZ,EAAO,QAAQ,sBAAuB,CACpC,YAAa,sCACd,CAAC,CACH,EACD,CACH,CACF,CChOH,SAAgB,EAAmB,EAAyB,CAC1D,OAAO,EAAO,uBACN,EACL,GACC,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,OAAO,EAAM,CACpB,CAAC,CACL,CCgCH,SAAgB,EAAa,CAC3B,OACA,SACA,YACA,cACA,aACA,eACA,yBAAyB,KACH,CACtB,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAS,EAAgB,EAAK,CAEpC,GAAI,EAAW,OAAO,QACpB,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAIhE,IAAM,EAAkB,IAAI,gBACtB,CAAE,UAAW,EAGb,MAAgB,CAEpB,EAAgB,OAAO,EAKzB,OAFA,EAAW,OAAO,iBAAiB,QAAS,EAAS,CAAE,KAAM,GAAM,CAAC,CAE7D,MAAO,EAAO,kBACnB,EAAO,UAAY,CAAE,SAAQ,UAAS,EAAE,EACvC,CAAE,OAAQ,KACT,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAe,MAAO,EA
[remainder of the removed source map omitted: the single-line base64-VLQ "mappings" payload through the closing `"}` of the JSON — machine-generated data, not human-readable]