@uploadista/core 0.0.20 → 0.1.0
This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/{checksum-DVPe3Db4.cjs → checksum-CTpNXWEL.cjs} +1 -1
- package/dist/errors/index.cjs +1 -1
- package/dist/errors/index.d.mts +2 -2
- package/dist/flow/index.cjs +1 -1
- package/dist/flow/index.d.cts +2 -2
- package/dist/flow/index.d.mts +6 -6
- package/dist/flow/index.mjs +1 -1
- package/dist/flow-CA8xO6wP.mjs +2 -0
- package/dist/flow-CA8xO6wP.mjs.map +1 -0
- package/dist/flow-DKJaCPxL.cjs +1 -0
- package/dist/index-9gyMMEIB.d.cts.map +1 -1
- package/dist/{index-RuQUCROH.d.mts → index-BKY0VjsL.d.mts} +230 -169
- package/dist/index-BKY0VjsL.d.mts.map +1 -0
- package/dist/{index-B9V5SSxl.d.mts → index-D8MZ6P3o.d.mts} +2 -2
- package/dist/{index-B9V5SSxl.d.mts.map → index-D8MZ6P3o.d.mts.map} +1 -1
- package/dist/{index-BFSHumky.d.mts → index-DQuMQssI.d.mts} +2 -2
- package/dist/{index-BFSHumky.d.mts.map → index-DQuMQssI.d.mts.map} +1 -1
- package/dist/{index-DMqaf28W.d.cts → index-j_n72QK0.d.cts} +228 -167
- package/dist/index-j_n72QK0.d.cts.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +2 -2
- package/dist/index.d.mts +6 -6
- package/dist/index.mjs +1 -1
- package/dist/{stream-limiter-BvkaZXcz.cjs → stream-limiter-DH0vv46_.cjs} +1 -1
- package/dist/streams/index.cjs +1 -1
- package/dist/streams/index.d.mts +2 -2
- package/dist/streams/index.mjs +1 -1
- package/dist/testing/index.cjs +2 -2
- package/dist/testing/index.d.cts +1 -1
- package/dist/testing/index.d.cts.map +1 -1
- package/dist/testing/index.d.mts +5 -5
- package/dist/testing/index.d.mts.map +1 -1
- package/dist/testing/index.mjs +2 -2
- package/dist/testing/index.mjs.map +1 -1
- package/dist/types/index.cjs +1 -1
- package/dist/types/index.d.cts +2 -2
- package/dist/types/index.d.mts +6 -6
- package/dist/types/index.mjs +1 -1
- package/dist/types-BF_tvkRh.cjs +1 -0
- package/dist/types-BRnwrJDg.mjs +2 -0
- package/dist/types-BRnwrJDg.mjs.map +1 -0
- package/dist/upload/index.cjs +1 -1
- package/dist/upload/index.d.cts +1 -1
- package/dist/upload/index.d.mts +5 -5
- package/dist/upload/index.mjs +1 -1
- package/dist/upload-CLHJ1SFS.cjs +1 -0
- package/dist/upload-CpsShjP3.mjs +2 -0
- package/dist/upload-CpsShjP3.mjs.map +1 -0
- package/dist/{uploadista-error-DR0XimpE.d.mts → uploadista-error-B1qbOy9N.d.mts} +1 -1
- package/dist/{uploadista-error-DR0XimpE.d.mts.map → uploadista-error-B1qbOy9N.d.mts.map} +1 -1
- package/dist/{uploadista-error-BgQU45we.cjs → uploadista-error-CLWoRAAr.cjs} +1 -1
- package/dist/uploadista-error-CkSxSyNo.mjs.map +1 -1
- package/dist/utils/index.cjs +1 -1
- package/dist/utils/index.d.mts +2 -2
- package/dist/utils/index.mjs +1 -1
- package/dist/{utils-UUJt8ILJ.cjs → utils-CvZJUNEo.cjs} +1 -1
- package/dist/{utils-B-ZhQ6b0.mjs → utils-DVwfrVBJ.mjs} +1 -1
- package/dist/utils-DVwfrVBJ.mjs.map +1 -0
- package/package.json +8 -8
- package/src/flow/circuit-breaker-store.ts +7 -8
- package/src/flow/flow.ts +6 -5
- package/src/flow/nodes/transform-node.ts +15 -1
- package/src/flow/plugins/image-plugin.ts +12 -3
- package/src/flow/plugins/video-plugin.ts +12 -3
- package/src/flow/types/flow-types.ts +75 -6
- package/src/flow/types/retry-policy.ts +5 -2
- package/src/flow/types/type-utils.ts +4 -6
- package/src/flow/utils/file-naming.ts +36 -11
- package/src/testing/mock-upload-engine.ts +18 -1
- package/src/types/circuit-breaker-store.ts +2 -2
- package/src/types/data-store.ts +4 -1
- package/src/types/kv-store.ts +13 -12
- package/src/types/upload-file.ts +29 -4
- package/src/upload/upload-chunk.ts +1 -1
- package/dist/flow-BHVkk_6W.cjs +0 -1
- package/dist/flow-DlhHOlMk.mjs +0 -2
- package/dist/flow-DlhHOlMk.mjs.map +0 -1
- package/dist/index-DMqaf28W.d.cts.map +0 -1
- package/dist/index-RuQUCROH.d.mts.map +0 -1
- package/dist/streams-BiD_pOPH.cjs +0 -0
- package/dist/streams-Cqjxk2rI.mjs +0 -1
- package/dist/types-Cws60JHC.cjs +0 -1
- package/dist/types-DKGQJIEr.mjs +0 -2
- package/dist/types-DKGQJIEr.mjs.map +0 -1
- package/dist/upload-C-C7hn1-.mjs +0 -2
- package/dist/upload-C-C7hn1-.mjs.map +0 -1
- package/dist/upload-DWBlRXHh.cjs +0 -1
- package/dist/utils-B-ZhQ6b0.mjs.map +0 -1
- /package/dist/{index-C-svZlpj.d.mts → index-DWe68pTi.d.mts} +0 -0
package/dist/types-DKGQJIEr.mjs.map
@@ -1 +0,0 @@
- {"version":3,"file":"types-DKGQJIEr.mjs",…} (removed generated source map for the types bundle; its sourcesContent embeds the TypeScript sources src/types/kv-store.ts, circuit-breaker-store.ts, upload-file.ts, data-store.ts, event-broadcaster.ts, event-emitter.ts, health-check.ts, input-file.ts, middleware.ts, upload-event.ts, and websocket.ts; content truncated in this view)
upload requests.\n * Use this schema to parse and validate input data when creating new uploads.\n *\n * @see {@link InputFile} for the TypeScript type\n */\nexport const inputFileSchema = z\n .object({\n uploadLengthDeferred: z.boolean().optional(),\n storageId: z.string(),\n /** File size in bytes. Optional when uploadLengthDeferred is true. */\n size: z.number().optional(),\n /** Optional size hint for optimization when size is unknown */\n sizeHint: z.number().optional(),\n type: z.string(),\n fileName: z.string().optional(),\n lastModified: z.number().optional(),\n metadata: z.string().optional(),\n checksum: z.string().optional(),\n checksumAlgorithm: z.string().optional(),\n flow: z\n .object({\n flowId: z.string(),\n nodeId: z.string(),\n jobId: z.string(),\n })\n .optional(),\n })\n .refine(\n (data) => {\n // Size is required unless uploadLengthDeferred is true\n if (data.uploadLengthDeferred === true) {\n return true; // Size can be omitted\n }\n return data.size !== undefined && data.size >= 0;\n },\n {\n message: \"size is required when uploadLengthDeferred is not true\",\n path: [\"size\"],\n },\n );\n\n/**\n * Represents the input data for creating a new file upload.\n *\n * This type defines the information required to initiate an upload.\n * It's used by clients to provide upload metadata before sending file data.\n *\n * @property storageId - Target storage backend identifier (e.g., \"s3-production\", \"azure-blob\")\n * @property size - File size in bytes. Optional when uploadLengthDeferred is true.\n * @property sizeHint - Optional size hint for optimization when exact size is unknown\n * @property type - MIME type of the file (e.g., \"image/jpeg\", \"application/pdf\")\n * @property uploadLengthDeferred - If true, file size is not known upfront (streaming upload)\n * @property fileName - Original filename from the client\n * @property lastModified - File's last modified timestamp in milliseconds since epoch\n * @property metadata - Base64-encoded metadata string (as per tus protocol)\n * @property checksum - Expected file checksum for validation\n * @property checksumAlgorithm - Algorithm used for checksum (e.g., \"md5\", \"sha256\")\n * @property flow - Optional flow processing configuration\n * @property flow.flowId - ID of the flow to execute on this file\n * @property flow.nodeId - Starting node ID in the flow\n * @property flow.jobId - Flow job execution ID\n *\n * @example\n * ```typescript\n * // Basic file upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\",\n * lastModified: Date.now()\n * };\n *\n * // Upload with metadata (base64 encoded as per tus protocol)\n * const metadata = btoa(JSON.stringify({\n * userId: \"user_123\",\n * albumId: \"album_456\"\n * }));\n * const inputWithMetadata: InputFile = {\n * storageId: \"s3-production\",\n * size: 2048000,\n * type: \"image/png\",\n * fileName: \"screenshot.png\",\n * metadata\n * };\n *\n * // Upload with checksum validation\n * const inputWithChecksum: InputFile = {\n * storageId: \"s3-production\",\n * size: 512000,\n * type: \"application/pdf\",\n * fileName: \"document.pdf\",\n * checksum: \"5d41402abc4b2a76b9719d911017c592\",\n * checksumAlgorithm: \"md5\"\n * };\n *\n * // Upload that triggers a flow\n * const inputWithFlow: InputFile = {\n * storageId: \"s3-temp\",\n * size: 4096000,\n * type: \"image/jpeg\",\n * fileName: \"large-image.jpg\",\n * flow: {\n * flowId: \"resize-and-optimize\",\n * nodeId: 
\"input_1\",\n * jobId: \"job_789\"\n * }\n * };\n *\n * // Streaming upload (size unknown) - size can be omitted\n * const streamingInput: InputFile = {\n * storageId: \"s3-production\",\n * type: \"video/mp4\",\n * uploadLengthDeferred: true,\n * fileName: \"live-stream.mp4\"\n * };\n *\n * // Streaming upload with size hint for optimization\n * const streamingWithHint: InputFile = {\n * storageId: \"s3-production\",\n * type: \"image/webp\",\n * uploadLengthDeferred: true,\n * sizeHint: 5_000_000, // ~5MB expected\n * fileName: \"optimized-image.webp\"\n * };\n * ```\n */\nexport type InputFile = z.infer<typeof inputFileSchema>;\n","import { Context, Effect, Layer } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\n\nexport type MiddlewareContext = {\n request: Request;\n uploadId?: string;\n metadata?: Record<string, string>;\n};\n\nexport type MiddlewareNext = () => Promise<Response>;\n\nexport type Middleware = (\n context: MiddlewareContext,\n next: MiddlewareNext,\n) => Promise<Response>;\n\n// Effect-based Middleware service\nexport class MiddlewareService extends Context.Tag(\"MiddlewareService\")<\n MiddlewareService,\n {\n readonly execute: (\n middlewares: Middleware[],\n context: MiddlewareContext,\n handler: MiddlewareNext,\n ) => Effect.Effect<Response, UploadistaError>;\n }\n>() {}\n\nexport const MiddlewareServiceLive = Layer.succeed(\n MiddlewareService,\n MiddlewareService.of({\n execute: (middlewares, context, handler) =>\n Effect.gen(function* () {\n if (middlewares.length === 0) {\n return yield* Effect.tryPromise({\n try: () => handler(),\n catch: (error) => error as UploadistaError,\n });\n }\n\n const chain = middlewares.reduceRight(\n (next: MiddlewareNext, middleware: Middleware) => {\n return () => middleware(context, next);\n },\n handler,\n );\n\n return yield* Effect.tryPromise({\n try: () => chain(),\n catch: (error) => error as UploadistaError,\n });\n }),\n }),\n);\n","import { z } from \"zod\";\nimport { uploadFileSchema } from \"./upload-file\";\n\nexport enum UploadEventType {\n UPLOAD_STARTED = \"upload-started\",\n UPLOAD_PROGRESS = \"upload-progress\",\n UPLOAD_COMPLETE = \"upload-complete\",\n UPLOAD_FAILED = \"upload-failed\",\n UPLOAD_VALIDATION_SUCCESS = \"upload-validation-success\",\n UPLOAD_VALIDATION_FAILED = \"upload-validation-failed\",\n UPLOAD_VALIDATION_WARNING = \"upload-validation-warning\",\n}\n\nconst flowContextSchema = z\n .object({\n flowId: z.string(),\n nodeId: z.string(),\n jobId: z.string(),\n })\n .optional();\n\nexport const uploadEventSchema = z.union([\n z.object({\n type: z.union([\n z.literal(UploadEventType.UPLOAD_STARTED),\n z.literal(UploadEventType.UPLOAD_COMPLETE),\n ]),\n data: uploadFileSchema,\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_PROGRESS),\n data: z.object({\n id: z.string(),\n progress: z.number(),\n total: z.number(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_FAILED),\n data: z.object({\n id: z.string(),\n error: z.string(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_VALIDATION_SUCCESS),\n data: z.object({\n id: z.string(),\n validationType: z.enum([\"checksum\", \"mimetype\"]),\n algorithm: z.string().optional(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_VALIDATION_FAILED),\n data: z.object({\n id: z.string(),\n reason: z.string(),\n expected: z.string(),\n actual: z.string(),\n }),\n 
flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_VALIDATION_WARNING),\n data: z.object({\n id: z.string(),\n message: z.string(),\n }),\n flow: flowContextSchema,\n }),\n]);\n\nexport type UploadEvent = z.infer<typeof uploadEventSchema>;\n","import z from \"zod\";\nimport { uploadEventSchema } from \"./upload-event\";\n\n/**\n * Platform-agnostic WebSocket connection interface\n */\nexport interface WebSocketConnection {\n send(data: string): void;\n close(code?: number, reason?: string): void;\n readonly readyState: number;\n readonly id: string;\n}\n\n/**\n * WebSocket message that can be sent/received\n */\n\nexport const webSocketMessageSchema = z.union([\n z.object({\n type: z.literal(\"upload_event\"),\n payload: uploadEventSchema,\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"flow_event\"),\n payload: z.any(), // FlowEvent doesn't have a zod schema, using z.any() for now\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"subscribed\"),\n payload: z.object({ eventKey: z.string() }),\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"error\"),\n message: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"pong\"),\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"ping\"),\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"connection\"),\n message: z.string().optional(),\n uploadId: z.string().optional(),\n timestamp: z.string().optional(),\n }),\n]);\n\nexport type WebSocketMessage<TEvent = unknown> =\n | z.infer<typeof webSocketMessageSchema>\n | {\n type: \"upload_event\";\n payload: TEvent;\n timestamp?: string;\n }\n | {\n type: \"flow_event\";\n payload: TEvent;\n timestamp?: string;\n 
};\n"],"mappings":"uIA+IA,IAAa,EAAb,KAA2D,CACzD,YACE,EACA,EACA,EACA,EACA,CAJQ,KAAA,UAAA,EACA,KAAA,UAAA,EACA,KAAA,UAAA,EACA,KAAA,YAAA,EAGV,IAAO,GACL,KAAK,UAAU,IAAI,KAAK,UAAY,EAAI,CAAC,KACvC,EAAO,QAAS,GAAU,CACxB,GAAI,IAAU,KACZ,OAAO,EAAO,KACZ,EAAgB,SAAS,iBAAkB,CACzC,MAAO,QAAQ,EAAI,aACpB,CAAC,CACH,CAEH,GAAI,CACF,OAAO,EAAO,QAAQ,KAAK,YAAY,EAAM,CAAC,OACvC,EAAO,CACd,OAAO,EAAO,KACZ,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,wCAAwC,EAAI,GAClD,MAAO,EACR,CAAC,CACH,GAEH,CACH,CAEH,KAAO,EAAa,IAAuD,CACzE,GAAI,CACF,IAAM,EAAa,KAAK,UAAU,EAAM,CACxC,OAAO,KAAK,UAAU,IAAI,KAAK,UAAY,EAAK,EAAW,OACpD,EAAO,CACd,OAAO,EAAO,KACZ,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,sCAAsC,EAAI,GAChD,MAAO,EACR,CAAC,CACH,GAIL,OAAU,GACR,KAAK,UAAU,OAAO,KAAK,UAAY,EAAI,CAE7C,SACM,KAAK,UAAU,KAEV,KAAK,UAAU,KAAK,KAAK,UAAU,CAAC,KACzC,EAAO,IAAK,GACV,EAAK,IAAK,GACR,EAAI,WAAW,KAAK,UAAU,CAC1B,EAAI,MAAM,KAAK,UAAU,OAAO,CAChC,EACL,CACF,CACF,CAEI,EAAO,KACZ,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,6CACP,CAAC,CACH,EAoBL,MAAa,EAAiB,CAC5B,UAAe,GAAoB,KAAK,UAAU,EAAK,CACvD,YAAiB,GAAmB,KAAK,MAAM,EAAI,CACpD,CAoBD,IAAa,EAAb,cAAwC,EAAQ,IAAI,cAAc,EAG/D,AAAC,GA2BS,EAAb,cAAuC,EAAQ,IAAI,oBAAoB,EAGpE,AAAC,GAmBJ,MAAa,EAAoB,EAAM,OACrC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADO,MAAO,EAGvB,0BACA,EAAe,UACf,EAAe,YAChB,EACD,CACH,CA8BD,IAAa,EAAb,cAAoC,EAAQ,IAAI,iBAAiB,EAG9D,AAAC,GAmBJ,MAAa,EAAiB,EAAM,OAClC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADO,MAAO,EAGvB,uBACA,EAAe,UACf,EAAe,YAChB,EACD,CACH,CAsCD,IAAa,EAAb,cAA4C,EAAQ,IAAI,yBAAyB,EAG9E,AAAC,GAmBJ,MAAa,EAAyB,EAAM,OAC1C,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADO,MAAO,EAGvB,kBACA,EAAe,UACf,EAAe,YAChB,EACD,CACH,CCtQD,IAAa,EAAb,cAAgD,EAAQ,IACtD,6BACD,EAAmD,AAAC,GAYrD,SAAgB,EAAiC,EAKrB,CAC1B,IAAM,EAAM,KAAK,KAAK,CACtB,MAAO,CACL,MAAO,SACP,aAAc,EACd,gBAAiB,EACjB,kBAAmB,EACnB,YAAa,EACb,SACD,CC/MH,MAAa,EAAqB,EAAE,OAAO,CACzC,QAAS,EAAE,QAAQ,CACnB,OAAQ,EAAE,QAAQ,CAClB,WAAY,EAAE,QAAQ,CACvB,CAAC,CAEW,EAAmB,EAAE,OAAO,CACvC,GAAI,EAAE,QAAQ,CACd,KAAM,EAAE,QAAQ,CAAC,UAAU,CAC3B,OAAQ,EAAE,QAAQ,CAClB,SAAU,EACP,OAAO,EAAE,QAAQ,CAAE,EAAE,MAAM,CAAC,EAAE,QAAQ,CAAE,EAAE,QAAQ,CAAE,EAAE,SAAS,CAAC,CAAC,CAAC,CAClE,UAAU,CACb,aAAc,EAAE,QAAQ,CAAC,UAAU,CACnC,IAAK,EAAE,QAAQ,CAAC,UAAU,CAC1B,eAAgB,EAAE,SAAS,CAAC,UAAU,CACtC,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,kBAAmB,EAAE,QAAQ,CAAC,UAAU,CACxC,QAAS,EAAE,OAAO,CAChB,GAAI,EAAE,QAAQ,CACd,KAAM,EAAE,QAAQ,CAChB,KAAM,EAAE,QAAQ,CAAC,UAAU,CAC3B,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAC7B,MAAO,EACJ,MACC,EAAE,OAAO,CACP,WAAY,EAAE,QAAQ,CACtB,KAAM,EAAE,QAAQ,CAChB,KAAM,EAAE,QAAQ,CACjB,CAAC,CACH,CACA,UAAU,CACd,CAAC,CACF,KAAM,EACH,OAAO,CACN,OAAQ,EAAE,QAAQ,CAClB,OAAQ,EAAE,QAAQ,CAClB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACD,UAAU,CACb,aAAc,EAAmB,UAAU,CAC5C,CAAC,CCHWI,EAAsD,CACjE,kBAAmB,QACnB,UAAW,MACZ,CAMY,EAA8B,EAAI,KAAO,KAsRtD,IAAa,EAAb,cAAyC,EAAQ,IAAI,sBAAsB,EAGxE,AAAC,GAkBS,EAAb,cAAiD,EAAQ,IACvD,8BACD,EAAsD,AAAC,GAsC3C,EAAb,cAA0C,EAAQ,IAAI,uBAAuB,EAG1E,AAAC,GA8DJ,MAAa,EACX,GAEO,WAAY,GAAU,UAAW,EAwC7B,EAAuB,KAClC,IACyE,CAEzE,GAAI,EAAM,QAAQ,EAAO,CACvB,OAAO,EAQT,GAAI,EAAO,SAAS,EAAO,CACzB,OAAO,EAAM,OACX,EACA,EAAO,IAAI,WAAa,CAMtB,IAAM,EAAgB,MALJ,EAMlB,MAAO,CACL,aAAe,GAAuB,EAAO,QAAQ,EAAc,CACnE,kBAAmB,EAAO,QAAQ,IAAA,GAAU,CAC7C,EACD,CACH,CAIH,GAAI,EAAY,EAAO,CAAE,CACvB,IAAM,EAAQ,EACd,OAAO,EAAM,QAAQ,EAAsB,CACzC,aAAe,GAAuB,EAAO,QAAQ,EAAM,CAC3D,kBAAmB,EAAO,QAAQ,IAAA,GAAU,CAC7C,CAAC,CAIJ,IAAM,EAAc,EAQd,EAAa,EAAY,SAAW,OAAO,KAAK,EAAY,OAAO,CAAC,GAGpEC,EAAwD,EAAE,CAChE,IAAK,GAAM,CAAC,EAAK,KAAkB,OAAO,QAAQ,EAAY,OAAO,CAC/D,SAAU,GAAiB,EAAE,WAAY,GAC3C,EAAe,GAAO,MAAM,EAAO,WACjC,EACD,CAED,EAAe,GAAO,EAI1B,OAAO,EAAM,QAAQ,EAAsB,CACzC,aAAe,GAAsB,CACnC,IAAM,E
ACJ,EAAe,KACd,EAAa,EAAe,GAAc,IAAA,IAI7C,OAHI,EACK,EAAO,QAAQ,EAAM,CAEvB,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,EAEhE,kBAAmB,EAAO,QAAQ,IAAA,GAAU,CAC7C,CAAC,ECniBJ,IAAa,EAAb,cAA6C,EAAQ,IAAI,mBAAmB,EAGzE,AAAC,GCqGS,EAAb,KAAuE,CACrE,YACE,EACA,EACA,CAFQ,KAAA,YAAA,EACA,KAAA,eAAA,EAGV,WACE,EACA,IAEA,KAAK,YAAY,UAAU,EAAK,EAAW,CAE7C,YAAe,GACb,KAAK,YAAY,YAAY,EAAI,CAEnC,MAAQ,EAAa,IAAwD,CAC3E,IAAM,EAAU,KAAK,eAAe,EAAM,CAC1C,OAAO,KAAK,YAAY,KAAK,EAAK,EAAQ,GA6B9C,MAAa,EACX,IACI,CACJ,eAAoB,GAClB,KAAK,UAAU,CACb,KAAM,EACN,QAAS,EACT,UAAW,IAAI,MAAM,CAAC,aAAa,CACpC,CAAC,CACL,EAoBD,IAAa,EAAb,cAA6C,EAAQ,IAAI,mBAAmB,EAGzE,AAAC,GAkCS,EAAb,cAAwC,EAAQ,IAAI,qBAAqB,EAGtE,AAAC,GAmBJ,MAAa,EAAqB,EAAM,OACtC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADS,MAAO,EAGzB,EAAyB,eAAe,CAAC,eAC1C,EACD,CACH,CAmCD,IAAa,EAAb,cAAsC,EAAQ,IAAI,mBAAmB,EAGlE,AAAC,GAmBJ,MAAa,EAAmB,EAAM,OACpC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADS,MAAO,EAGzB,EAAyB,aAAa,CAAC,eACxC,EACD,CACH,CCjMYG,EAET,CACF,QAAS,IACT,aAAc,GACd,aAAc,GACd,sBAAuB,GACxB,CAiBD,SAAgB,EACd,EACsB,CAItB,OAHI,GAAc,SAAS,aAAa,CAC/B,OAEF,OAST,SAAgB,EAAmB,EAA8B,CAC/D,OAAQ,EAAR,CACE,IAAK,UACH,MAAO,KACT,IAAK,WACH,MAAO,KACT,IAAK,YACH,MAAO,uBC/Lb,MAAa,EAAkB,EAC5B,OAAO,CACN,qBAAsB,EAAE,SAAS,CAAC,UAAU,CAC5C,UAAW,EAAE,QAAQ,CAErB,KAAM,EAAE,QAAQ,CAAC,UAAU,CAE3B,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,KAAM,EAAE,QAAQ,CAChB,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,aAAc,EAAE,QAAQ,CAAC,UAAU,CACnC,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,kBAAmB,EAAE,QAAQ,CAAC,UAAU,CACxC,KAAM,EACH,OAAO,CACN,OAAQ,EAAE,QAAQ,CAClB,OAAQ,EAAE,QAAQ,CAClB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACD,UAAU,CACd,CAAC,CACD,OACE,GAEK,EAAK,uBAAyB,GACzB,GAEF,EAAK,OAAS,IAAA,IAAa,EAAK,MAAQ,EAEjD,CACE,QAAS,yDACT,KAAM,CAAC,OAAO,CACf,CACF,CC3BH,IAAa,EAAb,cAAuC,EAAQ,IAAI,oBAAoB,EASpE,AAAC,GAEJ,MAAa,EAAwB,EAAM,QACzC,EACA,EAAkB,GAAG,CACnB,SAAU,EAAa,EAAS,IAC9B,EAAO,IAAI,WAAa,CACtB,GAAI,EAAY,SAAW,EACzB,OAAO,MAAO,EAAO,WAAW,CAC9B,QAAW,GAAS,CACpB,MAAQ,GAAU,EACnB,CAAC,CAGJ,IAAM,EAAQ,EAAY,aACvB,EAAsB,QACR,EAAW,EAAS,EAAK,CAExC,EACD,CAED,OAAO,MAAO,EAAO,WAAW,CAC9B,QAAW,GAAO,CAClB,MAAQ,GAAU,EACnB,CAAC,EACF,CACL,CAAC,CACH,CClDD,IAAY,EAAA,SAAA,EAAL,OACL,GAAA,eAAA,iBACA,EAAA,gBAAA,kBACA,EAAA,gBAAA,kBACA,EAAA,cAAA,gBACA,EAAA,0BAAA,4BACA,EAAA,yBAAA,2BACA,EAAA,0BAAA,mCAGF,MAAM,EAAoB,EACvB,OAAO,CACN,OAAQ,EAAE,QAAQ,CAClB,OAAQ,EAAE,QAAQ,CAClB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACD,UAAU,CAEA,EAAoB,EAAE,MAAM,CACvC,EAAE,OAAO,CACP,KAAM,EAAE,MAAM,CACZ,EAAE,QAAQ,EAAgB,eAAe,CACzC,EAAE,QAAQ,EAAgB,gBAAgB,CAC3C,CAAC,CACF,KAAM,EACN,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,gBAAgB,CAChD,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,SAAU,EAAE,QAAQ,CACpB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,cAAc,CAC9C,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,MAAO,EAAE,QAAQ,CAClB,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,0BAA0B,CAC1D,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,eAAgB,EAAE,KAAK,CAAC,WAAY,WAAW,CAAC,CAChD,UAAW,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,yBAAyB,CACzD,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,OAAQ,EAAE,QAAQ,CAClB,SAAU,EAAE,QAAQ,CACpB,OAAQ,EAAE,QAAQ,CACnB,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,0BAA0B,CAC1D,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,QAAS,EAAE,QAAQ,CACpB,CAAC,CACF,KAAM,EACP,CAAC,CACH,CAAC,CCzDW,EAAyBC,EAAE,MAAM,CAC5CA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,eAAe,CAC/B,QAAS,EACT,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,aAAa,CAC7B,QAASA,EAAE,KAAK,CAChB,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,Q
AAQ,aAAa,CAC7B,QAASA,EAAE,OAAO,CAAE,SAAUA,EAAE,QAAQ,CAAE,CAAC,CAC3C,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,QAAQ,CACxB,QAASA,EAAE,QAAQ,CAAC,UAAU,CAC/B,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,OAAO,CACvB,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,OAAO,CACvB,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,aAAa,CAC7B,QAASA,EAAE,QAAQ,CAAC,UAAU,CAC9B,SAAUA,EAAE,QAAQ,CAAC,UAAU,CAC/B,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACH,CAAC"}
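The `inputFileSchema` embedded in the sources above enforces its size rule through a `.refine()` step rather than a plain required field: `size` may be omitted only when `uploadLengthDeferred` is true. A minimal sketch of how that rule behaves (the import path is hypothetical; the schema is exported from the package's types sources):

```typescript
// Hypothetical import path; inputFileSchema is exported from the types sources above.
import { inputFileSchema } from "@uploadista/core/types";

// Rejected: size is required when uploadLengthDeferred is not true.
const missingSize = inputFileSchema.safeParse({
  storageId: "s3-production",
  type: "image/jpeg",
});
console.log(missingSize.success); // false ("size is required when uploadLengthDeferred is not true")

// Accepted: deferred-length uploads may omit size and pass an optional sizeHint instead.
const deferred = inputFileSchema.safeParse({
  storageId: "s3-production",
  type: "video/mp4",
  uploadLengthDeferred: true,
  sizeHint: 5_000_000, // ~5MB expected
});
console.log(deferred.success); // true
```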
package/dist/upload-C-C7hn1-.mjs
DELETED
@@ -1,2 +0,0 @@
import{n as e}from"./uploadista-error-CkSxSyNo.mjs";import{N as t,S as n,n as r,p as i}from"./types-DKGQJIEr.mjs";import{r as a,t as o}from"./checksum-COoD-F1l.mjs";import{t as s}from"./stream-limiter-B9nsn2gb.mjs";import{Context as c,Effect as l,Layer as u,Metric as d,MetricBoundaries as f,Option as p,Ref as m,Stream as h,Tracer as g}from"effect";function _(e,t,n=0){return e.length<n+t.length?!1:t.every((t,r)=>e[n+r]===t)}function v(e,t,n=0){if(e.length<n+t.length)return!1;for(let r=0;r<t.length;r++)if(e[n+r]!==t.charCodeAt(r))return!1;return!0}const y=(e,t)=>{if(e.length===0)return`application/octet-stream`;if(_(e,[137,80,78,71,13,10,26,10]))return`image/png`;if(_(e,[255,216,255]))return`image/jpeg`;if(v(e,`GIF87a`)||v(e,`GIF89a`))return`image/gif`;if(_(e,[82,73,70,70])&&e.length>=12&&v(e,`WEBP`,8))return`image/webp`;if(e.length>=12&&_(e,[0,0,0],0)&&v(e,`ftyp`,4)&&(v(e,`avif`,8)||v(e,`avis`,8)))return`image/avif`;if(e.length>=12&&v(e,`ftyp`,4)&&(v(e,`heic`,8)||v(e,`heif`,8)||v(e,`mif1`,8)))return`image/heic`;if(_(e,[66,77]))return`image/bmp`;if(_(e,[73,73,42,0])||_(e,[77,77,0,42]))return`image/tiff`;if(_(e,[0,0,1,0]))return`image/x-icon`;if(e.length>=5){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,Math.min(1024,e.length)));if(t.includes(`<svg`)||t.includes(`<?xml`)&&t.includes(`<svg`))return`image/svg+xml`}if(e.length>=12&&v(e,`ftyp`,4)){let t=new TextDecoder().decode(e.slice(8,12));if(t.startsWith(`mp4`)||t.startsWith(`M4`)||t.startsWith(`isom`))return`video/mp4`}if(_(e,[26,69,223,163]))return`video/webm`;if(_(e,[82,73,70,70])&&e.length>=12&&v(e,`AVI `,8))return`video/x-msvideo`;if(e.length>=8&&(v(e,`moov`,4)||v(e,`mdat`,4)||v(e,`free`,4)))return`video/quicktime`;if(_(e,[26,69,223,163])&&e.length>=100&&new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,100)).includes(`matroska`))return`video/x-matroska`;if(_(e,[255,251])||_(e,[255,243])||_(e,[255,242])||v(e,`ID3`))return`audio/mpeg`;if(_(e,[82,73,70,70])&&e.length>=12&&v(e,`WAVE`,8))return`audio/wav`;if(v(e,`fLaC`))return`audio/flac`;if(v(e,`OggS`))return`audio/ogg`;if(e.length>=12&&v(e,`ftyp`,4)&&v(e,`M4A`,8))return`audio/mp4`;if(v(e,`%PDF`))return`application/pdf`;if(_(e,[80,75,3,4])||_(e,[80,75,5,6])||_(e,[80,75,7,8])){if(e.length>=1024){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e);if(t.includes(`word/`))return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;if(t.includes(`xl/`))return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;if(t.includes(`ppt/`))return`application/vnd.openxmlformats-officedocument.presentationml.presentation`}return`application/zip`}if(_(e,[82,97,114,33,26,7]))return`application/x-rar-compressed`;if(_(e,[55,122,188,175,39,28]))return`application/x-7z-compressed`;if(_(e,[31,139]))return`application/gzip`;if(e.length>=262&&v(e,`ustar`,257))return`application/x-tar`;if(v(e,`wOFF`))return`font/woff`;if(v(e,`wOF2`))return`font/woff2`;if(_(e,[0,1,0,0,0]))return`font/ttf`;if(v(e,`OTTO`))return`font/otf`;if(e.length>=1){let t=e[0];if(t===123||t===91)try{let t=new TextDecoder(`utf-8`).decode(e.slice(0,Math.min(1024,e.length)));return 
JSON.parse(t.trim()),`application/json`}catch{}}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`avif`:return`image/avif`;case`heic`:case`heif`:return`image/heic`;case`bmp`:return`image/bmp`;case`tiff`:case`tif`:return`image/tiff`;case`ico`:return`image/x-icon`;case`svg`:return`image/svg+xml`;case`mp4`:case`m4v`:return`video/mp4`;case`webm`:return`video/webm`;case`avi`:return`video/x-msvideo`;case`mov`:return`video/quicktime`;case`mkv`:return`video/x-matroska`;case`mp3`:return`audio/mpeg`;case`wav`:return`audio/wav`;case`flac`:return`audio/flac`;case`ogg`:return`audio/ogg`;case`m4a`:return`audio/mp4`;case`pdf`:return`application/pdf`;case`docx`:return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;case`xlsx`:return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;case`pptx`:return`application/vnd.openxmlformats-officedocument.presentationml.presentation`;case`zip`:return`application/zip`;case`rar`:return`application/x-rar-compressed`;case`7z`:return`application/x-7z-compressed`;case`gz`:case`gzip`:return`application/gzip`;case`tar`:return`application/x-tar`;case`woff`:return`font/woff`;case`woff2`:return`font/woff2`;case`ttf`:return`font/ttf`;case`otf`:return`font/otf`;case`txt`:return`text/plain`;case`json`:return`application/json`;case`xml`:return`application/xml`;case`html`:case`htm`:return`text/html`;case`css`:return`text/css`;case`js`:return`application/javascript`;case`csv`:return`text/csv`;default:return`application/octet-stream`}return`application/octet-stream`};function b(e,t){return e===t?!0:e.split(`/`)[0]===t.split(`/`)[0]}const x=l.gen(function*(){let e=yield*l.currentSpan.pipe(l.option);return p.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),S=(e,t,{dataStoreService:n,kvStore:i,eventEmitter:a,generateId:o})=>l.gen(function*(){let s=yield*x,c=new Date().toISOString();return yield*l.gen(function*(){let l=yield*n.getDataStore(e.storageId,t),u=yield*o.generateId(),{size:d,type:f,fileName:p,lastModified:m,metadata:h,flow:g}=e,_={};if(h)try{_=JSON.parse(h)}catch{_={}}let v={..._,type:f,fileName:p??``};m&&(v.lastModified=m.toString());let y={id:u,size:d,metadata:v,offset:0,creationDate:c,storage:{id:e.storageId,type:f,path:``,bucket:l.bucket},flow:g,traceContext:s},b=yield*l.create(y);return yield*i.set(u,b),yield*a.emit(u,{type:r.UPLOAD_STARTED,data:b,flow:b.flow}),b}).pipe(l.withSpan(`upload-create`,{attributes:{"upload.file_name":e.fileName??`unknown`,"upload.file_size":e.size?.toString()??`0`,"upload.storage_id":e.storageId,"upload.mime_type":e.type,"upload.has_flow":e.flow?`true`:`false`}}))}).pipe(l.withSpan(`upload`,{attributes:{"upload.file_name":e.fileName??`unknown`,"upload.file_size":e.size?.toString()??`0`,"upload.storage_id":e.storageId,"upload.mime_type":e.type,"upload.has_flow":e.flow?`true`:`false`}}),l.tap(e=>l.gen(function*(){if(yield*d.increment(d.counter(`upload_created_total`,{description:`Total number of uploads created`})),e.size){let t=d.histogram(`upload_file_size_bytes`,f.exponential({start:1024,factor:2,count:25}));yield*d.update(t,e.size)}let t=d.gauge(`active_uploads`);yield*d.increment(t)})),l.tap(t=>l.logInfo(`Upload 
created`).pipe(l.annotateLogs({"upload.id":t.id,"upload.file_name":e.fileName??`unknown`,"upload.file_size":e.size?.toString()??`0`,"upload.storage_id":e.storageId}))),l.tapError(t=>l.gen(function*(){yield*l.logError(`Upload creation failed`).pipe(l.annotateLogs({"upload.file_name":e.fileName??`unknown`,"upload.storage_id":e.storageId,error:String(t)})),yield*d.increment(d.counter(`upload_failed_total`,{description:`Total number of uploads that failed`}))})));function C(t){return h.fromReadableStream(()=>t,t=>new e({code:`UNKNOWN_ERROR`,status:500,body:String(t)}))}function w({data:t,upload:n,dataStore:i,maxFileSize:a,controller:o,eventEmitter:c,uploadProgressInterval:u=200}){return l.gen(function*(){let d=C(t);if(o.signal.aborted)return yield*l.fail(e.fromCode(`ABORTED`));let f=new AbortController,{signal:p}=f,h=()=>{f.abort()};return o.signal.addEventListener(`abort`,h,{once:!0}),yield*l.acquireUseRelease(l.sync(()=>({signal:p,onAbort:h})),({signal:t})=>l.gen(function*(){let e=yield*m.make(0),t=s.limit({maxSize:a})(d);return yield*i.write({stream:t,file_id:n.id,offset:n.offset},{onProgress:t=>{let i=Date.now();m.get(e).pipe(l.flatMap(a=>i-a>=u?l.gen(function*(){yield*m.set(e,i),yield*c.emit(n.id,{type:r.UPLOAD_PROGRESS,data:{id:n.id,progress:t,total:n.size??0},flow:n.flow})}):l.void),l.runPromise).catch(()=>{})}})}).pipe(l.catchAll(t=>t instanceof Error&&t.name===`AbortError`?l.fail(e.fromCode(`ABORTED`)):t instanceof e?l.fail(t):l.fail(e.fromCode(`FILE_WRITE_ERROR`,{cause:t})))),({onAbort:e})=>l.sync(()=>{o.signal.removeEventListener(`abort`,e)}))}).pipe(l.withSpan(`upload-write-to-store`,{attributes:{"upload.id":n.id,"upload.offset":n.offset.toString(),"upload.max_file_size":a.toString(),"upload.file_size":n.size?.toString()??`0`}}),l.tap(e=>l.logDebug(`Data written to store`).pipe(l.annotateLogs({"upload.id":n.id,"write.offset":e.toString(),"write.bytes_written":(e-n.offset).toString()}))),l.tapError(t=>l.logError(`Failed to write to store`).pipe(l.annotateLogs({"upload.id":n.id,"upload.offset":n.offset.toString(),error:t instanceof e?t.code:String(t)}))))}function T(e){return g.externalSpan({traceId:e.traceId,spanId:e.spanId,sampled:e.traceFlags===1})}const E=(e,t)=>{let n=new Date(e.creationDate).getTime(),r=Date.now()-n;return l.void.pipe(l.withSpan(`upload-complete`,{attributes:{"upload.id":e.id,"upload.size":e.size??0,"upload.total_duration_ms":r,"upload.storage_id":e.storage.id,"upload.file_name":e.metadata?.fileName??`unknown`,"upload.creation_date":e.creationDate,"upload.completion_date":new Date().toISOString()},parent:t}))},D=(e,t,n,{dataStoreService:i,kvStore:a,eventEmitter:o})=>l.gen(function*(){let s=yield*a.get(e),c=s.traceContext?T(s.traceContext):void 0;return yield*l.gen(function*(){let c=yield*i.getDataStore(s.storage.id,t);return s.offset=yield*w({dataStore:c,data:n,upload:s,maxFileSize:1e8,controller:new AbortController,uploadProgressInterval:200,eventEmitter:o}),yield*a.set(e,s),yield*o.emit(s.id,{type:r.UPLOAD_PROGRESS,data:{id:s.id,progress:s.offset,total:s.size??0},flow:s.flow}),s.size&&s.offset===s.size&&(yield*O({file:s,dataStore:c,eventEmitter:o}),s.traceContext&&(yield*E(s,T(s.traceContext)))),s}).pipe(l.withSpan(`upload-chunk`,{attributes:{"upload.id":e,"chunk.upload_id":e,"upload.has_trace_context":s.traceContext?`true`:`false`},parent:c}))}).pipe(l.tap(e=>l.gen(function*(){yield*d.increment(d.counter(`chunk_uploaded_total`,{description:`Total number of chunks uploaded`}));let 
t=e.offset,n=d.histogram(`chunk_size_bytes`,f.linear({start:262144,width:262144,count:20}));if(yield*d.update(n,t),e.size&&e.size>0){let e=t,n=d.gauge(`upload_throughput_bytes_per_second`);yield*d.set(n,e)}})),l.tap(e=>l.logDebug(`Chunk uploaded`).pipe(l.annotateLogs({"upload.id":e.id,"chunk.size":e.offset.toString(),"chunk.progress":e.size&&e.size>0?(e.offset/e.size*100).toFixed(2):`0`,"upload.total_size":e.size?.toString()??`0`}))),l.tapError(t=>l.logError(`Chunk upload failed`).pipe(l.annotateLogs({"upload.id":e,error:String(t)})))),O=({file:t,dataStore:n,eventEmitter:i})=>l.gen(function*(){let a=n.getCapabilities();if(a.maxValidationSize&&t.size&&t.size>a.maxValidationSize){yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_WARNING,data:{id:t.id,message:`File size (${t.size} bytes) exceeds max validation size (${a.maxValidationSize} bytes). Validation skipped.`},flow:t.flow});return}let s=yield*n.read(t.id);if(t.checksum&&t.checksumAlgorithm){let a=yield*o(s,t.checksumAlgorithm);if(a!==t.checksum)return yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_FAILED,data:{id:t.id,reason:`checksum_mismatch`,expected:t.checksum,actual:a},flow:t.flow}),yield*n.remove(t.id),yield*e.fromCode(`CHECKSUM_MISMATCH`,{body:`Checksum validation failed. Expected: ${t.checksum}, Got: ${a}`,details:{uploadId:t.id,expected:t.checksum,actual:a,algorithm:t.checksumAlgorithm}}).toEffect();yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_SUCCESS,data:{id:t.id,validationType:`checksum`,algorithm:t.checksumAlgorithm},flow:t.flow})}if(a.requiresMimeTypeValidation){let a=y(s),o=t.metadata?.type;if(o&&!b(o,a))return yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_FAILED,data:{id:t.id,reason:`mimetype_mismatch`,expected:o,actual:a},flow:t.flow}),yield*n.remove(t.id),yield*e.fromCode(`MIMETYPE_MISMATCH`,{body:`MIME type validation failed. 
Expected: ${o}, Detected: ${a}`,details:{uploadId:t.id,expected:o,actual:a}}).toEffect();yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_SUCCESS,data:{id:t.id,validationType:`mimetype`},flow:t.flow})}}).pipe(l.withSpan(`validate-upload`,{attributes:{"upload.id":t.id,"validation.checksum_provided":t.checksum?`true`:`false`,"validation.mime_required":n.getCapabilities().requiresMimeTypeValidation?`true`:`false`}})),k=t=>l.tryPromise({try:async()=>await fetch(t),catch:t=>e.fromCode(`UNKNOWN_ERROR`,{cause:t})}).pipe(l.withSpan(`upload-fetch-url`,{attributes:{"upload.url":t,"upload.operation":`fetch`}}),l.tap(e=>l.gen(function*(){yield*d.increment(d.counter(`upload_from_url_total`,{description:`Total number of URL-based uploads`})),e.ok&&(yield*d.increment(d.counter(`upload_from_url_success_total`,{description:`Total number of successful URL-based uploads`})))})),l.tap(e=>l.logInfo(`URL fetch completed`).pipe(l.annotateLogs({"upload.url":t,"response.status":e.status.toString(),"response.ok":e.ok.toString(),"response.content_length":e.headers.get(`content-length`)??`unknown`}))),l.tapError(e=>l.gen(function*(){yield*d.increment(d.counter(`upload_from_url_failed_total`,{description:`Total number of failed URL-based uploads`})),yield*l.logError(`URL fetch failed`).pipe(l.annotateLogs({"upload.url":t,error:String(e)}))}))),A=t=>l.tryPromise({try:async()=>await t.arrayBuffer(),catch:t=>e.fromCode(`UNKNOWN_ERROR`,{cause:t})}).pipe(l.withSpan(`upload-convert-to-buffer`,{attributes:{"upload.operation":`arrayBuffer`}}),l.tap(e=>l.logDebug(`Response converted to array buffer`).pipe(l.annotateLogs({"buffer.size":e.byteLength.toString()}))),l.tapError(e=>l.logError(`Failed to convert response to array buffer`).pipe(l.annotateLogs({error:String(e)}))));var j=class extends c.Tag(`UploadEngine`)(){};function M(){return l.gen(function*(){let e=yield*t,o=yield*i,s=yield*a,c=yield*n;return{upload:(t,n,r)=>l.gen(function*(){return yield*D((yield*S(t,n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:s})).id,n,r,{dataStoreService:c,kvStore:e,eventEmitter:o})}),uploadFromUrl:(t,n,r)=>l.gen(function*(){let i=yield*A(yield*k(r)),a=new ReadableStream({start(e){e.enqueue(new Uint8Array(i)),e.close()}});return yield*D((yield*S({...t,size:i.byteLength},n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:s})).id,n,a,{dataStoreService:c,kvStore:e,eventEmitter:o})}),createUpload:(t,n)=>l.gen(function*(){return yield*S(t,n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:s})}),uploadChunk:(t,n,r)=>l.gen(function*(){return yield*D(t,n,r,{dataStoreService:c,kvStore:e,eventEmitter:o})}),getUpload:t=>l.gen(function*(){return yield*e.get(t)}),read:(t,n)=>l.gen(function*(){let r=yield*e.get(t);return yield*(yield*c.getDataStore(r.storage.id,n)).read(t)}),readStream:(t,n,r)=>l.gen(function*(){let i=yield*e.get(t),a=yield*c.getDataStore(i.storage.id,n);if(a.getCapabilities().supportsStreamingRead&&a.readStream)return yield*l.logDebug(`Using streaming read for file ${t}`),yield*a.readStream(t,r);yield*l.logDebug(`Falling back to buffered read for file ${t} (streaming not supported)`);let o=yield*a.read(t);return h.succeed(o)}),uploadStream:(t,n,i)=>l.gen(function*(){let a=yield*c.getDataStore(t.storageId,n),u=a.getCapabilities(),d=yield*s.generateId();if(u.supportsStreamingWrite&&a.writeStream){yield*l.logDebug(`Using streaming write for file ${d}`);let n=typeof 
t.metadata==`string`?JSON.parse(t.metadata):t.metadata||{},s=Object.fromEntries(Object.entries(n).map(([e,t])=>[e,String(t)])),c={id:d,offset:0,size:t.size??0,storage:{id:t.storageId,type:a.getCapabilities().supportsStreamingWrite?`streaming`:`default`},metadata:n,creationDate:new Date().toISOString()};yield*e.set(d,c),yield*o.emit(d,{type:r.UPLOAD_STARTED,data:c});let u=yield*a.writeStream(d,{stream:i,contentType:t.type,sizeHint:t.sizeHint,metadata:s}),f={...c,size:u.size,offset:u.size,storage:{...c.storage,path:u.path},...u.url&&{url:u.url}};return yield*e.set(d,f),yield*o.emit(d,{type:r.UPLOAD_COMPLETE,data:f}),f}yield*l.logWarning(`Falling back to buffered upload for file ${d} (streaming write not supported)`);let f=[];yield*h.runForEach(i,e=>l.sync(()=>{f.push(e)}));let p=f.reduce((e,t)=>e+t.length,0),m=new Uint8Array(p),g=0;for(let e of f)m.set(e,g),g+=e.length;let _=new ReadableStream({start(e){e.enqueue(m),e.close()}});return yield*S({...t,size:p},n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:{generateId:()=>l.succeed(d)}}),yield*D(d,n,_,{dataStoreService:c,kvStore:e,eventEmitter:o})}),delete:(t,n)=>l.gen(function*(){let r=yield*e.get(t);yield*(yield*c.getDataStore(r.storage.id,n)).remove(t),yield*e.delete(t)}),getCapabilities:(e,t)=>l.gen(function*(){return(yield*c.getDataStore(e,t)).getCapabilities()}),subscribeToUploadEvents:(e,t)=>l.gen(function*(){yield*o.subscribe(e,t)}),unsubscribeFromUploadEvents:e=>l.gen(function*(){yield*o.unsubscribe(e)})}})}const N=u.effect(j,M());var P=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, 
concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};export{A as a,y as c,N as i,j as n,k as o,M as r,b as s,P as t};
//# sourceMappingURL=upload-C-C7hn1-.mjs.map
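The deleted bundle's sources (visible in the map below) include the magic-byte MIME helpers `detectMimeType` and `compareMimeTypes`. A minimal sketch of the lenient comparison the latter implements, per its own JSDoc:

```typescript
// Sketch of compareMimeTypes from the removed bundle: exact match first,
// otherwise compare only the major type (e.g. "image" from "image/png").
function compareMimeTypes(declared: string, detected: string): boolean {
  if (declared === detected) return true;
  return declared.split("/")[0] === detected.split("/")[0];
}

compareMimeTypes("image/png", "image/apng");      // true  (same major type)
compareMimeTypes("image/jpeg", "image/png");      // true  (both images)
compareMimeTypes("image/png", "application/pdf"); // false
```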
package/dist/upload-C-C7hn1-.mjs.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"upload-C-C7hn1-.mjs","names":["captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n>","parsedMetadata: Record<string, string>","metadataObject: Record<string, string>","file: UploadFile","onAbort","initialUpload: UploadFile","completedUpload: UploadFile","chunks: Uint8Array[]","capabilities: DataStoreCapabilities","validateUploadStrategy: (strategy: UploadStrategy) => boolean","reasoning: string[]","warnings: string[]","strategy: UploadStrategy","errors: string[]"],"sources":["../src/upload/mime.ts","../src/upload/create-upload.ts","../src/upload/convert-to-stream.ts","../src/upload/write-to-store.ts","../src/upload/upload-chunk.ts","../src/upload/upload-url.ts","../src/upload/upload-engine.ts","../src/upload/upload-strategy-negotiator.ts"],"sourcesContent":["/**\n * Helper to check if buffer matches a byte pattern at given offset\n */\nfunction checkBytes(\n buffer: Uint8Array,\n pattern: number[],\n offset = 0,\n): boolean {\n if (buffer.length < offset + pattern.length) return false;\n return pattern.every((byte, i) => buffer[offset + i] === byte);\n}\n\n/**\n * Helper to check if buffer matches a string pattern at given offset\n */\nfunction checkString(buffer: Uint8Array, str: string, offset = 0): boolean {\n if (buffer.length < offset + str.length) return false;\n for (let i = 0; i < str.length; i++) {\n if (buffer[offset + i] !== str.charCodeAt(i)) return false;\n }\n return true;\n}\n\n/**\n * Detect MIME type from buffer using magic bytes (file signatures).\n * Supports a wide range of common file types including images, videos, audio, documents, and archives.\n *\n * @param buffer - File content as Uint8Array\n * @param filename - Optional filename for extension-based fallback\n * @returns Detected MIME type or \"application/octet-stream\" if unknown\n */\nexport const detectMimeType = (\n buffer: Uint8Array,\n filename?: string,\n): string => {\n if (buffer.length === 0) {\n return \"application/octet-stream\";\n }\n\n // ===== IMAGES =====\n\n // PNG: 89 50 4E 47 0D 0A 1A 0A\n if (checkBytes(buffer, [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])) {\n return \"image/png\";\n }\n\n // JPEG: FF D8 FF\n if (checkBytes(buffer, [0xff, 0xd8, 0xff])) {\n return \"image/jpeg\";\n }\n\n // GIF87a or GIF89a\n if (checkString(buffer, \"GIF87a\") || checkString(buffer, \"GIF89a\")) {\n return \"image/gif\";\n }\n\n // WebP: RIFF....WEBP\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WEBP\", 8)\n ) {\n return \"image/webp\";\n }\n\n // AVIF: ....ftypavif or ....ftypavis\n if (\n buffer.length >= 12 &&\n checkBytes(buffer, [0x00, 0x00, 0x00], 0) &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"avif\", 8) || checkString(buffer, \"avis\", 8))\n ) {\n return \"image/avif\";\n }\n\n // HEIC/HEIF: ....ftypheic or ....ftypheif or ....ftypmif1\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"heic\", 8) ||\n checkString(buffer, \"heif\", 8) ||\n checkString(buffer, \"mif1\", 8))\n ) {\n return \"image/heic\";\n }\n\n // BMP: 42 4D\n if (checkBytes(buffer, [0x42, 0x4d])) {\n return \"image/bmp\";\n }\n\n // TIFF (little-endian): 49 49 2A 00\n if (checkBytes(buffer, [0x49, 0x49, 0x2a, 0x00])) {\n return \"image/tiff\";\n }\n\n // TIFF (big-endian): 4D 4D 00 2A\n if (checkBytes(buffer, [0x4d, 0x4d, 0x00, 0x2a])) {\n return \"image/tiff\";\n }\n\n // ICO: 00 00 01 00\n if (checkBytes(buffer, [0x00, 0x00, 0x01, 0x00])) {\n return 
\"image/x-icon\";\n }\n\n // SVG (XML-based, check for <svg or <?xml)\n if (buffer.length >= 5) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n if (\n text.includes(\"<svg\") ||\n (text.includes(\"<?xml\") && text.includes(\"<svg\"))\n ) {\n return \"image/svg+xml\";\n }\n }\n\n // ===== VIDEOS =====\n\n // MP4/M4V/M4A: ....ftyp\n if (buffer.length >= 12 && checkString(buffer, \"ftyp\", 4)) {\n const subtype = new TextDecoder().decode(buffer.slice(8, 12));\n if (\n subtype.startsWith(\"mp4\") ||\n subtype.startsWith(\"M4\") ||\n subtype.startsWith(\"isom\")\n ) {\n return \"video/mp4\";\n }\n }\n\n // WebM: 1A 45 DF A3\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3])) {\n return \"video/webm\";\n }\n\n // AVI: RIFF....AVI\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"AVI \", 8)\n ) {\n return \"video/x-msvideo\";\n }\n\n // MOV (QuickTime): ....moov or ....mdat or ....free\n if (\n buffer.length >= 8 &&\n (checkString(buffer, \"moov\", 4) ||\n checkString(buffer, \"mdat\", 4) ||\n checkString(buffer, \"free\", 4))\n ) {\n return \"video/quicktime\";\n }\n\n // MKV: 1A 45 DF A3 (same as WebM but check for Matroska)\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3]) && buffer.length >= 100) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, 100),\n );\n if (text.includes(\"matroska\")) {\n return \"video/x-matroska\";\n }\n }\n\n // ===== AUDIO =====\n\n // MP3: FF FB or FF F3 or FF F2 or ID3\n if (\n checkBytes(buffer, [0xff, 0xfb]) ||\n checkBytes(buffer, [0xff, 0xf3]) ||\n checkBytes(buffer, [0xff, 0xf2]) ||\n checkString(buffer, \"ID3\")\n ) {\n return \"audio/mpeg\";\n }\n\n // WAV: RIFF....WAVE\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WAVE\", 8)\n ) {\n return \"audio/wav\";\n }\n\n // FLAC: 66 4C 61 43 (fLaC)\n if (checkString(buffer, \"fLaC\")) {\n return \"audio/flac\";\n }\n\n // OGG: 4F 67 67 53 (OggS)\n if (checkString(buffer, \"OggS\")) {\n return \"audio/ogg\";\n }\n\n // M4A: ....ftypM4A\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n checkString(buffer, \"M4A\", 8)\n ) {\n return \"audio/mp4\";\n }\n\n // ===== DOCUMENTS =====\n\n // PDF: 25 50 44 46 (%PDF)\n if (checkString(buffer, \"%PDF\")) {\n return \"application/pdf\";\n }\n\n // ===== ARCHIVES =====\n\n // ZIP: 50 4B 03 04 or 50 4B 05 06 (empty archive) or 50 4B 07 08 (spanned archive)\n if (\n checkBytes(buffer, [0x50, 0x4b, 0x03, 0x04]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x05, 0x06]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x07, 0x08])\n ) {\n // Could be ZIP, DOCX, XLSX, PPTX, JAR, APK, etc.\n // Check for Office formats\n if (buffer.length >= 1024) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(buffer);\n if (text.includes(\"word/\"))\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n if (text.includes(\"xl/\"))\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n if (text.includes(\"ppt/\"))\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n }\n return \"application/zip\";\n }\n\n // RAR: 52 61 72 21 1A 07 (Rar!)\n if (checkBytes(buffer, [0x52, 0x61, 0x72, 0x21, 0x1a, 0x07])) {\n return \"application/x-rar-compressed\";\n }\n\n // 7Z: 37 7A BC AF 27 1C\n if (checkBytes(buffer, [0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c])) {\n return 
\"application/x-7z-compressed\";\n }\n\n // GZIP: 1F 8B\n if (checkBytes(buffer, [0x1f, 0x8b])) {\n return \"application/gzip\";\n }\n\n // TAR (ustar): \"ustar\" at offset 257\n if (buffer.length >= 262 && checkString(buffer, \"ustar\", 257)) {\n return \"application/x-tar\";\n }\n\n // ===== FONTS =====\n\n // WOFF: 77 4F 46 46 (wOFF)\n if (checkString(buffer, \"wOFF\")) {\n return \"font/woff\";\n }\n\n // WOFF2: 77 4F 46 32 (wOF2)\n if (checkString(buffer, \"wOF2\")) {\n return \"font/woff2\";\n }\n\n // TTF: 00 01 00 00 00\n if (checkBytes(buffer, [0x00, 0x01, 0x00, 0x00, 0x00])) {\n return \"font/ttf\";\n }\n\n // OTF: 4F 54 54 4F (OTTO)\n if (checkString(buffer, \"OTTO\")) {\n return \"font/otf\";\n }\n\n // ===== TEXT =====\n\n // JSON (basic check for { or [)\n if (buffer.length >= 1) {\n const firstByte = buffer[0];\n if (firstByte === 0x7b || firstByte === 0x5b) {\n // { or [\n try {\n const text = new TextDecoder(\"utf-8\").decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n JSON.parse(text.trim());\n return \"application/json\";\n } catch {\n // Not valid JSON\n }\n }\n }\n\n // Fallback to extension-based detection\n if (filename) {\n const ext = filename.split(\".\").pop()?.toLowerCase();\n switch (ext) {\n // Images\n case \"jpg\":\n case \"jpeg\":\n return \"image/jpeg\";\n case \"png\":\n return \"image/png\";\n case \"gif\":\n return \"image/gif\";\n case \"webp\":\n return \"image/webp\";\n case \"avif\":\n return \"image/avif\";\n case \"heic\":\n case \"heif\":\n return \"image/heic\";\n case \"bmp\":\n return \"image/bmp\";\n case \"tiff\":\n case \"tif\":\n return \"image/tiff\";\n case \"ico\":\n return \"image/x-icon\";\n case \"svg\":\n return \"image/svg+xml\";\n\n // Videos\n case \"mp4\":\n case \"m4v\":\n return \"video/mp4\";\n case \"webm\":\n return \"video/webm\";\n case \"avi\":\n return \"video/x-msvideo\";\n case \"mov\":\n return \"video/quicktime\";\n case \"mkv\":\n return \"video/x-matroska\";\n\n // Audio\n case \"mp3\":\n return \"audio/mpeg\";\n case \"wav\":\n return \"audio/wav\";\n case \"flac\":\n return \"audio/flac\";\n case \"ogg\":\n return \"audio/ogg\";\n case \"m4a\":\n return \"audio/mp4\";\n\n // Documents\n case \"pdf\":\n return \"application/pdf\";\n case \"docx\":\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n case \"xlsx\":\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n case \"pptx\":\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n\n // Archives\n case \"zip\":\n return \"application/zip\";\n case \"rar\":\n return \"application/x-rar-compressed\";\n case \"7z\":\n return \"application/x-7z-compressed\";\n case \"gz\":\n case \"gzip\":\n return \"application/gzip\";\n case \"tar\":\n return \"application/x-tar\";\n\n // Fonts\n case \"woff\":\n return \"font/woff\";\n case \"woff2\":\n return \"font/woff2\";\n case \"ttf\":\n return \"font/ttf\";\n case \"otf\":\n return \"font/otf\";\n\n // Text\n case \"txt\":\n return \"text/plain\";\n case \"json\":\n return \"application/json\";\n case \"xml\":\n return \"application/xml\";\n case \"html\":\n case \"htm\":\n return \"text/html\";\n case \"css\":\n return \"text/css\";\n case \"js\":\n return \"application/javascript\";\n case \"csv\":\n return \"text/csv\";\n\n default:\n return \"application/octet-stream\";\n }\n }\n\n return \"application/octet-stream\";\n};\n\n/**\n * Compare two MIME types with lenient matching.\n * Matches on major type 
(e.g., \"image/*\") to allow for minor variations.\n *\n * @param declared - MIME type provided by client\n * @param detected - MIME type detected from file content\n * @returns true if MIME types are compatible\n *\n * @example\n * compareMimeTypes(\"image/png\", \"image/apng\") // true\n * compareMimeTypes(\"image/jpeg\", \"image/png\") // true (both images)\n * compareMimeTypes(\"image/png\", \"application/pdf\") // false\n */\nexport function compareMimeTypes(declared: string, detected: string): boolean {\n // Exact match\n if (declared === detected) {\n return true;\n }\n\n // Extract major types (e.g., \"image\" from \"image/png\")\n const declaredMajor = declared.split(\"/\")[0];\n const detectedMajor = detected.split(\"/\")[0];\n\n // Compare major types for lenient matching\n return declaredMajor === detectedMajor;\n}\n","import { Effect, Metric, MetricBoundaries, Option } from \"effect\";\nimport {\n type EventEmitter,\n type InputFile,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport type { GenerateIdShape } from \"../utils/generate-id\";\n\n/**\n * Captures the current Effect trace context for distributed tracing.\n *\n * Uses Effect's `currentSpan` to get the active span, which is more reliable\n * than OpenTelemetry's `trace.getActiveSpan()` when using @effect/opentelemetry\n * because Effect manages its own span context that may not be synchronized\n * with OpenTelemetry's global context.\n *\n * @returns Effect that yields TraceContext if there's an active span, undefined otherwise\n */\nconst captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n> = Effect.gen(function* () {\n const spanOption = yield* Effect.currentSpan.pipe(Effect.option);\n return Option.match(spanOption, {\n onNone: () => undefined,\n onSome: (span) => ({\n traceId: span.traceId,\n spanId: span.spanId,\n traceFlags: span.sampled ? 
1 : 0,\n }),\n });\n});\n\n/**\n * Creates a new upload and initializes it in the storage system.\n *\n * This function handles the initial upload creation process including:\n * - Generating a unique upload ID\n * - Routing to appropriate data store based on storage ID\n * - Creating the upload record in the data store\n * - Storing upload metadata in KV store\n * - Emitting upload started events\n * - Parsing and validating metadata\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for upload creation, file sizes, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param inputFile - Input file configuration including storage, size, type, etc.\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for upload lifecycle events\n * @param generateId - ID generator for creating unique upload identifiers\n * @returns Effect that yields the created UploadFile\n *\n * @example\n * ```typescript\n * // Create a new upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\",\n * metadata: JSON.stringify({ category: \"photos\" })\n * };\n *\n * const createEffect = createUpload(\n * inputFile,\n * \"client-123\",\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter,\n * generateId\n * }\n * );\n *\n * // Run with dependencies\n * const upload = await Effect.runPromise(\n * createEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer),\n * Effect.provide(generateIdLayer)\n * )\n * );\n * ```\n */\nexport const createUpload = (\n inputFile: InputFile,\n clientId: string | null,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId: GenerateIdShape;\n },\n) =>\n Effect.gen(function* () {\n // Capture the parent \"upload\" span's trace context FIRST\n // This allows subsequent chunk uploads to be siblings of upload-create\n // under the same parent \"upload\" span\n const traceContext = yield* captureTraceContextEffect;\n const creationDate = new Date().toISOString();\n\n // Now run the actual upload creation inside a child span\n const fileCreated = yield* Effect.gen(function* () {\n // Get datastore using Effect\n const dataStore = yield* dataStoreService.getDataStore(\n inputFile.storageId,\n clientId,\n );\n\n const id = yield* generateId.generateId();\n const { size, type, fileName, lastModified, metadata, flow } = inputFile;\n\n let parsedMetadata: Record<string, string> = {};\n if (metadata) {\n try {\n parsedMetadata = JSON.parse(metadata) as Record<string, string>;\n } catch {\n parsedMetadata = {};\n }\n }\n\n const metadataObject: Record<string, string> = {\n ...parsedMetadata,\n type,\n fileName: fileName ?? 
\"\",\n };\n if (lastModified) {\n metadataObject.lastModified = lastModified.toString();\n }\n\n const file: UploadFile = {\n id,\n size,\n metadata: metadataObject,\n offset: 0,\n creationDate,\n storage: {\n id: inputFile.storageId,\n type,\n path: \"\",\n bucket: dataStore.bucket,\n },\n flow,\n traceContext,\n };\n\n // Create file using Effect\n const created = yield* dataStore.create(file);\n\n // Store in KV store\n yield* kvStore.set(id, created);\n\n // Emit event\n yield* eventEmitter.emit(id, {\n type: UploadEventType.UPLOAD_STARTED,\n data: created,\n flow: created.flow,\n });\n\n return created;\n }).pipe(\n // upload-create is a CHILD span of the parent \"upload\" span\n Effect.withSpan(\"upload-create\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n );\n\n return fileCreated;\n }).pipe(\n // Parent \"upload\" span wraps the entire upload lifecycle\n // upload-create and upload-chunk will be children of this span\n Effect.withSpan(\"upload\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n // Track upload creation metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment upload created counter\n yield* Metric.increment(\n Metric.counter(\"upload_created_total\", {\n description: \"Total number of uploads created\",\n }),\n );\n\n // Record file size\n if (file.size) {\n const fileSizeHistogram = Metric.histogram(\n \"upload_file_size_bytes\",\n MetricBoundaries.exponential({\n start: 1024,\n factor: 2,\n count: 25,\n }),\n );\n yield* Metric.update(fileSizeHistogram, file.size);\n }\n\n // Track active uploads gauge\n const activeUploadsGauge = Metric.gauge(\"active_uploads\");\n yield* Metric.increment(activeUploadsGauge);\n }),\n ),\n // Add structured logging\n Effect.tap((file) =>\n Effect.logInfo(\"Upload created\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Log error\n yield* Effect.logError(\"Upload creation failed\").pipe(\n Effect.annotateLogs({\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.storage_id\": inputFile.storageId,\n error: String(error),\n }),\n );\n\n // Track failed upload metric\n yield* Metric.increment(\n Metric.counter(\"upload_failed_total\", {\n description: \"Total number of uploads that failed\",\n }),\n );\n }),\n ),\n );\n","import { Stream } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Converts a ReadableStream to an Effect Stream.\n *\n * This utility function wraps a ReadableStream in an Effect Stream, providing\n * proper error handling and integration with the Effect ecosystem. 
It's used\n * throughout the upload system to convert raw streams into Effect-compatible\n * streams for processing.\n *\n * The function handles:\n * - Stream conversion with proper error mapping\n * - UploadistaError creation for stream errors\n * - Integration with Effect Stream processing\n *\n * @param data - The ReadableStream to convert\n * @returns Effect Stream that can be processed with Effect operations\n *\n * @example\n * ```typescript\n * // Convert a file stream to Effect Stream\n * const fileStream = new ReadableStream(...);\n * const effectStream = convertToStream(fileStream);\n *\n * // Process with Effect operations\n * const processedStream = effectStream.pipe(\n * Stream.map((chunk) => processChunk(chunk)),\n * Stream.filter((chunk) => chunk.length > 0)\n * );\n *\n * // Run the stream\n * await Stream.runForEach(processedStream, (chunk) =>\n * Effect.logInfo(`Processed chunk: ${chunk.length} bytes`)\n * );\n * ```\n */\nexport function convertToStream<T>(data: ReadableStream<T>) {\n return Stream.fromReadableStream(\n () => data,\n (error) =>\n new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: String(error),\n }),\n );\n}\n","import { Effect, Ref } from \"effect\";\nimport { UploadistaError } from \"../errors\";\nimport { StreamLimiterEffect } from \"../streams/stream-limiter\";\nimport type { DataStore, UploadEvent, UploadFile } from \"../types\";\nimport { type EventEmitter, UploadEventType } from \"../types\";\nimport { convertToStream } from \"./convert-to-stream\";\n\n/**\n * Configuration options for writing data to a data store.\n *\n * @property data - The stream of data to write\n * @property upload - Upload file metadata\n * @property dataStore - Target data store for writing\n * @property maxFileSize - Maximum allowed file size in bytes\n * @property controller - AbortController for cancellation\n * @property eventEmitter - Event emitter for progress tracking\n * @property uploadProgressInterval - Progress emission interval in milliseconds (default: 200)\n */\ntype WriteToStoreOptions = {\n data: ReadableStream<Uint8Array>;\n upload: UploadFile;\n dataStore: DataStore<UploadFile>;\n maxFileSize: number;\n controller: AbortController;\n eventEmitter: EventEmitter<UploadEvent>;\n uploadProgressInterval?: number;\n};\n\n/**\n * Writes data stream to a data store with progress tracking and size limits.\n *\n * This function handles the core data writing logic including:\n * - Stream conversion and processing\n * - File size validation and limiting\n * - Progress tracking with throttled events\n * - Abort signal handling for cancellation\n * - Error handling and cleanup\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Structured logging for debugging and monitoring\n * - Progress event emission with throttling\n * - Error handling with proper UploadistaError types\n *\n * @param data - The stream of data to write to storage\n * @param upload - Upload file metadata containing ID, offset, etc.\n * @param dataStore - Target data store for writing the data\n * @param maxFileSize - Maximum allowed file size in bytes\n * @param controller - AbortController for handling cancellation\n * @param eventEmitter - Event emitter for progress tracking\n * @param uploadProgressInterval - Progress emission interval in milliseconds (default: 200)\n * @returns Effect that yields the number of bytes written\n *\n * @example\n * ```typescript\n * // Write data to store with progress 
tracking\n * const writeEffect = writeToStore({\n * data: fileStream,\n * upload: uploadMetadata,\n * dataStore: s3DataStore,\n * maxFileSize: 100_000_000, // 100MB\n * controller: abortController,\n * eventEmitter: progressEmitter,\n * uploadProgressInterval: 500 // Emit progress every 500ms\n * });\n *\n * // Run with error handling\n * const bytesWritten = await Effect.runPromise(\n * writeEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to write to store\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport function writeToStore({\n data,\n upload,\n dataStore,\n maxFileSize,\n controller,\n eventEmitter,\n uploadProgressInterval = 200,\n}: WriteToStoreOptions) {\n return Effect.gen(function* () {\n const stream = convertToStream(data);\n // Check if already aborted\n if (controller.signal.aborted) {\n return yield* Effect.fail(UploadistaError.fromCode(\"ABORTED\"));\n }\n\n // Create an AbortController to manage the stream pipeline\n const abortController = new AbortController();\n const { signal } = abortController;\n\n // Set up abort handling\n const onAbort = () => {\n // stream.cancel();\n abortController.abort();\n };\n\n controller.signal.addEventListener(\"abort\", onAbort, { once: true });\n\n return yield* Effect.acquireUseRelease(\n Effect.sync(() => ({ signal, onAbort })),\n ({ signal: _signal }) =>\n Effect.gen(function* () {\n // Create a ref to track the last progress emission time for throttling\n const lastEmitTime = yield* Ref.make(0);\n\n // Create the stream limiter\n const limiter = StreamLimiterEffect.limit({\n maxSize: maxFileSize,\n });\n\n // Pipe the data through the limiter\n const limitedStream = limiter(stream);\n\n // Write to the data store with progress tracking\n const offset = yield* dataStore.write(\n {\n stream: limitedStream,\n file_id: upload.id,\n offset: upload.offset,\n },\n {\n onProgress: (newOffset: number) => {\n // Simple throttling using timestamp check\n const now = Date.now();\n Ref.get(lastEmitTime)\n .pipe(\n Effect.flatMap((lastTime) => {\n if (now - lastTime >= uploadProgressInterval) {\n return Effect.gen(function* () {\n yield* Ref.set(lastEmitTime, now);\n yield* eventEmitter.emit(upload.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: upload.id,\n progress: newOffset,\n total: upload.size ?? 0,\n },\n flow: upload.flow,\n });\n });\n }\n return Effect.void;\n }),\n Effect.runPromise,\n )\n .catch(() => {\n // Ignore errors during progress emission\n });\n },\n },\n );\n\n return offset;\n }).pipe(\n Effect.catchAll((error) => {\n if (error instanceof Error && error.name === \"AbortError\") {\n return Effect.fail(UploadistaError.fromCode(\"ABORTED\"));\n }\n if (error instanceof UploadistaError) {\n return Effect.fail(error);\n }\n return Effect.fail(\n UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error }),\n );\n }),\n ),\n ({ onAbort }) =>\n Effect.sync(() => {\n controller.signal.removeEventListener(\"abort\", onAbort);\n }),\n );\n }).pipe(\n // Add tracing span for write operation\n Effect.withSpan(\"upload-write-to-store\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.offset\": upload.offset.toString(),\n \"upload.max_file_size\": maxFileSize.toString(),\n \"upload.file_size\": upload.size?.toString() ?? 
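// --- Illustrative aside (not part of the package source) ---------------------
// The timestamp throttle used in the onProgress callback above, isolated as a
// reusable combinator (a sketch; the name and shape are assumptions).
import { Effect, Ref } from "effect";

const makeThrottledEmit = <A>(
  intervalMs: number,
  emit: (value: A) => Effect.Effect<void>,
) =>
  Effect.gen(function* () {
    const lastEmit = yield* Ref.make(0);
    return (value: A) =>
      Effect.gen(function* () {
        const now = Date.now();
        const last = yield* Ref.get(lastEmit);
        if (now - last >= intervalMs) {
          yield* Ref.set(lastEmit, now);
          yield* emit(value); // at most one emit per interval window
        }
      });
  });
// -----------------------------------------------------------------------------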
\"0\",\n },\n }),\n // Add structured logging for write operation\n Effect.tap((offset) =>\n Effect.logDebug(\"Data written to store\").pipe(\n Effect.annotateLogs({\n \"upload.id\": upload.id,\n \"write.offset\": offset.toString(),\n \"write.bytes_written\": (offset - upload.offset).toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to write to store\").pipe(\n Effect.annotateLogs({\n \"upload.id\": upload.id,\n \"upload.offset\": upload.offset.toString(),\n error: error instanceof UploadistaError ? error.code : String(error),\n }),\n ),\n ),\n );\n}\n","import { Effect, Metric, MetricBoundaries, Tracer } from \"effect\";\nimport { UploadistaError } from \"../errors/uploadista-error\";\nimport {\n type DataStore,\n type EventEmitter,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport { computeChecksum } from \"../utils/checksum\";\nimport { compareMimeTypes, detectMimeType } from \"./mime\";\nimport { writeToStore } from \"./write-to-store\";\n\n/**\n * Creates an ExternalSpan from stored trace context.\n * Used for linking chunk uploads to the original upload trace.\n */\nfunction createExternalSpan(traceContext: UploadFileTraceContext) {\n return Tracer.externalSpan({\n traceId: traceContext.traceId,\n spanId: traceContext.spanId,\n sampled: traceContext.traceFlags === 1,\n });\n}\n\n/**\n * Creates an \"upload-complete\" span Effect that captures the full upload duration.\n * This span is a sibling of upload-create and upload-chunk under the parent \"upload\" span.\n *\n * Note: The span's visual duration in tracing UIs will be short (instant), but the\n * actual upload duration is captured in the \"upload.total_duration_ms\" attribute.\n *\n * @param file - The completed upload file\n * @param parentSpan - The parent span to link to\n * @returns Effect that creates and completes the span\n */\nconst createUploadCompleteSpanEffect = (\n file: UploadFile,\n parentSpan: Tracer.ExternalSpan,\n): Effect.Effect<void> => {\n const creationTime = new Date(file.creationDate as string).getTime();\n const totalDurationMs = Date.now() - creationTime;\n\n return Effect.void.pipe(\n Effect.withSpan(\"upload-complete\", {\n attributes: {\n \"upload.id\": file.id,\n \"upload.size\": file.size ?? 0,\n \"upload.total_duration_ms\": totalDurationMs,\n \"upload.storage_id\": file.storage.id,\n \"upload.file_name\": file.metadata?.fileName ?? 
\"unknown\",\n \"upload.creation_date\": file.creationDate as string,\n \"upload.completion_date\": new Date().toISOString(),\n },\n parent: parentSpan,\n }),\n );\n};\n\n/**\n * Uploads a chunk of data for an existing upload.\n *\n * This function handles the core chunk upload logic including:\n * - Retrieving upload metadata from KV store\n * - Routing to appropriate data store based on storage ID\n * - Writing chunk data to storage with progress tracking\n * - Updating upload offset and metadata\n * - Emitting progress events\n * - Validating upload completion (checksum, MIME type)\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for chunk size, throughput, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param uploadId - Unique identifier for the upload\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param chunk - ReadableStream containing the chunk data to upload\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for progress and validation events\n * @returns Effect that yields the updated UploadFile with new offset\n *\n * @example\n * ```typescript\n * // Upload a chunk for an existing upload\n * const uploadChunkEffect = uploadChunk(\n * \"upload-123\",\n * \"client-456\",\n * chunkStream,\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter\n * }\n * );\n *\n * // Run with dependencies\n * const result = await Effect.runPromise(\n * uploadChunkEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * )\n * );\n * ```\n */\nexport const uploadChunk = (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n },\n) =>\n Effect.gen(function* () {\n // Get file from KV store first to check for trace context\n const file = yield* kvStore.get(uploadId);\n\n // Create external span from stored trace context if available\n // This links chunk uploads to the original upload trace\n const parentSpan = file.traceContext\n ? createExternalSpan(file.traceContext)\n : undefined;\n\n // Core chunk processing logic\n const processChunk = Effect.gen(function* () {\n // Get datastore\n const dataStore = yield* dataStoreService.getDataStore(\n file.storage.id,\n clientId,\n );\n\n // Note: AbortController could be used for cancellation if needed\n\n // Write to store using writeToStore Effect\n const controller = new AbortController();\n\n const chunkSize = yield* writeToStore({\n dataStore,\n data: chunk,\n upload: file,\n maxFileSize: 100_000_000,\n controller,\n uploadProgressInterval: 200,\n eventEmitter,\n });\n\n file.offset = chunkSize;\n\n // Update KV store\n yield* kvStore.set(uploadId, file);\n\n // Emit progress event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: file.id,\n progress: file.offset,\n total: file.size ?? 
0,\n },\n flow: file.flow,\n });\n\n // Check if upload is complete and run validation\n if (file.size && file.offset === file.size) {\n yield* validateUpload({\n file,\n dataStore,\n eventEmitter,\n });\n\n // Create \"upload-complete\" span that captures the full upload duration\n // This span shows the total time from upload creation to completion\n if (file.traceContext) {\n const completeParentSpan = createExternalSpan(file.traceContext);\n yield* createUploadCompleteSpanEffect(file, completeParentSpan);\n }\n }\n\n return file;\n }).pipe(\n // Add tracing span for chunk upload with parent from stored trace context\n Effect.withSpan(\"upload-chunk\", {\n attributes: {\n \"upload.id\": uploadId,\n \"chunk.upload_id\": uploadId,\n \"upload.has_trace_context\": file.traceContext ? \"true\" : \"false\",\n },\n parent: parentSpan,\n }),\n );\n\n return yield* processChunk;\n }).pipe(\n // Track chunk upload metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment chunk uploaded counter\n yield* Metric.increment(\n Metric.counter(\"chunk_uploaded_total\", {\n description: \"Total number of chunks uploaded\",\n }),\n );\n\n // Record chunk size\n const chunkSize = file.offset;\n const chunkSizeHistogram = Metric.histogram(\n \"chunk_size_bytes\",\n MetricBoundaries.linear({\n start: 262_144,\n width: 262_144,\n count: 20,\n }),\n );\n yield* Metric.update(chunkSizeHistogram, chunkSize);\n\n // Update throughput gauge\n if (file.size && file.size > 0) {\n const throughput = chunkSize; // bytes processed\n const throughputGauge = Metric.gauge(\n \"upload_throughput_bytes_per_second\",\n );\n yield* Metric.set(throughputGauge, throughput);\n }\n }),\n ),\n // Add structured logging for chunk progress\n Effect.tap((file) =>\n Effect.logDebug(\"Chunk uploaded\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"chunk.size\": file.offset.toString(),\n \"chunk.progress\":\n file.size && file.size > 0\n ? ((file.offset / file.size) * 100).toFixed(2)\n : \"0\",\n \"upload.total_size\": file.size?.toString() ?? 
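// --- Illustrative aside (not part of the package source) ---------------------
// The two bucket shapes this module uses, side by side. Linear buckets suit
// chunk sizes (fixed 256 KiB steps up to 5 MiB); exponential buckets suit file
// sizes (1 KiB doubling through roughly 16 GiB). Values match the metrics above.
import { Metric, MetricBoundaries } from "effect";

const chunkSizeBuckets = Metric.histogram(
  "chunk_size_bytes",
  MetricBoundaries.linear({ start: 262_144, width: 262_144, count: 20 }),
);

const fileSizeBuckets = Metric.histogram(
  "upload_file_size_bytes",
  MetricBoundaries.exponential({ start: 1024, factor: 2, count: 25 }),
);
// -----------------------------------------------------------------------------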
\"0\",\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Chunk upload failed\").pipe(\n Effect.annotateLogs({\n \"upload.id\": uploadId,\n error: String(error),\n }),\n ),\n ),\n );\n\n/**\n * Validates an upload after completion.\n *\n * Performs comprehensive validation including:\n * - Checksum validation (if provided) using the specified algorithm\n * - MIME type validation (if required by data store capabilities)\n * - File size validation against data store limits\n *\n * Validation results are emitted as events and failures result in:\n * - Cleanup of uploaded data from storage\n * - Removal of metadata from KV store\n * - Appropriate error responses\n *\n * The function respects data store capabilities for validation limits\n * and provides detailed error information for debugging.\n *\n * @param file - The upload file to validate\n * @param dataStore - Data store containing the uploaded file\n * @param eventEmitter - Event emitter for validation events\n * @returns Effect that completes validation or fails with UploadistaError\n *\n * @example\n * ```typescript\n * // Validate upload after completion\n * const validationEffect = validateUpload({\n * file: completedUpload,\n * dataStore: s3DataStore,\n * eventEmitter: progressEmitter\n * });\n *\n * // Run validation\n * await Effect.runPromise(validationEffect);\n * ```\n */\nconst validateUpload = ({\n file,\n dataStore,\n eventEmitter,\n}: {\n file: UploadFile;\n dataStore: DataStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n}): Effect.Effect<void, UploadistaError, never> =>\n Effect.gen(function* () {\n const capabilities = dataStore.getCapabilities();\n\n // Check if file exceeds max validation size\n if (\n capabilities.maxValidationSize &&\n file.size &&\n file.size > capabilities.maxValidationSize\n ) {\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_WARNING,\n data: {\n id: file.id,\n message: `File size (${file.size} bytes) exceeds max validation size (${capabilities.maxValidationSize} bytes). Validation skipped.`,\n },\n flow: file.flow,\n });\n return;\n }\n\n // Read file from datastore for validation\n const fileBytes = yield* dataStore.read(file.id);\n\n // Validate checksum if provided\n if (file.checksum && file.checksumAlgorithm) {\n const computedChecksum = yield* computeChecksum(\n fileBytes,\n file.checksumAlgorithm,\n );\n\n if (computedChecksum !== file.checksum) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"checksum_mismatch\",\n expected: file.checksum,\n actual: computedChecksum,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with checksum mismatch error\n return yield* UploadistaError.fromCode(\"CHECKSUM_MISMATCH\", {\n body: `Checksum validation failed. 
Expected: ${file.checksum}, Got: ${computedChecksum}`,\n details: {\n uploadId: file.id,\n expected: file.checksum,\n actual: computedChecksum,\n algorithm: file.checksumAlgorithm,\n },\n }).toEffect();\n }\n\n // Emit checksum validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"checksum\",\n algorithm: file.checksumAlgorithm,\n },\n flow: file.flow,\n });\n }\n\n // Validate MIME type if required by capabilities\n if (capabilities.requiresMimeTypeValidation) {\n const detectedMimeType = detectMimeType(fileBytes);\n const declaredMimeType = file.metadata?.type as string | undefined;\n\n if (\n declaredMimeType &&\n !compareMimeTypes(declaredMimeType, detectedMimeType)\n ) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"mimetype_mismatch\",\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with MIME type mismatch error\n return yield* UploadistaError.fromCode(\"MIMETYPE_MISMATCH\", {\n body: `MIME type validation failed. Expected: ${declaredMimeType}, Detected: ${detectedMimeType}`,\n details: {\n uploadId: file.id,\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n }).toEffect();\n }\n\n // Emit MIME type validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"mimetype\",\n },\n flow: file.flow,\n });\n }\n }).pipe(\n Effect.withSpan(\"validate-upload\", {\n attributes: {\n \"upload.id\": file.id,\n \"validation.checksum_provided\": file.checksum ? \"true\" : \"false\",\n \"validation.mime_required\": dataStore.getCapabilities()\n .requiresMimeTypeValidation\n ? 
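// --- Illustrative aside (not part of the package source) ---------------------
// Standalone checksum verification mirroring the validator above (a sketch;
// the algorithm parameter's exact type is assumed to be a string here).
import { Effect } from "effect";
import { computeChecksum } from "../utils/checksum";

const checksumMatches = (
  bytes: Uint8Array,
  expected: string,
  algorithm: string,
) =>
  Effect.map(computeChecksum(bytes, algorithm), (actual) => actual === expected);
// -----------------------------------------------------------------------------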
\"true\"\n : \"false\",\n },\n }),\n );\n","import { Effect, Metric } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Fetches a file from a remote URL.\n *\n * This function handles HTTP requests to remote URLs for file uploads,\n * including proper error handling, metrics tracking, and observability.\n *\n * Features:\n * - HTTP request with proper error handling\n * - Effect tracing for performance monitoring\n * - Metrics tracking for URL-based uploads\n * - Structured logging for debugging\n * - Response validation and error reporting\n *\n * @param url - The remote URL to fetch the file from\n * @returns Effect that yields the Response object\n *\n * @example\n * ```typescript\n * // Fetch a file from URL\n * const fetchEffect = fetchFile(\"https://example.com/image.jpg\");\n *\n * // Run with error handling\n * const response = await Effect.runPromise(\n * fetchEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to fetch file\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport const fetchFile = (url: string) => {\n return Effect.tryPromise({\n try: async () => {\n return await fetch(url);\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for URL fetch\n Effect.withSpan(\"upload-fetch-url\", {\n attributes: {\n \"upload.url\": url,\n \"upload.operation\": \"fetch\",\n },\n }),\n // Track URL fetch metrics\n Effect.tap((response) =>\n Effect.gen(function* () {\n // Increment URL upload counter\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_total\", {\n description: \"Total number of URL-based uploads\",\n }),\n );\n\n // Track success/failure\n if (response.ok) {\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_success_total\", {\n description: \"Total number of successful URL-based uploads\",\n }),\n );\n }\n }),\n ),\n // Add structured logging\n Effect.tap((response) =>\n Effect.logInfo(\"URL fetch completed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n \"response.status\": response.status.toString(),\n \"response.ok\": response.ok.toString(),\n \"response.content_length\":\n response.headers.get(\"content-length\") ?? 
\"unknown\",\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Track failed URL upload\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_failed_total\", {\n description: \"Total number of failed URL-based uploads\",\n }),\n );\n\n // Log error\n yield* Effect.logError(\"URL fetch failed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n error: String(error),\n }),\n );\n }),\n ),\n );\n};\n\n/**\n * Converts a Response object to an ArrayBuffer.\n *\n * This function safely converts HTTP response data to binary format\n * for processing and storage, with proper error handling and observability.\n *\n * Features:\n * - Safe conversion from Response to ArrayBuffer\n * - Effect tracing for performance monitoring\n * - Structured logging for debugging\n * - Error handling with proper UploadistaError types\n *\n * @param response - The HTTP Response object to convert\n * @returns Effect that yields the ArrayBuffer data\n *\n * @example\n * ```typescript\n * // Convert response to buffer\n * const bufferEffect = arrayBuffer(response);\n *\n * // Use in upload pipeline\n * const buffer = await Effect.runPromise(\n * bufferEffect.pipe(\n * Effect.tap((buffer) =>\n * Effect.logInfo(`Buffer size: ${buffer.byteLength} bytes`)\n * )\n * )\n * );\n * ```\n */\nexport const arrayBuffer = (response: Response) => {\n return Effect.tryPromise({\n try: async () => {\n return await response.arrayBuffer();\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for buffer conversion\n Effect.withSpan(\"upload-convert-to-buffer\", {\n attributes: {\n \"upload.operation\": \"arrayBuffer\",\n },\n }),\n // Add structured logging\n Effect.tap((buffer) =>\n Effect.logDebug(\"Response converted to array buffer\").pipe(\n Effect.annotateLogs({\n \"buffer.size\": buffer.byteLength.toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to convert response to array buffer\").pipe(\n Effect.annotateLogs({\n error: String(error),\n }),\n ),\n ),\n );\n};\n","import { Context, Effect, Layer, Stream } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n EventEmitter,\n InputFile,\n KvStore,\n Middleware,\n StreamingConfig,\n UploadEvent,\n UploadFile,\n WebSocketConnection,\n} from \"../types\";\nimport {\n UploadEventEmitter,\n UploadEventType,\n UploadFileDataStores,\n UploadFileKVStore,\n} from \"../types\";\nimport { GenerateId, type GenerateIdShape } from \"../utils/generate-id\";\nimport { createUpload } from \"./create-upload\";\nimport { uploadChunk } from \"./upload-chunk\";\nimport { arrayBuffer, fetchFile } from \"./upload-url\";\n\n/**\n * Legacy configuration options for UploadEngine.\n *\n * @deprecated Use Effect Layers instead of this configuration object.\n * This type is kept for backward compatibility.\n *\n * @property dataStore - DataStore instance or factory function\n * @property kvStore - KV store for upload metadata\n * @property eventEmitter - Event emitter for upload progress\n * @property generateId - Optional ID generator (defaults to UUID)\n * @property middlewares - Optional request middlewares\n * @property withTracing - Enable Effect tracing for debugging\n */\nexport type UploadEngineOptions = {\n dataStore:\n | ((storageId: string) => Promise<DataStore<UploadFile>>)\n | 
DataStore<UploadFile>;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId?: GenerateIdShape;\n middlewares?: Middleware[];\n withTracing?: boolean;\n};\n\n/**\n * UploadEngine service interface.\n *\n * This is the core upload handling service that provides all file upload operations.\n * It manages upload lifecycle, resumable uploads, progress tracking, and storage integration.\n *\n * All operations return Effect types for composable, type-safe error handling.\n *\n * @property createUpload - Initiates a new upload and returns metadata\n * @property uploadChunk - Uploads a chunk of data for an existing upload\n * @property getCapabilities - Returns storage backend capabilities\n * @property upload - Complete upload in one operation (create + upload data)\n * @property uploadFromUrl - Uploads a file from a remote URL\n * @property getUpload - Retrieves upload metadata by ID\n * @property read - Reads the complete uploaded file data\n * @property delete - Deletes an upload and its data\n * @property subscribeToUploadEvents - Subscribes WebSocket to upload progress events\n * @property unsubscribeFromUploadEvents - Unsubscribes from upload events\n *\n * @example\n * ```typescript\n * // Basic upload flow\n * const program = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * // 1. Create upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\"\n * };\n * const upload = yield* server.createUpload(inputFile, \"client123\");\n *\n * // 2. Upload chunks\n * const chunk = new ReadableStream(...);\n * const updated = yield* server.uploadChunk(upload.id, \"client123\", chunk);\n *\n * // 3. Read the uploaded file\n * const data = yield* server.read(upload.id, \"client123\");\n *\n * return upload;\n * });\n *\n * // Upload with WebSocket progress tracking\n * const uploadWithProgress = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * // Subscribe to progress events\n * yield* server.subscribeToUploadEvents(uploadId, websocket);\n *\n * // Upload (events will be emitted automatically)\n * const result = yield* server.upload(inputFile, clientId, stream);\n *\n * // Unsubscribe when done\n * yield* server.unsubscribeFromUploadEvents(uploadId);\n *\n * return result;\n * });\n *\n * // Upload from URL\n * const urlUpload = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 0, // Unknown initially\n * type: \"image/png\",\n * fileName: \"remote-image.png\"\n * };\n *\n * const upload = yield* server.uploadFromUrl(\n * inputFile,\n * \"client123\",\n * \"https://example.com/image.png\"\n * );\n *\n * return upload;\n * });\n * ```\n */\nexport type UploadEngineShape = {\n createUpload: (\n inputFile: InputFile,\n clientId: string | null,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getCapabilities: (\n storageId: string,\n clientId: string | null,\n ) => Effect.Effect<DataStoreCapabilities, UploadistaError>;\n upload: (\n file: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getUpload: 
(uploadId: string) => Effect.Effect<UploadFile, UploadistaError>;\n /**\n * Reads the complete uploaded file data as bytes (buffered mode).\n * For large files, consider using readStream() for memory efficiency.\n */\n read: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Falls back to buffered read if the underlying DataStore doesn't support streaming.\n *\n * @param uploadId - The unique identifier of the upload to read\n * @param clientId - Client identifier for multi-tenant routing\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n *\n * @example\n * ```typescript\n * const server = yield* UploadEngine;\n * const stream = yield* server.readStream(uploadId, clientId, { chunkSize: 65536 });\n * // Process stream chunk by chunk with bounded memory\n * yield* Stream.runForEach(stream, (chunk) => processChunk(chunk));\n * ```\n */\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) => Effect.Effect<\n Stream.Stream<Uint8Array, UploadistaError>,\n UploadistaError\n >;\n /**\n * Uploads file content from a stream with unknown final size.\n * Creates upload with deferred length, streams content to storage,\n * and updates the upload record with final size when complete.\n *\n * Falls back to buffered upload if the underlying DataStore\n * doesn't support streaming writes.\n *\n * @param file - Input file configuration (size is optional)\n * @param clientId - Client identifier for multi-tenant routing\n * @param stream - Effect Stream of byte chunks to upload\n * @returns The completed UploadFile with final size\n *\n * @example\n * ```typescript\n * const server = yield* UploadEngine;\n * const result = yield* server.uploadStream(\n * {\n * storageId: \"s3-production\",\n * type: \"image/webp\",\n * uploadLengthDeferred: true,\n * fileName: \"optimized.webp\",\n * },\n * clientId,\n * transformedStream,\n * );\n * console.log(`Uploaded ${result.size} bytes`);\n * ```\n */\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n delete: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<void, UploadistaError>;\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n unsubscribeFromUploadEvents: (\n uploadId: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n/**\n * Effect-TS context tag for the UploadEngine service.\n *\n * Use this tag to access the UploadEngine in an Effect context.\n * The server must be provided via a Layer or dependency injection.\n *\n * @example\n * ```typescript\n * // Access UploadEngine in an Effect\n * const uploadEffect = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * const upload = yield* server.createUpload(inputFile, clientId);\n * return upload;\n * });\n *\n * // Provide UploadEngine layer\n * const program = uploadEffect.pipe(\n * Effect.provide(uploadEngine),\n * Effect.provide(uploadFileKvStore),\n * Effect.provide(dataStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * );\n * ```\n */\nexport class UploadEngine extends Context.Tag(\"UploadEngine\")<\n UploadEngine,\n UploadEngineShape\n>() {}\n\n/**\n * 
Creates the UploadEngine implementation.\n *\n * This function constructs the UploadEngine service by composing all required\n * dependencies (KV store, data stores, event emitter, ID generator). It implements\n * all upload operations defined in UploadEngineShape.\n *\n * The server automatically handles:\n * - Upload lifecycle management (create, resume, complete)\n * - Progress tracking and event emission\n * - Storage backend routing based on storageId\n * - Error handling with proper UploadistaError types\n *\n * @returns An Effect that yields the UploadEngineShape implementation\n *\n * @example\n * ```typescript\n * // Create a custom UploadEngine layer\n * const myUploadEngine = Layer.effect(\n * UploadEngine,\n * createUploadEngine()\n * );\n *\n * // Use in a program\n * const program = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * // Use server operations...\n * }).pipe(Effect.provide(myUploadEngine));\n * ```\n */\nexport function createUploadEngine() {\n return Effect.gen(function* () {\n const kvStore = yield* UploadFileKVStore;\n const eventEmitter = yield* UploadEventEmitter;\n const generateId = yield* GenerateId;\n const dataStoreService = yield* UploadFileDataStores;\n\n return {\n upload: (\n inputFile: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) =>\n Effect.gen(function* () {\n const fileCreated = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n });\n return yield* uploadChunk(fileCreated.id, clientId, stream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) =>\n Effect.gen(function* () {\n const response = yield* fetchFile(url);\n const buffer = yield* arrayBuffer(response);\n\n // Create a readable stream from the buffer\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n },\n });\n\n const fileCreated = yield* createUpload(\n { ...inputFile, size: buffer.byteLength },\n clientId,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n },\n );\n return yield* uploadChunk(fileCreated.id, clientId, stream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n createUpload: (inputFile: InputFile, clientId: string | null) =>\n Effect.gen(function* () {\n const fileCreated = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n });\n return fileCreated;\n }),\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) =>\n Effect.gen(function* () {\n const file = yield* uploadChunk(uploadId, clientId, chunk, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n return file;\n }),\n getUpload: (uploadId: string) =>\n Effect.gen(function* () {\n const file = yield* kvStore.get(uploadId);\n return file;\n }),\n read: (uploadId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n return yield* dataStore.read(uploadId);\n }),\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n\n // Check if the DataStore supports streaming 
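// --- Illustrative aside (not part of the package source) ---------------------
// The single-chunk ReadableStream construction used by uploadFromUrl above
// (and by the buffered uploadStream fallback below), factored out. The helper
// name is an assumption.
function bytesToReadableStream(bytes: Uint8Array): ReadableStream<Uint8Array> {
  return new ReadableStream({
    start(controller) {
      controller.enqueue(bytes); // one chunk: the whole buffer
      controller.close();
    },
  });
}
// -----------------------------------------------------------------------------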
reads\n const capabilities = dataStore.getCapabilities();\n if (capabilities.supportsStreamingRead && dataStore.readStream) {\n // Use native streaming\n yield* Effect.logDebug(`Using streaming read for file ${uploadId}`);\n return yield* dataStore.readStream(uploadId, config);\n }\n\n // Fallback: read entire file and convert to stream\n yield* Effect.logDebug(\n `Falling back to buffered read for file ${uploadId} (streaming not supported)`,\n );\n const bytes = yield* dataStore.read(uploadId);\n\n // Convert buffered bytes to a single-chunk stream\n return Stream.succeed(bytes);\n }),\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) =>\n Effect.gen(function* () {\n // Get the data store for this storage\n const dataStore = yield* dataStoreService.getDataStore(\n file.storageId,\n clientId,\n );\n\n // Check if the DataStore supports streaming writes\n const capabilities = dataStore.getCapabilities();\n\n // Generate upload ID\n const uploadId = yield* generateId.generateId();\n\n if (capabilities.supportsStreamingWrite && dataStore.writeStream) {\n // Use native streaming write - DO NOT call createUpload as it would\n // create an S3 multipart upload that we won't use (writeStream creates its own)\n yield* Effect.logDebug(\n `Using streaming write for file ${uploadId}`,\n );\n\n // Parse metadata\n const metadata =\n typeof file.metadata === \"string\"\n ? JSON.parse(file.metadata)\n : file.metadata || {};\n\n // Convert metadata to Record<string, string> if present\n const stringMetadata = Object.fromEntries(\n Object.entries(metadata).map(([k, v]) => [k, String(v)]),\n );\n\n // Create initial upload record in KV store (without creating S3 multipart upload)\n const initialUpload: UploadFile = {\n id: uploadId,\n offset: 0,\n size: file.size ?? 0,\n storage: {\n id: file.storageId,\n type: dataStore.getCapabilities().supportsStreamingWrite\n ? 
\"streaming\"\n : \"default\",\n },\n metadata,\n creationDate: new Date().toISOString(),\n };\n yield* kvStore.set(uploadId, initialUpload);\n\n // Emit started event\n yield* eventEmitter.emit(uploadId, {\n type: UploadEventType.UPLOAD_STARTED,\n data: initialUpload,\n });\n\n const result = yield* dataStore.writeStream(uploadId, {\n stream,\n contentType: file.type,\n sizeHint: file.sizeHint,\n metadata: stringMetadata,\n });\n\n // Update the upload record with the final size and URL\n const completedUpload: UploadFile = {\n ...initialUpload,\n size: result.size,\n offset: result.size,\n storage: {\n ...initialUpload.storage,\n path: result.path,\n },\n ...(result.url && { url: result.url }),\n };\n\n yield* kvStore.set(uploadId, completedUpload);\n\n // Emit completion event\n yield* eventEmitter.emit(uploadId, {\n type: UploadEventType.UPLOAD_COMPLETE,\n data: completedUpload,\n });\n\n return completedUpload;\n }\n\n // Fallback: buffer the stream and use regular upload (which calls createUpload + uploadChunk)\n yield* Effect.logWarning(\n `Falling back to buffered upload for file ${uploadId} (streaming write not supported)`,\n );\n\n // Collect stream into a buffer\n const chunks: Uint8Array[] = [];\n yield* Stream.runForEach(stream, (chunk) =>\n Effect.sync(() => {\n chunks.push(chunk);\n }),\n );\n\n // Calculate total size\n const totalSize = chunks.reduce(\n (acc, chunk) => acc + chunk.length,\n 0,\n );\n\n // Create a combined buffer\n const buffer = new Uint8Array(totalSize);\n let offset = 0;\n for (const chunk of chunks) {\n buffer.set(chunk, offset);\n offset += chunk.length;\n }\n\n // Create a readable stream from the buffer\n const readableStream = new ReadableStream({\n start(controller) {\n controller.enqueue(buffer);\n controller.close();\n },\n });\n\n // For fallback, use the regular flow with createUpload + uploadChunk\n const inputFile: InputFile = {\n ...file,\n size: totalSize,\n };\n\n const uploadFile = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId: { generateId: () => Effect.succeed(uploadId) },\n });\n\n // Use regular uploadChunk\n return yield* uploadChunk(uploadId, clientId, readableStream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n delete: (uploadId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n yield* dataStore.remove(uploadId);\n yield* kvStore.delete(uploadId);\n return;\n }),\n getCapabilities: (storageId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const dataStore = yield* dataStoreService.getDataStore(\n storageId,\n clientId,\n );\n return dataStore.getCapabilities();\n }),\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) =>\n Effect.gen(function* () {\n yield* eventEmitter.subscribe(uploadId, connection);\n }),\n unsubscribeFromUploadEvents: (uploadId: string) =>\n Effect.gen(function* () {\n yield* eventEmitter.unsubscribe(uploadId);\n }),\n } satisfies UploadEngineShape;\n });\n}\n\n/**\n * Pre-built UploadEngine Effect Layer.\n *\n * This layer provides a ready-to-use UploadEngine implementation that can be\n * composed with other layers to build a complete upload system.\n *\n * Required dependencies:\n * - UploadFileKVStore: For storing upload metadata\n * - UploadFileDataStores: For routing to storage backends\n * - UploadEventEmitter: For 
progress events\n * - GenerateId: For creating upload IDs\n *\n * @example\n * ```typescript\n * // Compose a complete upload system\n * const fullUploadSystem = Layer.mergeAll(\n * uploadEngine,\n * uploadFileKvStore,\n * dataStoreLayer,\n * uploadEventEmitter,\n * generateIdLayer\n * );\n *\n * // Use in application\n * const app = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * // Perform uploads...\n * }).pipe(Effect.provide(fullUploadSystem));\n * ```\n */\nexport const uploadEngine = Layer.effect(UploadEngine, createUploadEngine());\n","import type { DataStoreCapabilities, UploadStrategy } from \"../types\";\n\n/**\n * Configuration options for upload strategy negotiation.\n *\n * @property fileSize - Size of the file to be uploaded in bytes\n * @property preferredStrategy - Preferred upload strategy (single, parallel, resumable)\n * @property preferredChunkSize - Preferred chunk size in bytes\n * @property parallelUploads - Number of parallel upload connections\n * @property minChunkSizeForParallel - Minimum file size to consider parallel uploads\n */\nexport type UploadStrategyOptions = {\n fileSize: number;\n preferredStrategy?: UploadStrategy;\n preferredChunkSize?: number;\n parallelUploads?: number;\n minChunkSizeForParallel?: number;\n};\n\n/**\n * Result of upload strategy negotiation.\n *\n * @property strategy - The negotiated upload strategy\n * @property chunkSize - The negotiated chunk size in bytes\n * @property parallelUploads - The negotiated number of parallel uploads\n * @property reasoning - Array of reasoning strings explaining the decisions\n * @property warnings - Array of warning messages about adjustments made\n */\nexport type NegotiatedStrategy = {\n strategy: UploadStrategy;\n chunkSize: number;\n parallelUploads: number;\n reasoning: string[];\n warnings: string[];\n};\n\n/**\n * Negotiates the optimal upload strategy based on data store capabilities and file characteristics.\n *\n * This class analyzes data store capabilities, file size, and user preferences to determine\n * the best upload strategy (single, parallel, resumable) and optimal parameters like chunk size\n * and parallel connection count.\n *\n * The negotiator considers:\n * - Data store capabilities (parallel uploads, resumable uploads, concatenation)\n * - File size and chunk size constraints\n * - User preferences and requirements\n * - Performance optimization opportunities\n *\n * @example\n * ```typescript\n * // Create negotiator for S3 data store\n * const negotiator = new UploadStrategyNegotiator(\n * s3Capabilities,\n * (strategy) => s3Capabilities.supportsStrategy(strategy)\n * );\n *\n * // Negotiate strategy for large file\n * const result = negotiator.negotiateStrategy({\n * fileSize: 100_000_000, // 100MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB chunks\n * parallelUploads: 4\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * ```\n */\nexport class UploadStrategyNegotiator {\n /**\n * Creates a new upload strategy negotiator.\n *\n * @param capabilities - Data store capabilities and constraints\n * @param validateUploadStrategy - Function to validate if a strategy is supported\n */\n constructor(\n private capabilities: DataStoreCapabilities,\n private validateUploadStrategy: (strategy: UploadStrategy) => boolean,\n ) {}\n\n /**\n * Negotiates the optimal upload strategy 
based on options and data store capabilities.\n *\n * This method analyzes the provided options and data store capabilities to determine\n * the best upload strategy, chunk size, and parallel upload settings. It considers\n * user preferences, file size, and data store constraints to make optimal decisions.\n *\n * The negotiation process:\n * 1. Validates preferred strategy against data store capabilities\n * 2. Automatically selects strategy based on file size and capabilities\n * 3. Adjusts chunk size to fit within data store constraints\n * 4. Validates parallel upload settings\n * 5. Ensures final strategy is supported by the data store\n *\n * @param options - Upload strategy options including file size and preferences\n * @returns Negotiated strategy with reasoning and warnings\n *\n * @example\n * ```typescript\n * const result = negotiator.negotiateStrategy({\n * fileSize: 50_000_000, // 50MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB\n * parallelUploads: 3\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.parallelUploads); // 3\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * console.log(result.warnings); // [] (no warnings)\n * ```\n */\n negotiateStrategy(options: UploadStrategyOptions): NegotiatedStrategy {\n const reasoning: string[] = [];\n const warnings: string[] = [];\n\n let strategy: UploadStrategy = \"single\";\n let chunkSize =\n options.preferredChunkSize ??\n this.capabilities.optimalChunkSize ??\n 1024 * 1024;\n let parallelUploads = options.parallelUploads ?? 1;\n\n // Check if data store supports the preferred strategy\n if (options.preferredStrategy) {\n if (!this.validateUploadStrategy(options.preferredStrategy)) {\n warnings.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store, falling back`,\n );\n } else {\n strategy = options.preferredStrategy;\n reasoning.push(`Using preferred strategy: ${strategy}`);\n }\n }\n\n // Automatic strategy selection based on capabilities and file size\n if (\n !options.preferredStrategy ||\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n if (\n this.capabilities.supportsParallelUploads &&\n options.fileSize > (options.minChunkSizeForParallel ?? 10 * 1024 * 1024)\n ) {\n strategy = \"parallel\";\n reasoning.push(\n `Selected parallel upload for large file (${options.fileSize} bytes)`,\n );\n } else {\n strategy = \"single\";\n reasoning.push(\n this.capabilities.supportsParallelUploads\n ? 
`Selected single upload for small file (${options.fileSize} bytes)`\n : \"Selected single upload (parallel not supported by data store)\",\n );\n }\n }\n\n // Validate and adjust chunk size based on data store constraints\n if (\n this.capabilities.minChunkSize &&\n chunkSize < this.capabilities.minChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} below minimum ${this.capabilities.minChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.minChunkSize;\n }\n\n if (\n this.capabilities.maxChunkSize &&\n chunkSize > this.capabilities.maxChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} above maximum ${this.capabilities.maxChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.maxChunkSize;\n }\n\n // Validate parallel upload settings\n if (strategy === \"parallel\") {\n if (\n this.capabilities.maxConcurrentUploads &&\n parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n warnings.push(\n `Parallel uploads ${parallelUploads} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`,\n );\n parallelUploads = this.capabilities.maxConcurrentUploads;\n }\n\n // Check if file would exceed max parts limit\n if (this.capabilities.maxParts) {\n const estimatedParts = Math.ceil(options.fileSize / chunkSize);\n if (estimatedParts > this.capabilities.maxParts) {\n const minChunkForParts = Math.ceil(\n options.fileSize / this.capabilities.maxParts,\n );\n warnings.push(\n `Estimated parts ${estimatedParts} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`,\n );\n chunkSize = Math.max(chunkSize, minChunkForParts);\n }\n }\n }\n\n // Final validation - ensure strategy is still valid after adjustments\n if (!this.validateUploadStrategy(strategy)) {\n warnings.push(\n `Final strategy validation failed, falling back to single upload`,\n );\n strategy = \"single\";\n parallelUploads = 1;\n }\n\n // Add capability information to reasoning\n reasoning.push(\n `Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`,\n );\n\n return {\n strategy,\n chunkSize,\n parallelUploads: strategy === \"parallel\" ? parallelUploads : 1,\n reasoning,\n warnings,\n };\n }\n\n /**\n * Gets the data store capabilities used by this negotiator.\n *\n * @returns The data store capabilities and constraints\n */\n getDataStoreCapabilities(): DataStoreCapabilities {\n return this.capabilities;\n }\n\n /**\n * Validates upload strategy configuration against data store capabilities.\n *\n * This method checks if the provided configuration is valid for the current\n * data store capabilities without performing the actual negotiation. 
It's\n * useful for pre-validation before attempting to negotiate a strategy.\n *\n * @param options - Upload strategy options to validate\n * @returns Validation result with validity flag and error messages\n *\n * @example\n * ```typescript\n * const validation = negotiator.validateConfiguration({\n * fileSize: 10_000_000,\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 1_000_000,\n * parallelUploads: 5\n * });\n *\n * if (!validation.valid) {\n * console.log(\"Configuration errors:\", validation.errors);\n * // Handle validation errors\n * }\n * ```\n */\n validateConfiguration(options: UploadStrategyOptions): {\n valid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n\n if (\n options.preferredStrategy &&\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n errors.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store`,\n );\n }\n\n if (options.preferredChunkSize) {\n if (\n this.capabilities.minChunkSize &&\n options.preferredChunkSize < this.capabilities.minChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`,\n );\n }\n if (\n this.capabilities.maxChunkSize &&\n options.preferredChunkSize > this.capabilities.maxChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`,\n );\n }\n }\n\n if (\n options.parallelUploads &&\n this.capabilities.maxConcurrentUploads &&\n options.parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n errors.push(\n `Parallel uploads ${options.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`,\n );\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n 
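// --- Illustrative aside (not part of the package source) ---------------------
// Worked numbers for the maxParts adjustment in negotiateStrategy (values
// assumed; 10_000 parts is a typical S3-style limit):
const fileSize = 100_000_000_000; // 100 GB
const maxParts = 10_000;
let chunkSize = 5_000_000; // preferred 5 MB
const estimatedParts = Math.ceil(fileSize / chunkSize); // 20_000 > maxParts
chunkSize = Math.max(chunkSize, Math.ceil(fileSize / maxParts)); // -> 10_000_000 (10 MB)
// -----------------------------------------------------------------------------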
}\n}\n"],"mappings":"8VAGA,SAAS,EACP,EACA,EACA,EAAS,EACA,CAET,OADI,EAAO,OAAS,EAAS,EAAQ,OAAe,GAC7C,EAAQ,OAAO,EAAM,IAAM,EAAO,EAAS,KAAO,EAAK,CAMhE,SAAS,EAAY,EAAoB,EAAa,EAAS,EAAY,CACzE,GAAI,EAAO,OAAS,EAAS,EAAI,OAAQ,MAAO,GAChD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAI,OAAQ,IAC9B,GAAI,EAAO,EAAS,KAAO,EAAI,WAAW,EAAE,CAAE,MAAO,GAEvD,MAAO,GAWT,MAAa,GACX,EACA,IACW,CACX,GAAI,EAAO,SAAW,EACpB,MAAO,2BAMT,GAAI,EAAW,EAAQ,CAAC,IAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAK,CAAC,CACtE,MAAO,YAIT,GAAI,EAAW,EAAQ,CAAC,IAAM,IAAM,IAAK,CAAC,CACxC,MAAO,aAIT,GAAI,EAAY,EAAQ,SAAS,EAAI,EAAY,EAAQ,SAAS,CAChE,MAAO,YAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAK,CAAE,EAAE,EACzC,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAAI,EAAY,EAAQ,OAAQ,EAAE,EAEjE,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAK,CAAC,CAClC,MAAO,YAST,GALI,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,EAAK,CAAC,EAK5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,GAAK,CAAC,CAC9C,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAK,CAAC,CAC9C,MAAO,eAIT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CACD,GACE,EAAK,SAAS,OAAO,EACpB,EAAK,SAAS,QAAQ,EAAI,EAAK,SAAS,OAAO,CAEhD,MAAO,gBAOX,GAAI,EAAO,QAAU,IAAM,EAAY,EAAQ,OAAQ,EAAE,CAAE,CACzD,IAAM,EAAU,IAAI,aAAa,CAAC,OAAO,EAAO,MAAM,EAAG,GAAG,CAAC,CAC7D,GACE,EAAQ,WAAW,MAAM,EACzB,EAAQ,WAAW,KAAK,EACxB,EAAQ,WAAW,OAAO,CAE1B,MAAO,YAKX,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,CAC9C,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,kBAIT,GACE,EAAO,QAAU,IAChB,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,EAAI,EAAO,QAAU,KACtD,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,IAAI,CACrB,CACQ,SAAS,WAAW,CAC3B,MAAO,mBAOX,GACE,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAY,EAAQ,MAAM,CAE1B,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,MAAO,EAAE,CAE7B,MAAO,YAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,kBAMT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,CAC5C,CAGA,GAAI,EAAO,QAAU,KAAM,CACzB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OAAO,EAAO,CACtE,GAAI,EAAK,SAAS,QAAQ,CACxB,MAAO,0EACT,GAAI,EAAK,SAAS,MAAM,CACtB,MAAO,oEACT,GAAI,EAAK,SAAS,OAAO,CACvB,MAAO,4EAEX,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,GAAM,GAAM,EAAK,CAAC,CAC1D,MAAO,+BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAM,IAAM,IAAM,GAAM,GAAK,CAAC,CAC1D,MAAO,8BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAK,CAAC,CAClC,MAAO,mBAIT,GAAI,EAAO,QAAU,KAAO,EAAY,EAAQ,QAAS,IAAI,CAC3D,MAAO,oBAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAM,EAAK,CAAC,CACpD,MAAO,WAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,WAMT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAY,EAAO,GACzB,GAAI,IAAc,KAAQ,IAAc,GAEtC,GAAI,CACF,IAAM,EAAO,IAAI,YAAY,QAAQ,CAAC,OACpC,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CAED,OADA,KAAK,MAAM,EAAK,MAAM,CAAC,CAChB,wBACD,GAOZ,GAAI,EAEF,OADY,EAAS,MAAM,IAAI,CAAC,KAAK,EAAE,aAAa,CACpD,CAEE,IAAK,MAC
L,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,OACH,MAAO,aACT,IAAK,OACL,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACL,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,eACT,IAAK,MACH,MAAO,gBAGT,IAAK,MACL,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,mBAGT,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YAGT,IAAK,MACH,MAAO,kBACT,IAAK,OACH,MAAO,0EACT,IAAK,OACH,MAAO,oEACT,IAAK,OACH,MAAO,4EAGT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,+BACT,IAAK,KACH,MAAO,8BACT,IAAK,KACL,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,oBAGT,IAAK,OACH,MAAO,YACT,IAAK,QACH,MAAO,aACT,IAAK,MACH,MAAO,WACT,IAAK,MACH,MAAO,WAGT,IAAK,MACH,MAAO,aACT,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,kBACT,IAAK,OACL,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,WACT,IAAK,KACH,MAAO,yBACT,IAAK,MACH,MAAO,WAET,QACE,MAAO,2BAIb,MAAO,4BAgBT,SAAgB,EAAiB,EAAkB,EAA2B,CAW5E,OATI,IAAa,EACR,GAIa,EAAS,MAAM,IAAI,CAAC,KACpB,EAAS,MAAM,IAAI,CAAC,GCvZ5C,MAAMA,EAEF,EAAO,IAAI,WAAa,CAC1B,IAAM,EAAa,MAAO,EAAO,YAAY,KAAK,EAAO,OAAO,CAChE,OAAO,EAAO,MAAM,EAAY,CAC9B,WAAc,IAAA,GACd,OAAS,IAAU,CACjB,QAAS,EAAK,QACd,OAAQ,EAAK,OACb,WAAY,EAAK,QAAU,EAAI,EAChC,EACF,CAAC,EACF,CA4DW,GACX,EACA,EACA,CACE,mBACA,UACA,eACA,gBAQF,EAAO,IAAI,WAAa,CAItB,IAAM,EAAe,MAAO,EACtB,EAAe,IAAI,MAAM,CAAC,aAAa,CA0E7C,OAvEoB,MAAO,EAAO,IAAI,WAAa,CAEjD,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAU,UACV,EACD,CAEK,EAAK,MAAO,EAAW,YAAY,CACnC,CAAE,OAAM,OAAM,WAAU,eAAc,WAAU,QAAS,EAE3DC,EAAyC,EAAE,CAC/C,GAAI,EACF,GAAI,CACF,EAAiB,KAAK,MAAM,EAAS,MAC/B,CACN,EAAiB,EAAE,CAIvB,IAAMC,EAAyC,CAC7C,GAAG,EACH,OACA,SAAU,GAAY,GACvB,CACG,IACF,EAAe,aAAe,EAAa,UAAU,EAGvD,IAAMC,EAAmB,CACvB,KACA,OACA,SAAU,EACV,OAAQ,EACR,eACA,QAAS,CACP,GAAI,EAAU,UACd,OACA,KAAM,GACN,OAAQ,EAAU,OACnB,CACD,OACA,eACD,CAGK,EAAU,MAAO,EAAU,OAAO,EAAK,CAY7C,OATA,MAAO,EAAQ,IAAI,EAAI,EAAQ,CAG/B,MAAO,EAAa,KAAK,EAAI,CAC3B,KAAM,EAAgB,eACtB,KAAM,EACN,KAAM,EAAQ,KACf,CAAC,CAEK,GACP,CAAC,KAED,EAAO,SAAS,gBAAiB,CAC/B,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CACH,EAGD,CAAC,KAGD,EAAO,SAAS,SAAU,CACxB,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAStB,GAPA,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGG,EAAK,KAAM,CACb,IAAM,EAAoB,EAAO,UAC/B,yBACA,EAAiB,YAAY,CAC3B,MAAO,KACP,OAAQ,EACR,MAAO,GACR,CAAC,CACH,CACD,MAAO,EAAO,OAAO,EAAmB,EAAK,KAAK,CAIpD,IAAM,EAAqB,EAAO,MAAM,iBAAiB,CACzD,MAAO,EAAO,UAAU,EAAmB,EAC3C,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,iBAAiB,CAAC,KAC/B,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAChC,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,SAAS,yBAAyB,CAAC,KAC/C,EAAO,aAAa,CAClB,mBAAoB,EAAU,UAAY,UAC1C,oBAAqB,EAAU,UAC/B,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CAGD,MAAO,EAAO,UACZ,EAAO,QAAQ,sBAAuB,CACpC,YAAa,sCACd,CAAC,CACH,EACD,CACH,CACF,CChOH,SAAgB,EAAmB,EAAyB,CAC1D,OAAO,EAAO,uBACN,EACL,GACC,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,OAAO,EAAM,CACpB,CAAC,CACL,CCgCH,SAAgB,EAAa,CAC3B,OACA,SACA,YACA,cACA,aACA,eACA,yBAAyB,KACH,CACtB,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAS,EAAgB,EAAK,CAEpC,GAAI,EAAW,OAAO,QACpB,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAIhE,IAAM,EAAkB,IAAI,gBACtB,CAAE,UAAW,EAGb,MAAgB,CAEpB,EAAgB,OAAO,EAKzB,OAFA,EAAW,OAAO,iBAAiB,QAAS,EAAS,CAAE,KAAM,GAAM,CAAC,CAE7D,MAAO,EAAO,kBACnB,EAAO,UAAY,CAAE,SAAQ,UAAS,EAAE,EACvC,CAAE,OAAQ,KACT,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAe,MAAO,EA
AI,KAAK,EAAE,CAQjC,EALU,EAAoB,MAAM,CACxC,QAAS,EACV,CAAC,CAG4B,EAAO,CAyCrC,OAtCe,MAAO,EAAU,MAC9B,CACE,OAAQ,EACR,QAAS,EAAO,GAChB,OAAQ,EAAO,OAChB,CACD,CACE,WAAa,GAAsB,CAEjC,IAAM,EAAM,KAAK,KAAK,CACtB,EAAI,IAAI,EAAa,CAClB,KACC,EAAO,QAAS,GACV,EAAM,GAAY,EACb,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAI,IAAI,EAAc,EAAI,CACjC,MAAO,EAAa,KAAK,EAAO,GAAI,CAClC,KAAM,EAAgB,gBACtB,KAAM,CACJ,GAAI,EAAO,GACX,SAAU,EACV,MAAO,EAAO,MAAQ,EACvB,CACD,KAAM,EAAO,KACd,CAAC,EACF,CAEG,EAAO,KACd,CACF,EAAO,WACR,CACA,UAAY,GAEX,EAEP,CACF,EAGD,CAAC,KACD,EAAO,SAAU,GACX,aAAiB,OAAS,EAAM,OAAS,aACpC,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAErD,aAAiB,EACZ,EAAO,KAAK,EAAM,CAEpB,EAAO,KACZ,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAC/D,CACD,CACH,EACF,CAAE,QAAA,KACD,EAAO,SAAW,CAChB,EAAW,OAAO,oBAAoB,QAASC,EAAQ,EACvD,CACL,EACD,CAAC,KAED,EAAO,SAAS,wBAAyB,CACvC,WAAY,CACV,YAAa,EAAO,GACpB,gBAAiB,EAAO,OAAO,UAAU,CACzC,uBAAwB,EAAY,UAAU,CAC9C,mBAAoB,EAAO,MAAM,UAAU,EAAI,IAChD,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,SAAS,wBAAwB,CAAC,KACvC,EAAO,aAAa,CAClB,YAAa,EAAO,GACpB,eAAgB,EAAO,UAAU,CACjC,uBAAwB,EAAS,EAAO,QAAQ,UAAU,CAC3D,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,2BAA2B,CAAC,KAC1C,EAAO,aAAa,CAClB,YAAa,EAAO,GACpB,gBAAiB,EAAO,OAAO,UAAU,CACzC,MAAO,aAAiB,EAAkB,EAAM,KAAO,OAAO,EAAM,CACrE,CAAC,CACH,CACF,CACF,CC7LH,SAAS,EAAmB,EAAsC,CAChE,OAAO,EAAO,aAAa,CACzB,QAAS,EAAa,QACtB,OAAQ,EAAa,OACrB,QAAS,EAAa,aAAe,EACtC,CAAC,CAcJ,MAAM,GACJ,EACA,IACwB,CACxB,IAAM,EAAe,IAAI,KAAK,EAAK,aAAuB,CAAC,SAAS,CAC9D,EAAkB,KAAK,KAAK,CAAG,EAErC,OAAO,EAAO,KAAK,KACjB,EAAO,SAAS,kBAAmB,CACjC,WAAY,CACV,YAAa,EAAK,GAClB,cAAe,EAAK,MAAQ,EAC5B,2BAA4B,EAC5B,oBAAqB,EAAK,QAAQ,GAClC,mBAAoB,EAAK,UAAU,UAAY,UAC/C,uBAAwB,EAAK,aAC7B,yBAA0B,IAAI,MAAM,CAAC,aAAa,CACnD,CACD,OAAQ,EACT,CAAC,CACH,EAoDU,GACX,EACA,EACA,EACA,CACE,mBACA,UACA,kBAOF,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAO,MAAO,EAAQ,IAAI,EAAS,CAInC,EAAa,EAAK,aACpB,EAAmB,EAAK,aAAa,CACrC,IAAA,GAsEJ,OAAO,MAnEc,EAAO,IAAI,WAAa,CAE3C,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAK,QAAQ,GACb,EACD,CAiDD,MAhCA,GAAK,OAVa,MAAO,EAAa,CACpC,YACA,KAAM,EACN,OAAQ,EACR,YAAa,IACb,WAPiB,IAAI,gBAQrB,uBAAwB,IACxB,eACD,CAAC,CAKF,MAAO,EAAQ,IAAI,EAAU,EAAK,CAGlC,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,gBACtB,KAAM,CACJ,GAAI,EAAK,GACT,SAAU,EAAK,OACf,MAAO,EAAK,MAAQ,EACrB,CACD,KAAM,EAAK,KACZ,CAAC,CAGE,EAAK,MAAQ,EAAK,SAAW,EAAK,OACpC,MAAO,EAAe,CACpB,OACA,YACA,eACD,CAAC,CAIE,EAAK,eAEP,MAAO,EAA+B,EADX,EAAmB,EAAK,aAAa,CACD,GAI5D,GACP,CAAC,KAED,EAAO,SAAS,eAAgB,CAC9B,WAAY,CACV,YAAa,EACb,kBAAmB,EACnB,2BAA4B,EAAK,aAAe,OAAS,QAC1D,CACD,OAAQ,EACT,CAAC,CACH,EAGD,CAAC,KAED,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGD,IAAM,EAAY,EAAK,OACjB,EAAqB,EAAO,UAChC,mBACA,EAAiB,OAAO,CACtB,MAAO,OACP,MAAO,OACP,MAAO,GACR,CAAC,CACH,CAID,GAHA,MAAO,EAAO,OAAO,EAAoB,EAAU,CAG/C,EAAK,MAAQ,EAAK,KAAO,EAAG,CAC9B,IAAM,EAAa,EACb,EAAkB,EAAO,MAC7B,qCACD,CACD,MAAO,EAAO,IAAI,EAAiB,EAAW,GAEhD,CACH,CAED,EAAO,IAAK,GACV,EAAO,SAAS,iBAAiB,CAAC,KAChC,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,aAAc,EAAK,OAAO,UAAU,CACpC,iBACE,EAAK,MAAQ,EAAK,KAAO,GACnB,EAAK,OAAS,EAAK,KAAQ,KAAK,QAAQ,EAAE,CAC5C,IACN,oBAAqB,EAAK,MAAM,UAAU,EAAI,IAC/C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,sBAAsB,CAAC,KACrC,EAAO,aAAa,CAClB,YAAa,EACb,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CACF,CACF,CAoCG,GAAkB,CACtB,OACA,YACA,kBAMA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAe,EAAU,iBAAiB,CAGhD,GACE,EAAa,mBACb,EAAK,MACL,EAAK,KAAO,EAAa,kBACzB,CACA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,QAAS,cAAc,EAAK,KAAK,uCAAuC,EAAa,kBAAkB,8BACxG,CACD,KAAM,EAAK,KACZ,CAAC,CACF,OAIF,IAAM,EAAY,MAAO,EAAU,KAAK,EAAK,GAAG,CAGhD,GAAI,EAAK,UAAY,EAAK,kBAAmB,CAC3C,IAAM,EAAmB,MAAO,EAC9
B,EACA,EAAK,kBACN,CAED,GAAI,IAAqB,EAAK,SAiB5B,OAfA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,yBACtB,KAAM,CACJ,GAAI,EAAK,GACT,OAAQ,oBACR,SAAU,EAAK,SACf,OAAQ,EACT,CACD,KAAM,EAAK,KACZ,CAAC,CAGF,MAAO,EAAU,OAAO,EAAK,GAAG,CAGzB,MAAO,EAAgB,SAAS,oBAAqB,CAC1D,KAAM,yCAAyC,EAAK,SAAS,SAAS,IACtE,QAAS,CACP,SAAU,EAAK,GACf,SAAU,EAAK,SACf,OAAQ,EACR,UAAW,EAAK,kBACjB,CACF,CAAC,CAAC,UAAU,CAIf,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,eAAgB,WAChB,UAAW,EAAK,kBACjB,CACD,KAAM,EAAK,KACZ,CAAC,CAIJ,GAAI,EAAa,2BAA4B,CAC3C,IAAM,EAAmB,EAAe,EAAU,CAC5C,EAAmB,EAAK,UAAU,KAExC,GACE,GACA,CAAC,EAAiB,EAAkB,EAAiB,CAkBrD,OAfA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,yBACtB,KAAM,CACJ,GAAI,EAAK,GACT,OAAQ,oBACR,SAAU,EACV,OAAQ,EACT,CACD,KAAM,EAAK,KACZ,CAAC,CAGF,MAAO,EAAU,OAAO,EAAK,GAAG,CAGzB,MAAO,EAAgB,SAAS,oBAAqB,CAC1D,KAAM,0CAA0C,EAAiB,cAAc,IAC/E,QAAS,CACP,SAAU,EAAK,GACf,SAAU,EACV,OAAQ,EACT,CACF,CAAC,CAAC,UAAU,CAIf,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,eAAgB,WACjB,CACD,KAAM,EAAK,KACZ,CAAC,GAEJ,CAAC,KACD,EAAO,SAAS,kBAAmB,CACjC,WAAY,CACV,YAAa,EAAK,GAClB,+BAAgC,EAAK,SAAW,OAAS,QACzD,2BAA4B,EAAU,iBAAiB,CACpD,2BACC,OACA,QACL,CACF,CAAC,CACH,CC3YU,EAAa,GACjB,EAAO,WAAW,CACvB,IAAK,SACI,MAAM,MAAM,EAAI,CAEzB,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAC/C,MAAO,EACR,CAAC,CAEL,CAAC,CAAC,KAED,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,aAAc,EACd,mBAAoB,QACrB,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,wBAAyB,CACtC,YAAa,oCACd,CAAC,CACH,CAGG,EAAS,KACX,MAAO,EAAO,UACZ,EAAO,QAAQ,gCAAiC,CAC9C,YAAa,+CACd,CAAC,CACH,GAEH,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,sBAAsB,CAAC,KACpC,EAAO,aAAa,CAClB,aAAc,EACd,kBAAmB,EAAS,OAAO,UAAU,CAC7C,cAAe,EAAS,GAAG,UAAU,CACrC,0BACE,EAAS,QAAQ,IAAI,iBAAiB,EAAI,UAC7C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,+BAAgC,CAC7C,YAAa,2CACd,CAAC,CACH,CAGD,MAAO,EAAO,SAAS,mBAAmB,CAAC,KACzC,EAAO,aAAa,CAClB,aAAc,EACd,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,EACD,CACH,CACF,CAiCU,EAAe,GACnB,EAAO,WAAW,CACvB,IAAK,SACI,MAAM,EAAS,aAAa,CAErC,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAC/C,MAAO,EACR,CAAC,CAEL,CAAC,CAAC,KAED,EAAO,SAAS,2BAA4B,CAC1C,WAAY,CACV,mBAAoB,cACrB,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,SAAS,qCAAqC,CAAC,KACpD,EAAO,aAAa,CAClB,cAAe,EAAO,WAAW,UAAU,CAC5C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,6CAA6C,CAAC,KAC5D,EAAO,aAAa,CAClB,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CACF,CACF,CCwFH,IAAa,EAAb,cAAkC,EAAQ,IAAI,eAAe,EAG1D,AAAC,GAgCJ,SAAgB,GAAqB,CACnC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAU,MAAO,EACjB,EAAe,MAAO,EACtB,EAAa,MAAO,EACpB,EAAmB,MAAO,EAEhC,MAAO,CACL,QACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAOtB,OAAO,MAAO,GANM,MAAO,EAAa,EAAW,EAAU,CAC3D,mBACA,UACA,eACA,aACD,CAAC,EACoC,GAAI,EAAU,EAAQ,CAC1D,mBACA,UACA,eACD,CAAC,EACF,CACJ,eACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAS,MAAO,EADL,MAAO,EAAU,EAAI,CACK,CAGrC,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,IAAI,WAAW,EAAO,CAAC,CAC1C,EAAW,OAAO,EAErB,CAAC,CAYF,OAAO,MAAO,GAVM,MAAO,EACzB,CAAE,GAAG,EAAW,KAAM,EAAO,WAAY,CACzC,EACA,CACE,mBACA,UACA,eACA,aACD,CACF,EACqC,GAAI,EAAU,EAAQ,CAC1D,mBACA,UACA,eACD,CAAC,EACF,CACJ,cAAe,EAAsB,IACnC,EAAO,IAAI,WAAa,CAOtB,OANoB,MAAO,EAAa,EAAW,EAAU,CAC3D,mBACA,UACA,eACA,aACD,CAAC,EAEF,CACJ,aACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAMtB,OALa,MAAO,EAAY,EAAU,EAAU,EAAO,CACzD,mBACA,UACA,eACD,CAAC,EAEF,CACJ,UAAY,GACV,EAAO,IAAI,WAAa,CAEtB,OADa,MAAO,EAAQ,IAAI,EAAS,EAEzC,CACJ,MAAO,EAAkB,IACvB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CAK3C,OAAO,OAJW,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,EACuB,KAAK,EAAS,EACtC,CACJ,YACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CACrC,EAAY,MAAO,
EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,CAID,GADqB,EAAU,iBAAiB,CAC/B,uBAAyB,EAAU,WAGlD,OADA,MAAO,EAAO,SAAS,iCAAiC,IAAW,CAC5D,MAAO,EAAU,WAAW,EAAU,EAAO,CAItD,MAAO,EAAO,SACZ,0CAA0C,EAAS,4BACpD,CACD,IAAM,EAAQ,MAAO,EAAU,KAAK,EAAS,CAG7C,OAAO,EAAO,QAAQ,EAAM,EAC5B,CACJ,cACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAK,UACL,EACD,CAGK,EAAe,EAAU,iBAAiB,CAG1C,EAAW,MAAO,EAAW,YAAY,CAE/C,GAAI,EAAa,wBAA0B,EAAU,YAAa,CAGhE,MAAO,EAAO,SACZ,kCAAkC,IACnC,CAGD,IAAM,EACJ,OAAO,EAAK,UAAa,SACrB,KAAK,MAAM,EAAK,SAAS,CACzB,EAAK,UAAY,EAAE,CAGnB,EAAiB,OAAO,YAC5B,OAAO,QAAQ,EAAS,CAAC,KAAK,CAAC,EAAG,KAAO,CAAC,EAAG,OAAO,EAAE,CAAC,CAAC,CACzD,CAGKC,EAA4B,CAChC,GAAI,EACJ,OAAQ,EACR,KAAM,EAAK,MAAQ,EACnB,QAAS,CACP,GAAI,EAAK,UACT,KAAM,EAAU,iBAAiB,CAAC,uBAC9B,YACA,UACL,CACD,WACA,aAAc,IAAI,MAAM,CAAC,aAAa,CACvC,CACD,MAAO,EAAQ,IAAI,EAAU,EAAc,CAG3C,MAAO,EAAa,KAAK,EAAU,CACjC,KAAM,EAAgB,eACtB,KAAM,EACP,CAAC,CAEF,IAAM,EAAS,MAAO,EAAU,YAAY,EAAU,CACpD,SACA,YAAa,EAAK,KAClB,SAAU,EAAK,SACf,SAAU,EACX,CAAC,CAGIC,EAA8B,CAClC,GAAG,EACH,KAAM,EAAO,KACb,OAAQ,EAAO,KACf,QAAS,CACP,GAAG,EAAc,QACjB,KAAM,EAAO,KACd,CACD,GAAI,EAAO,KAAO,CAAE,IAAK,EAAO,IAAK,CACtC,CAUD,OARA,MAAO,EAAQ,IAAI,EAAU,EAAgB,CAG7C,MAAO,EAAa,KAAK,EAAU,CACjC,KAAM,EAAgB,gBACtB,KAAM,EACP,CAAC,CAEK,EAIT,MAAO,EAAO,WACZ,4CAA4C,EAAS,kCACtD,CAGD,IAAMC,EAAuB,EAAE,CAC/B,MAAO,EAAO,WAAW,EAAS,GAChC,EAAO,SAAW,CAChB,EAAO,KAAK,EAAM,EAClB,CACH,CAGD,IAAM,EAAY,EAAO,QACtB,EAAK,IAAU,EAAM,EAAM,OAC5B,EACD,CAGK,EAAS,IAAI,WAAW,EAAU,CACpC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,EAAO,EAAO,CACzB,GAAU,EAAM,OAIlB,IAAM,EAAiB,IAAI,eAAe,CACxC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAO,CAC1B,EAAW,OAAO,EAErB,CAAC,CAgBF,OARmB,MAAO,EALG,CAC3B,GAAG,EACH,KAAM,EACP,CAEiD,EAAU,CAC1D,mBACA,UACA,eACA,WAAY,CAAE,eAAkB,EAAO,QAAQ,EAAS,CAAE,CAC3D,CAAC,CAGK,MAAO,EAAY,EAAU,EAAU,EAAgB,CAC5D,mBACA,UACA,eACD,CAAC,EACF,CACJ,QAAS,EAAkB,IACzB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CAK3C,OAJkB,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,EACgB,OAAO,EAAS,CACjC,MAAO,EAAQ,OAAO,EAAS,EAE/B,CACJ,iBAAkB,EAAmB,IACnC,EAAO,IAAI,WAAa,CAKtB,OAJkB,MAAO,EAAiB,aACxC,EACA,EACD,EACgB,iBAAiB,EAClC,CACJ,yBACE,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,UAAU,EAAU,EAAW,EACnD,CACJ,4BAA8B,GAC5B,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,YAAY,EAAS,EACzC,CACL,EACD,CAiCJ,MAAa,EAAe,EAAM,OAAO,EAAc,GAAoB,CAAC,CC5iB5E,IAAa,EAAb,KAAsC,CAOpC,YACE,EACA,EACA,CAFQ,KAAA,aAAA,EACA,KAAA,uBAAA,EAoCV,kBAAkB,EAAoD,CACpE,IAAMG,EAAsB,EAAE,CACxBC,EAAqB,EAAE,CAEzBC,EAA2B,SAC3B,EACF,EAAQ,oBACR,KAAK,aAAa,kBAClB,KAAO,KACL,EAAkB,EAAQ,iBAAmB,EA2DjD,GAxDI,EAAQ,oBACL,KAAK,uBAAuB,EAAQ,kBAAkB,EAKzD,EAAW,EAAQ,kBACnB,EAAU,KAAK,6BAA6B,IAAW,EALvD,EAAS,KACP,uBAAuB,EAAQ,kBAAkB,6CAClD,GASH,CAAC,EAAQ,mBACT,CAAC,KAAK,uBAAuB,EAAQ,kBAAkB,IAGrD,KAAK,aAAa,yBAClB,EAAQ,UAAY,EAAQ,yBAA2B,GAAK,KAAO,OAEnE,EAAW,WACX,EAAU,KACR,4CAA4C,EAAQ,SAAS,SAC9D,GAED,EAAW,SACX,EAAU,KACR,KAAK,aAAa,wBACd,0CAA0C,EAAQ,SAAS,SAC3D,gEACL,GAMH,KAAK,aAAa,cAClB,EAAY,KAAK,aAAa,eAE9B,EAAS,KACP,cAAc,EAAU,iBAAiB,KAAK,aAAa,aAAa,aACzE,CACD,EAAY,KAAK,aAAa,cAI9B,KAAK,aAAa,cAClB,EAAY,KAAK,aAAa,eAE9B,EAAS,KACP,cAAc,EAAU,iBAAiB,KAAK,aAAa,aAAa,aACzE,CACD,EAAY,KAAK,aAAa,cAI5B,IAAa,aAEb,KAAK,aAAa,sBAClB,EAAkB,KAAK,aAAa,uBAEpC,EAAS,KACP,oBAAoB,EAAgB,mBAAmB,KAAK,aAAa,qBAAqB,aAC/F,CACD,EAAkB,KAAK,aAAa,sBAIlC,KAAK,aAAa,UAAU,CAC9B,IAAM,EAAiB,KAAK,KAAK,EAAQ,SAAW,EAAU,CAC9D,GAAI,EAAiB,KAAK,aAAa,SAAU,CAC/C,IAAM,EAAmB,KAAK,KAC5B,EAAQ,SAAW,KAAK,aAAa,SACtC,CACD,EAAS,KACP,mBAAmB,EAAe,mBAAmB,KAAK,aAAa,SAAS,yBACjF,CACD,EAAY,KAAK,IAAI,EAAW,EAAiB,EAmBvD,OAbK,KAAK,uBAAuB,EAAS,GACxC,EAAS,KACP,kEACD,CACD,EAAW,SACX,EAAkB,GAIpB,EAAU,KACR,qCAAqC,KAAK,aAAa,wBAAwB,kBAAkB,KAAK,aAAa,sBAAs
B,cAAc,KAAK,aAAa,2BAC1K,CAEM,CACL,WACA,YACA,gBAAiB,IAAa,WAAa,EAAkB,EAC7D,YACA,WACD,CAQH,0BAAkD,CAChD,OAAO,KAAK,aA4Bd,sBAAsB,EAGpB,CACA,IAAMC,EAAmB,EAAE,CAwC3B,OArCE,EAAQ,mBACR,CAAC,KAAK,uBAAuB,EAAQ,kBAAkB,EAEvD,EAAO,KACL,uBAAuB,EAAQ,kBAAkB,+BAClD,CAGC,EAAQ,qBAER,KAAK,aAAa,cAClB,EAAQ,mBAAqB,KAAK,aAAa,cAE/C,EAAO,KACL,cAAc,EAAQ,mBAAmB,4BAA4B,KAAK,aAAa,eACxF,CAGD,KAAK,aAAa,cAClB,EAAQ,mBAAqB,KAAK,aAAa,cAE/C,EAAO,KACL,cAAc,EAAQ,mBAAmB,4BAA4B,KAAK,aAAa,eACxF,EAKH,EAAQ,iBACR,KAAK,aAAa,sBAClB,EAAQ,gBAAkB,KAAK,aAAa,sBAE5C,EAAO,KACL,oBAAoB,EAAQ,gBAAgB,8BAA8B,KAAK,aAAa,uBAC7F,CAGI,CACL,MAAO,EAAO,SAAW,EACzB,SACD"}
|
package/dist/upload-DWBlRXHh.cjs
DELETED
|
@@ -1 +0,0 @@
-const e=require(`./types-Cws60JHC.cjs`),t=require(`./uploadista-error-BgQU45we.cjs`),n=require(`./checksum-DVPe3Db4.cjs`),r=require(`./stream-limiter-BvkaZXcz.cjs`);let i=require(`effect`);function a(e,t,n=0){return e.length<n+t.length?!1:t.every((t,r)=>e[n+r]===t)}function o(e,t,n=0){if(e.length<n+t.length)return!1;for(let r=0;r<t.length;r++)if(e[n+r]!==t.charCodeAt(r))return!1;return!0}const s=(e,t)=>{if(e.length===0)return`application/octet-stream`;if(a(e,[137,80,78,71,13,10,26,10]))return`image/png`;if(a(e,[255,216,255]))return`image/jpeg`;if(o(e,`GIF87a`)||o(e,`GIF89a`))return`image/gif`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WEBP`,8))return`image/webp`;if(e.length>=12&&a(e,[0,0,0],0)&&o(e,`ftyp`,4)&&(o(e,`avif`,8)||o(e,`avis`,8)))return`image/avif`;if(e.length>=12&&o(e,`ftyp`,4)&&(o(e,`heic`,8)||o(e,`heif`,8)||o(e,`mif1`,8)))return`image/heic`;if(a(e,[66,77]))return`image/bmp`;if(a(e,[73,73,42,0])||a(e,[77,77,0,42]))return`image/tiff`;if(a(e,[0,0,1,0]))return`image/x-icon`;if(e.length>=5){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,Math.min(1024,e.length)));if(t.includes(`<svg`)||t.includes(`<?xml`)&&t.includes(`<svg`))return`image/svg+xml`}if(e.length>=12&&o(e,`ftyp`,4)){let t=new TextDecoder().decode(e.slice(8,12));if(t.startsWith(`mp4`)||t.startsWith(`M4`)||t.startsWith(`isom`))return`video/mp4`}if(a(e,[26,69,223,163]))return`video/webm`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`AVI `,8))return`video/x-msvideo`;if(e.length>=8&&(o(e,`moov`,4)||o(e,`mdat`,4)||o(e,`free`,4)))return`video/quicktime`;if(a(e,[26,69,223,163])&&e.length>=100&&new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,100)).includes(`matroska`))return`video/x-matroska`;if(a(e,[255,251])||a(e,[255,243])||a(e,[255,242])||o(e,`ID3`))return`audio/mpeg`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WAVE`,8))return`audio/wav`;if(o(e,`fLaC`))return`audio/flac`;if(o(e,`OggS`))return`audio/ogg`;if(e.length>=12&&o(e,`ftyp`,4)&&o(e,`M4A`,8))return`audio/mp4`;if(o(e,`%PDF`))return`application/pdf`;if(a(e,[80,75,3,4])||a(e,[80,75,5,6])||a(e,[80,75,7,8])){if(e.length>=1024){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e);if(t.includes(`word/`))return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;if(t.includes(`xl/`))return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;if(t.includes(`ppt/`))return`application/vnd.openxmlformats-officedocument.presentationml.presentation`}return`application/zip`}if(a(e,[82,97,114,33,26,7]))return`application/x-rar-compressed`;if(a(e,[55,122,188,175,39,28]))return`application/x-7z-compressed`;if(a(e,[31,139]))return`application/gzip`;if(e.length>=262&&o(e,`ustar`,257))return`application/x-tar`;if(o(e,`wOFF`))return`font/woff`;if(o(e,`wOF2`))return`font/woff2`;if(a(e,[0,1,0,0,0]))return`font/ttf`;if(o(e,`OTTO`))return`font/otf`;if(e.length>=1){let t=e[0];if(t===123||t===91)try{let t=new TextDecoder(`utf-8`).decode(e.slice(0,Math.min(1024,e.length)));return JSON.parse(t.trim()),`application/json`}catch{}}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`avif`:return`image/avif`;case`heic`:case`heif`:return`image/heic`;case`bmp`:return`image/bmp`;case`tiff`:case`tif`:return`image/tiff`;case`ico`:return`image/x-icon`;case`svg`:return`image/svg+xml`;case`mp4`:case`m4v`:return`video/mp4`;case`webm`:return`video/webm`;case`avi`:return`video/x-msvideo`;case`mov`:return`video/quicktime`;case`mkv`:return`video/x-matroska`;case`mp3`:return`audio/mpeg`;case`wav`:return`audio/wav`;case`flac`:return`audio/flac`;case`ogg`:return`audio/ogg`;case`m4a`:return`audio/mp4`;case`pdf`:return`application/pdf`;case`docx`:return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;case`xlsx`:return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;case`pptx`:return`application/vnd.openxmlformats-officedocument.presentationml.presentation`;case`zip`:return`application/zip`;case`rar`:return`application/x-rar-compressed`;case`7z`:return`application/x-7z-compressed`;case`gz`:case`gzip`:return`application/gzip`;case`tar`:return`application/x-tar`;case`woff`:return`font/woff`;case`woff2`:return`font/woff2`;case`ttf`:return`font/ttf`;case`otf`:return`font/otf`;case`txt`:return`text/plain`;case`json`:return`application/json`;case`xml`:return`application/xml`;case`html`:case`htm`:return`text/html`;case`css`:return`text/css`;case`js`:return`application/javascript`;case`csv`:return`text/csv`;default:return`application/octet-stream`}return`application/octet-stream`};function c(e,t){return e===t?!0:e.split(`/`)[0]===t.split(`/`)[0]}const l=i.Effect.gen(function*(){let e=yield*i.Effect.currentSpan.pipe(i.Effect.option);return i.Option.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),u=(t,n,{dataStoreService:r,kvStore:a,eventEmitter:o,generateId:s})=>i.Effect.gen(function*(){let c=yield*l,u=new Date().toISOString();return yield*i.Effect.gen(function*(){let i=yield*r.getDataStore(t.storageId,n),l=yield*s.generateId(),{size:d,type:f,fileName:p,lastModified:m,metadata:h,flow:g}=t,_={};if(h)try{_=JSON.parse(h)}catch{_={}}let v={..._,type:f,fileName:p??``};m&&(v.lastModified=m.toString());let y={id:l,size:d,metadata:v,offset:0,creationDate:u,storage:{id:t.storageId,type:f,path:``,bucket:i.bucket},flow:g,traceContext:c},b=yield*i.create(y);return yield*a.set(l,b),yield*o.emit(l,{type:e.n.UPLOAD_STARTED,data:b,flow:b.flow}),b}).pipe(i.Effect.withSpan(`upload-create`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}))}).pipe(i.Effect.withSpan(`upload`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}),i.Effect.tap(e=>i.Effect.gen(function*(){if(yield*i.Metric.increment(i.Metric.counter(`upload_created_total`,{description:`Total number of uploads created`})),e.size){let t=i.Metric.histogram(`upload_file_size_bytes`,i.MetricBoundaries.exponential({start:1024,factor:2,count:25}));yield*i.Metric.update(t,e.size)}let t=i.Metric.gauge(`active_uploads`);yield*i.Metric.increment(t)})),i.Effect.tap(e=>i.Effect.logInfo(`Upload created`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId}))),i.Effect.tapError(e=>i.Effect.gen(function*(){yield*i.Effect.logError(`Upload creation failed`).pipe(i.Effect.annotateLogs({"upload.file_name":t.fileName??`unknown`,"upload.storage_id":t.storageId,error:String(e)})),yield*i.Metric.increment(i.Metric.counter(`upload_failed_total`,{description:`Total number of uploads that failed`}))})));function d(e){return i.Stream.fromReadableStream(()=>e,e=>new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(e)}))}function f({data:n,upload:a,dataStore:o,maxFileSize:s,controller:c,eventEmitter:l,uploadProgressInterval:u=200}){return i.Effect.gen(function*(){let f=d(n);if(c.signal.aborted)return yield*i.Effect.fail(t.n.fromCode(`ABORTED`));let p=new AbortController,{signal:m}=p,h=()=>{p.abort()};return c.signal.addEventListener(`abort`,h,{once:!0}),yield*i.Effect.acquireUseRelease(i.Effect.sync(()=>({signal:m,onAbort:h})),({signal:n})=>i.Effect.gen(function*(){let t=yield*i.Ref.make(0),n=r.t.limit({maxSize:s})(f);return yield*o.write({stream:n,file_id:a.id,offset:a.offset},{onProgress:n=>{let r=Date.now();i.Ref.get(t).pipe(i.Effect.flatMap(o=>r-o>=u?i.Effect.gen(function*(){yield*i.Ref.set(t,r),yield*l.emit(a.id,{type:e.n.UPLOAD_PROGRESS,data:{id:a.id,progress:n,total:a.size??0},flow:a.flow})}):i.Effect.void),i.Effect.runPromise).catch(()=>{})}})}).pipe(i.Effect.catchAll(e=>e instanceof Error&&e.name===`AbortError`?i.Effect.fail(t.n.fromCode(`ABORTED`)):e instanceof t.n?i.Effect.fail(e):i.Effect.fail(t.n.fromCode(`FILE_WRITE_ERROR`,{cause:e})))),({onAbort:e})=>i.Effect.sync(()=>{c.signal.removeEventListener(`abort`,e)}))}).pipe(i.Effect.withSpan(`upload-write-to-store`,{attributes:{"upload.id":a.id,"upload.offset":a.offset.toString(),"upload.max_file_size":s.toString(),"upload.file_size":a.size?.toString()??`0`}}),i.Effect.tap(e=>i.Effect.logDebug(`Data written to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"write.offset":e.toString(),"write.bytes_written":(e-a.offset).toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to write to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"upload.offset":a.offset.toString(),error:e instanceof t.n?e.code:String(e)}))))}function p(e){return i.Tracer.externalSpan({traceId:e.traceId,spanId:e.spanId,sampled:e.traceFlags===1})}const m=(e,t)=>{let n=new Date(e.creationDate).getTime(),r=Date.now()-n;return i.Effect.void.pipe(i.Effect.withSpan(`upload-complete`,{attributes:{"upload.id":e.id,"upload.size":e.size??0,"upload.total_duration_ms":r,"upload.storage_id":e.storage.id,"upload.file_name":e.metadata?.fileName??`unknown`,"upload.creation_date":e.creationDate,"upload.completion_date":new Date().toISOString()},parent:t}))},h=(t,n,r,{dataStoreService:a,kvStore:o,eventEmitter:s})=>i.Effect.gen(function*(){let c=yield*o.get(t),l=c.traceContext?p(c.traceContext):void 0;return yield*i.Effect.gen(function*(){let i=yield*a.getDataStore(c.storage.id,n);return c.offset=yield*f({dataStore:i,data:r,upload:c,maxFileSize:1e8,controller:new AbortController,uploadProgressInterval:200,eventEmitter:s}),yield*o.set(t,c),yield*s.emit(c.id,{type:e.n.UPLOAD_PROGRESS,data:{id:c.id,progress:c.offset,total:c.size??0},flow:c.flow}),c.size&&c.offset===c.size&&(yield*g({file:c,dataStore:i,eventEmitter:s}),c.traceContext&&(yield*m(c,p(c.traceContext)))),c}).pipe(i.Effect.withSpan(`upload-chunk`,{attributes:{"upload.id":t,"chunk.upload_id":t,"upload.has_trace_context":c.traceContext?`true`:`false`},parent:l}))}).pipe(i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`chunk_uploaded_total`,{description:`Total number of chunks uploaded`}));let t=e.offset,n=i.Metric.histogram(`chunk_size_bytes`,i.MetricBoundaries.linear({start:262144,width:262144,count:20}));if(yield*i.Metric.update(n,t),e.size&&e.size>0){let e=t,n=i.Metric.gauge(`upload_throughput_bytes_per_second`);yield*i.Metric.set(n,e)}})),i.Effect.tap(e=>i.Effect.logDebug(`Chunk uploaded`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"chunk.size":e.offset.toString(),"chunk.progress":e.size&&e.size>0?(e.offset/e.size*100).toFixed(2):`0`,"upload.total_size":e.size?.toString()??`0`}))),i.Effect.tapError(e=>i.Effect.logError(`Chunk upload failed`).pipe(i.Effect.annotateLogs({"upload.id":t,error:String(e)})))),g=({file:r,dataStore:a,eventEmitter:o})=>i.Effect.gen(function*(){let i=a.getCapabilities();if(i.maxValidationSize&&r.size&&r.size>i.maxValidationSize){yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_WARNING,data:{id:r.id,message:`File size (${r.size} bytes) exceeds max validation size (${i.maxValidationSize} bytes). Validation skipped.`},flow:r.flow});return}let l=yield*a.read(r.id);if(r.checksum&&r.checksumAlgorithm){let i=yield*n.t(l,r.checksumAlgorithm);if(i!==r.checksum)return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`checksum_mismatch`,expected:r.checksum,actual:i},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`CHECKSUM_MISMATCH`,{body:`Checksum validation failed. Expected: ${r.checksum}, Got: ${i}`,details:{uploadId:r.id,expected:r.checksum,actual:i,algorithm:r.checksumAlgorithm}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`checksum`,algorithm:r.checksumAlgorithm},flow:r.flow})}if(i.requiresMimeTypeValidation){let n=s(l),i=r.metadata?.type;if(i&&!c(i,n))return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`mimetype_mismatch`,expected:i,actual:n},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`MIMETYPE_MISMATCH`,{body:`MIME type validation failed. Expected: ${i}, Detected: ${n}`,details:{uploadId:r.id,expected:i,actual:n}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`mimetype`},flow:r.flow})}}).pipe(i.Effect.withSpan(`validate-upload`,{attributes:{"upload.id":r.id,"validation.checksum_provided":r.checksum?`true`:`false`,"validation.mime_required":a.getCapabilities().requiresMimeTypeValidation?`true`:`false`}})),_=e=>i.Effect.tryPromise({try:async()=>await fetch(e),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-fetch-url`,{attributes:{"upload.url":e,"upload.operation":`fetch`}}),i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_total`,{description:`Total number of URL-based uploads`})),e.ok&&(yield*i.Metric.increment(i.Metric.counter(`upload_from_url_success_total`,{description:`Total number of successful URL-based uploads`})))})),i.Effect.tap(t=>i.Effect.logInfo(`URL fetch completed`).pipe(i.Effect.annotateLogs({"upload.url":e,"response.status":t.status.toString(),"response.ok":t.ok.toString(),"response.content_length":t.headers.get(`content-length`)??`unknown`}))),i.Effect.tapError(t=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_failed_total`,{description:`Total number of failed URL-based uploads`})),yield*i.Effect.logError(`URL fetch failed`).pipe(i.Effect.annotateLogs({"upload.url":e,error:String(t)}))}))),v=e=>i.Effect.tryPromise({try:async()=>await e.arrayBuffer(),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-convert-to-buffer`,{attributes:{"upload.operation":`arrayBuffer`}}),i.Effect.tap(e=>i.Effect.logDebug(`Response converted to array buffer`).pipe(i.Effect.annotateLogs({"buffer.size":e.byteLength.toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to convert response to array buffer`).pipe(i.Effect.annotateLogs({error:String(e)}))));var y=class extends i.Context.Tag(`UploadEngine`)(){};function b(){return i.Effect.gen(function*(){let t=yield*e.N,r=yield*e.p,a=yield*n.r,o=yield*e.S;return{upload:(e,n,s)=>i.Effect.gen(function*(){return yield*h((yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,s,{dataStoreService:o,kvStore:t,eventEmitter:r})}),uploadFromUrl:(e,n,s)=>i.Effect.gen(function*(){let i=yield*v(yield*_(s)),c=new ReadableStream({start(e){e.enqueue(new Uint8Array(i)),e.close()}});return yield*h((yield*u({...e,size:i.byteLength},n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,c,{dataStoreService:o,kvStore:t,eventEmitter:r})}),createUpload:(e,n)=>i.Effect.gen(function*(){return yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})}),uploadChunk:(e,n,a)=>i.Effect.gen(function*(){return yield*h(e,n,a,{dataStoreService:o,kvStore:t,eventEmitter:r})}),getUpload:e=>i.Effect.gen(function*(){return yield*t.get(e)}),read:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);return yield*(yield*o.getDataStore(r.storage.id,n)).read(e)}),readStream:(e,n,r)=>i.Effect.gen(function*(){let a=yield*t.get(e),s=yield*o.getDataStore(a.storage.id,n);if(s.getCapabilities().supportsStreamingRead&&s.readStream)return yield*i.Effect.logDebug(`Using streaming read for file ${e}`),yield*s.readStream(e,r);yield*i.Effect.logDebug(`Falling back to buffered read for file ${e} (streaming not supported)`);let c=yield*s.read(e);return i.Stream.succeed(c)}),uploadStream:(n,s,c)=>i.Effect.gen(function*(){let l=yield*o.getDataStore(n.storageId,s),d=l.getCapabilities(),f=yield*a.generateId();if(d.supportsStreamingWrite&&l.writeStream){yield*i.Effect.logDebug(`Using streaming write for file ${f}`);let a=typeof n.metadata==`string`?JSON.parse(n.metadata):n.metadata||{},o=Object.fromEntries(Object.entries(a).map(([e,t])=>[e,String(t)])),s={id:f,offset:0,size:n.size??0,storage:{id:n.storageId,type:l.getCapabilities().supportsStreamingWrite?`streaming`:`default`},metadata:a,creationDate:new Date().toISOString()};yield*t.set(f,s),yield*r.emit(f,{type:e.n.UPLOAD_STARTED,data:s});let u=yield*l.writeStream(f,{stream:c,contentType:n.type,sizeHint:n.sizeHint,metadata:o}),d={...s,size:u.size,offset:u.size,storage:{...s.storage,path:u.path},...u.url&&{url:u.url}};return yield*t.set(f,d),yield*r.emit(f,{type:e.n.UPLOAD_COMPLETE,data:d}),d}yield*i.Effect.logWarning(`Falling back to buffered upload for file ${f} (streaming write not supported)`);let p=[];yield*i.Stream.runForEach(c,e=>i.Effect.sync(()=>{p.push(e)}));let m=p.reduce((e,t)=>e+t.length,0),g=new Uint8Array(m),_=0;for(let e of p)g.set(e,_),_+=e.length;let v=new ReadableStream({start(e){e.enqueue(g),e.close()}});return yield*u({...n,size:m},s,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:{generateId:()=>i.Effect.succeed(f)}}),yield*h(f,s,v,{dataStoreService:o,kvStore:t,eventEmitter:r})}),delete:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);yield*(yield*o.getDataStore(r.storage.id,n)).remove(e),yield*t.delete(e)}),getCapabilities:(e,t)=>i.Effect.gen(function*(){return(yield*o.getDataStore(e,t)).getCapabilities()}),subscribeToUploadEvents:(e,t)=>i.Effect.gen(function*(){yield*r.subscribe(e,t)}),unsubscribeFromUploadEvents:e=>i.Effect.gen(function*(){yield*r.unsubscribe(e)})}})}const x=i.Layer.effect(y,b());var S=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return s}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return S}});
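
Note: the first half of this deleted bundle implements magic-byte MIME sniffing. A minimal de-minified TypeScript sketch of those helpers (minified above as `a`, `o`, and the detector `s`) follows; the readable names are illustrative only, not part of the package's public API, and only a few representative signature branches are shown.

// Sketch with illustrative names; the real bundle checks ~40 formats this way.
function bytesMatch(buf: Uint8Array, sig: number[], offset = 0): boolean {
  // Minified `a`: compare raw bytes at an optional offset.
  if (buf.length < offset + sig.length) return false;
  return sig.every((byte, i) => buf[offset + i] === byte);
}

function asciiMatch(buf: Uint8Array, text: string, offset = 0): boolean {
  // Minified `o`: compare bytes against an ASCII string.
  if (buf.length < offset + text.length) return false;
  for (let i = 0; i < text.length; i++) {
    if (buf[offset + i] !== text.charCodeAt(i)) return false;
  }
  return true;
}

function sniffMimeType(buf: Uint8Array): string {
  // Minified `s`: signatures are tried before any extension-based fallback.
  if (bytesMatch(buf, [137, 80, 78, 71, 13, 10, 26, 10])) return "image/png";
  if (bytesMatch(buf, [255, 216, 255])) return "image/jpeg";
  if (bytesMatch(buf, [82, 73, 70, 70]) && buf.length >= 12 && asciiMatch(buf, "WEBP", 8))
    return "image/webp"; // RIFF container with a WEBP tag at byte 8
  return "application/octet-stream";
}

As in the bundle, content sniffing runs before the extension switch, so a mislabeled file is classified by its bytes first and the declared type is only a fallback.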
|