@uploadista/data-store-gcs 0.0.20-beta.9 → 0.1.0-beta.5
- package/dist/index.mjs.map +1 -1
- package/package.json +9 -9
package/dist/index.mjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.mjs","names":["stringifyUploadKeys","getUpload","uploadRequestsTotal","activeUploadsGauge","fileSizeHistogram","uploadErrorsTotal","options: CreateWriteStreamOptions","withUploadMetrics","withTimingMetrics","uploadDurationHistogram","uploadSuccessTotal","gcsOptions: CreateWriteStreamOptions","bucket: Bucket","uploadUrl","createGCSStore","chunks: Uint8Array[]"],"sources":["../src/gcs-store.ts","../src/services/gcs-client.service.ts","../src/services/gcs-client-nodejs.service.ts","../src/services/gcs-client-rest.service.ts","../src/gcs-store-v2.ts"],"sourcesContent":["import { PassThrough, pipeline, Readable, Transform } from \"node:stream\";\nimport type { Bucket, CreateWriteStreamOptions } from \"@google-cloud/storage\";\nimport { Storage } from \"@google-cloud/storage\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n type DataStore,\n type DataStoreCapabilities,\n type DataStoreWriteOptions,\n DEFAULT_STREAMING_CONFIG,\n type KvStore,\n type StreamingConfig,\n type StreamWriteOptions,\n type StreamWriteResult,\n type UploadFile,\n UploadFileKVStore,\n type UploadStrategy,\n} from \"@uploadista/core/types\";\nimport {\n gcsActiveUploadsGauge as activeUploadsGauge,\n gcsFileSizeHistogram as fileSizeHistogram,\n logGCSUploadCompletion,\n trackGCSError,\n gcsUploadDurationHistogram as uploadDurationHistogram,\n gcsUploadErrorsTotal as uploadErrorsTotal,\n gcsUploadRequestsTotal as uploadRequestsTotal,\n gcsUploadSuccessTotal as uploadSuccessTotal,\n withGCSTimingMetrics as withTimingMetrics,\n withGCSUploadMetrics as withUploadMetrics,\n} from \"@uploadista/observability\";\nimport { Effect, Stream } from \"effect\";\n\nexport type GCSStoreOptions = {\n keyFilename?: string;\n credentials?: object;\n bucketName: string;\n kvStore: KvStore<UploadFile>;\n};\n\n/**\n * Convert the Upload object to a format that can be stored in GCS metadata.\n */\nfunction stringifyUploadKeys(upload: UploadFile) {\n return {\n size: upload.size ?? null,\n sizeIsDeferred: `${upload.sizeIsDeferred}`,\n offset: upload.offset,\n metadata: JSON.stringify(upload.metadata),\n storage: JSON.stringify(upload.storage),\n };\n}\n\nconst getUpload = (\n bucket: Bucket,\n id: string,\n kvStore: KvStore<UploadFile>,\n) => {\n return Effect.gen(function* () {\n try {\n const [metadata] = yield* Effect.promise(() =>\n bucket.file(id).getMetadata(),\n );\n const { size, metadata: meta } = metadata;\n const file = yield* kvStore.get(id);\n return {\n id,\n size: size ? Number.parseInt(`${size}`, 10) : undefined,\n offset: metadata.size ? Number.parseInt(`${metadata.size}`, 10) : 0, // `size` is set by GCS\n metadata: meta ? (meta as Record<string, string>) : undefined,\n storage: {\n id: file.storage.id,\n type: file.storage.type,\n path: id,\n bucket: bucket.name,\n },\n };\n } catch (error) {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return yield* Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n }\n\n throw error;\n }\n });\n};\n\nexport function createGCSStore({\n keyFilename,\n credentials,\n bucketName,\n}: Omit<GCSStoreOptions, \"kvStore\">) {\n return Effect.gen(function* () {\n const kvStore = yield* UploadFileKVStore;\n return gcsStore({ keyFilename, credentials, bucketName, kvStore });\n });\n}\n\nexport function gcsStore({\n keyFilename,\n credentials,\n bucketName,\n kvStore,\n}: GCSStoreOptions): DataStore<UploadFile> {\n const storage = new Storage(\n keyFilename ? 
{ keyFilename } : credentials ? { credentials } : {},\n );\n\n const bucket = storage.bucket(bucketName);\n\n const getCapabilities = (): DataStoreCapabilities => {\n return {\n supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3\n supportsConcatenation: true, // Can combine files using bucket.combine\n supportsDeferredLength: true,\n supportsResumableUploads: true, // Through patch files\n supportsTransactionalUploads: false,\n supportsStreamingRead: true, // Supports streaming reads via file.createReadStream\n supportsStreamingWrite: true, // Supports streaming writes via resumable uploads\n maxConcurrentUploads: 1, // Sequential operations\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB default\n requiresOrderedChunks: true, // Due to combine operation\n requiresMimeTypeValidation: true,\n maxValidationSize: undefined, // no size limit\n };\n };\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n\n const result = (() => {\n switch (strategy) {\n case \"parallel\":\n return capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n })();\n\n return Effect.succeed(result);\n };\n\n return {\n bucket: bucket.name,\n create: (file: UploadFile) => {\n return Effect.gen(function* () {\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n yield* fileSizeHistogram(Effect.succeed(file.size || 0));\n\n if (!file.id) {\n yield* uploadErrorsTotal(Effect.succeed(1));\n return yield* Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n }\n\n const gcs_file = bucket.file(file.id);\n\n file.storage = {\n id: file.storage.id,\n type: file.storage.type,\n path: file.id,\n bucket: bucket.name,\n };\n\n console.log(\"file\", gcs_file.id);\n\n const options: CreateWriteStreamOptions = {\n metadata: {\n metadata: {\n ...stringifyUploadKeys(file),\n },\n },\n };\n if (file.metadata?.contentType) {\n options.contentType = file.metadata.contentType.toString();\n }\n\n return yield* Effect.tryPromise({\n try: () => {\n console.log(\"creating file\", gcs_file.id);\n return new Promise<UploadFile>((resolve, reject) => {\n const fake_stream = new PassThrough();\n fake_stream.end();\n fake_stream\n .pipe(gcs_file.createWriteStream(options))\n .on(\"error\", reject)\n .on(\"finish\", () => {\n resolve(file);\n });\n });\n },\n catch: (error) => {\n console.error(\"error creating file\", error);\n Effect.runSync(\n trackGCSError(\"create\", error, {\n upload_id: file.id,\n bucket: bucket.name,\n }),\n );\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", {\n cause: error,\n });\n },\n });\n });\n },\n read: (file_id: string) => {\n return Effect.tryPromise({\n try: async () => {\n const [buffer] = await bucket.file(file_id).download();\n return new Uint8Array(buffer);\n },\n catch: (error) => {\n Effect.runSync(\n trackGCSError(\"read\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", {\n cause: error,\n });\n },\n });\n },\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Uses GCS file.createReadStream under the hood.\n *\n * @param file_id - The 
unique identifier of the file to read\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n */\n readStream: (file_id: string, config?: StreamingConfig) =>\n Effect.gen(function* () {\n // Merge config with defaults\n const effectiveConfig = {\n ...DEFAULT_STREAMING_CONFIG,\n ...config,\n };\n\n // Verify file exists\n const file = bucket.file(file_id);\n const [exists] = yield* Effect.tryPromise({\n try: () => file.exists(),\n catch: (error) => {\n Effect.runSync(\n trackGCSError(\"readStream\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", {\n cause: error,\n });\n },\n });\n\n if (!exists) {\n return yield* Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n }\n\n // Create a Node.js readable stream from GCS\n const nodeStream = file.createReadStream();\n\n // Convert Node.js stream to Effect Stream with chunking\n return Stream.async<Uint8Array, UploadistaError>((emit) => {\n const chunkSize = effectiveConfig.chunkSize;\n let buffer = new Uint8Array(0);\n\n nodeStream.on(\"data\", (chunk: Buffer) => {\n // Combine buffer with new data\n const combined = new Uint8Array(buffer.length + chunk.length);\n combined.set(buffer);\n combined.set(new Uint8Array(chunk), buffer.length);\n buffer = combined;\n\n // Emit chunks of the configured size\n while (buffer.length >= chunkSize) {\n const outChunk = buffer.slice(0, chunkSize);\n buffer = buffer.slice(chunkSize);\n emit.single(outChunk);\n }\n });\n\n nodeStream.on(\"end\", () => {\n // Emit any remaining data in buffer\n if (buffer.length > 0) {\n emit.single(buffer);\n }\n emit.end();\n });\n\n nodeStream.on(\"error\", (error: Error) => {\n Effect.runSync(\n trackGCSError(\"readStream\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n emit.fail(\n new UploadistaError({\n code: \"FILE_READ_ERROR\",\n status: 500,\n body: \"Failed to read GCS file stream\",\n details: `GCS stream read failed: ${String(error)}`,\n }),\n );\n });\n\n // Cleanup function when stream is interrupted\n return Effect.sync(() => {\n nodeStream.destroy();\n });\n });\n }),\n remove: (file_id: string) => {\n return Effect.gen(function* () {\n try {\n yield* Effect.promise(() => bucket.file(file_id).delete());\n yield* activeUploadsGauge(Effect.succeed(-1));\n } catch (error) {\n Effect.runSync(\n trackGCSError(\"remove\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n throw error;\n }\n });\n },\n /**\n * Get the file metatata from the object in GCS, then upload a new version\n * passing through the metadata to the new version.\n */\n write: (\n options: DataStoreWriteOptions,\n dependencies: {\n onProgress?: (chunkSize: number) => void;\n },\n ) => {\n return withUploadMetrics(\n options.file_id,\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n const { file_id, offset, stream: effectStream } = options;\n console.log(\"write\", file_id, offset);\n const { onProgress } = dependencies;\n\n // GCS Doesn't persist metadata within versions,\n // get that metadata first\n const upload = yield* getUpload(bucket, file_id, kvStore);\n console.log(\"upload\", upload);\n\n return yield* Effect.promise(\n () =>\n new Promise<number>((resolve, reject) => {\n const file = bucket.file(file_id);\n const destination =\n upload.offset === 0\n ? 
file\n : bucket.file(`${file_id}_patch`);\n\n upload.offset = offset;\n\n const gcsOptions = {\n metadata: {\n metadata: {\n ...stringifyUploadKeys(upload),\n },\n },\n };\n const write_stream =\n destination.createWriteStream(gcsOptions);\n if (!write_stream) {\n Effect.runSync(uploadErrorsTotal(Effect.succeed(1)));\n reject(UploadistaError.fromCode(\"FILE_WRITE_ERROR\"));\n return;\n }\n\n let bytes_received = upload.offset;\n\n // Convert Effect Stream to ReadableStream\n const readableStream = Stream.toReadableStream(effectStream);\n\n const transform = new Transform({\n transform(\n chunk: Buffer,\n _: string,\n callback: (error?: Error | null, data?: Buffer) => void,\n ) {\n bytes_received += chunk.length;\n onProgress?.(bytes_received);\n callback(null, chunk);\n },\n });\n\n const nodeReadable = Readable.fromWeb(readableStream);\n\n pipeline(\n nodeReadable,\n transform,\n write_stream,\n async (e: Error | null) => {\n if (e) {\n console.error(\"error writing file\", e);\n Effect.runSync(\n trackGCSError(\"write\", e, {\n upload_id: file_id,\n bucket: bucket.name,\n offset,\n }),\n );\n try {\n await destination.delete({ ignoreNotFound: true });\n } finally {\n reject(UploadistaError.fromCode(\"FILE_WRITE_ERROR\"));\n }\n } else {\n try {\n if (file !== destination) {\n await bucket.combine([file, destination], file);\n await Promise.all([\n file.setMetadata(gcsOptions.metadata),\n destination.delete({ ignoreNotFound: true }),\n ]);\n }\n\n // Log completion\n Effect.runSync(\n logGCSUploadCompletion(file_id, {\n fileSize: upload.size || 0,\n totalDurationMs: Date.now() - startTime,\n partsCount: 1,\n averagePartSize: upload.size,\n throughputBps:\n (upload.size || 0) / (Date.now() - startTime),\n retryCount: 0,\n }),\n );\n Effect.runSync(uploadSuccessTotal(Effect.succeed(1)));\n Effect.runSync(\n activeUploadsGauge(Effect.succeed(-1)),\n );\n\n resolve(bytes_received);\n } catch (error) {\n console.error(error);\n Effect.runSync(\n trackGCSError(\"write\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n operation: \"combine\",\n }),\n );\n reject(UploadistaError.fromCode(\"FILE_WRITE_ERROR\"));\n }\n }\n },\n );\n }),\n );\n }),\n ),\n );\n },\n /**\n * Writes file content from a stream without knowing the final size upfront.\n * Uses GCS resumable upload with streaming directly to the write stream.\n *\n * @param fileId - The unique identifier for the file\n * @param options - Stream write options including the Effect Stream\n * @returns StreamWriteResult with final size after stream completes\n */\n writeStream: (\n fileId: string,\n options: StreamWriteOptions,\n ): Effect.Effect<StreamWriteResult, UploadistaError> =>\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n\n yield* Effect.logInfo(\"Starting streaming write to GCS\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n bucket: bucket.name,\n size_hint: options.sizeHint,\n }),\n );\n\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n\n const file = bucket.file(fileId);\n\n // Set up write stream options\n const gcsOptions: CreateWriteStreamOptions = {\n resumable: true, // Enable resumable uploads for better reliability\n metadata: options.metadata\n ? 
{ metadata: options.metadata }\n : undefined,\n };\n\n if (options.contentType) {\n gcsOptions.contentType = options.contentType;\n }\n\n // Create the write stream\n const writeStream = file.createWriteStream(gcsOptions);\n\n // Stream the content and track bytes\n const result = yield* Effect.tryPromise({\n try: () =>\n new Promise<number>((resolve, reject) => {\n let totalBytes = 0;\n\n // Create a pass-through stream to track bytes\n const passThrough = new PassThrough();\n\n passThrough.on(\"data\", (chunk: Buffer) => {\n totalBytes += chunk.length;\n });\n\n // Pipe passThrough to GCS writeStream\n passThrough.pipe(writeStream);\n\n writeStream.on(\"error\", (error: Error) => {\n Effect.runSync(\n trackGCSError(\"writeStream\", error, {\n upload_id: fileId,\n bucket: bucket.name,\n }),\n );\n reject(error);\n });\n\n writeStream.on(\"finish\", () => {\n resolve(totalBytes);\n });\n\n // Convert Effect Stream to readable and pipe to passThrough\n const readableStream = Stream.toReadableStream(options.stream);\n const nodeReadable = Readable.fromWeb(readableStream);\n\n nodeReadable.on(\"error\", (error: Error) => {\n Effect.runSync(\n trackGCSError(\"writeStream\", error, {\n upload_id: fileId,\n bucket: bucket.name,\n phase: \"read\",\n }),\n );\n passThrough.destroy(error);\n reject(error);\n });\n\n pipeline(nodeReadable, passThrough, (error) => {\n if (error) {\n Effect.runSync(\n trackGCSError(\"writeStream\", error, {\n upload_id: fileId,\n bucket: bucket.name,\n phase: \"pipeline\",\n }),\n );\n reject(error);\n }\n });\n }),\n catch: (error) => {\n Effect.runSync(uploadErrorsTotal(Effect.succeed(1)));\n Effect.runSync(activeUploadsGauge(Effect.succeed(-1)));\n return new UploadistaError({\n code: \"FILE_WRITE_ERROR\",\n status: 500,\n body: \"Failed to write stream to GCS\",\n details: `GCS streaming write failed: ${String(error)}`,\n });\n },\n });\n\n // Log completion metrics\n const endTime = Date.now();\n const totalDurationMs = endTime - startTime;\n const throughputBps =\n totalDurationMs > 0 ? 
(result * 1000) / totalDurationMs : 0;\n\n yield* logGCSUploadCompletion(fileId, {\n fileSize: result,\n totalDurationMs,\n partsCount: 1,\n averagePartSize: result,\n throughputBps,\n retryCount: 0,\n });\n\n yield* uploadSuccessTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n yield* fileSizeHistogram(Effect.succeed(result));\n\n yield* Effect.logInfo(\"Streaming write to GCS completed\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n total_bytes: result,\n duration_ms: totalDurationMs,\n }),\n );\n\n return {\n id: fileId,\n size: result,\n path: fileId,\n bucket: bucket.name,\n } satisfies StreamWriteResult;\n }),\n ),\n getCapabilities,\n validateUploadStrategy,\n };\n}\n","import type { UploadistaError } from \"@uploadista/core/errors\";\nimport { Context, type Effect } from \"effect\";\n\nexport interface GCSOperationContext {\n bucket: string;\n key: string;\n contentType?: string;\n metadata?: Record<string, string | null>;\n}\n\nexport interface GCSObjectMetadata {\n name: string;\n bucket: string;\n size?: number;\n contentType?: string;\n metadata?: Record<string, string | null>;\n generation?: string;\n timeCreated?: string;\n updated?: string;\n}\n\nexport type GCSClient = {\n readonly bucket: string;\n\n // Basic GCS operations\n readonly getObject: (\n key: string,\n ) => Effect.Effect<ReadableStream, UploadistaError>;\n readonly getObjectMetadata: (\n key: string,\n ) => Effect.Effect<GCSObjectMetadata, UploadistaError>;\n readonly getObjectBuffer: (\n key: string,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n readonly objectExists: (\n key: string,\n ) => Effect.Effect<boolean, UploadistaError>;\n readonly putObject: (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => Effect.Effect<string, UploadistaError>;\n readonly putObjectFromStream?: (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n ) => Effect.Effect<number, UploadistaError>;\n readonly putObjectFromStreamWithPatching?: (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n isAppend?: boolean,\n ) => Effect.Effect<number, UploadistaError>;\n readonly deleteObject: (key: string) => Effect.Effect<void, UploadistaError>;\n\n // Resumable upload operations\n readonly createResumableUpload: (\n context: GCSOperationContext,\n ) => Effect.Effect<string, UploadistaError>; // Returns upload URL\n readonly uploadChunk: (\n uploadUrl: string,\n chunk: Uint8Array,\n start: number,\n total?: number,\n ) => Effect.Effect<\n { completed: boolean; bytesUploaded: number },\n UploadistaError\n >;\n readonly getUploadStatus: (\n uploadUrl: string,\n ) => Effect.Effect<\n { bytesUploaded: number; completed: boolean },\n UploadistaError\n >;\n readonly cancelUpload: (\n uploadUrl: string,\n ) => Effect.Effect<void, UploadistaError>;\n\n // Compose operations (GCS specific - for combining files)\n readonly composeObjects: (\n sourceKeys: string[],\n destinationKey: string,\n context?: Partial<GCSOperationContext>,\n ) => Effect.Effect<string, UploadistaError>;\n\n // Temporary file operations (for patches)\n readonly putTemporaryObject: (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => Effect.Effect<string, UploadistaError>;\n readonly 
getTemporaryObject: (\n key: string,\n ) => Effect.Effect<ReadableStream | undefined, UploadistaError>;\n readonly deleteTemporaryObject: (\n key: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\nexport class GCSClientService extends Context.Tag(\"GCSClientService\")<\n GCSClientService,\n GCSClient\n>() {}\n\nexport interface GCSClientConfig {\n bucket: string;\n // For Node.js implementation\n keyFilename?: string;\n credentials?: object;\n projectId?: string;\n // For REST API implementation\n accessToken?: string;\n}\n","import { pipeline, Readable, Transform } from \"node:stream\";\nimport { type Bucket, Storage } from \"@google-cloud/storage\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport { Effect, Layer } from \"effect\";\nimport {\n type GCSClientConfig,\n GCSClientService,\n type GCSObjectMetadata,\n type GCSOperationContext,\n} from \"./gcs-client.service\";\n\nfunction createNodeJSGCSClient(config: GCSClientConfig) {\n // Dynamic import to avoid issues in non-Node environments\n\n const storage = new Storage({\n keyFilename: config.keyFilename,\n credentials: config.credentials,\n projectId: config.projectId,\n });\n\n const bucket: Bucket = storage.bucket(config.bucket);\n\n const getObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n const stream = file.createReadStream();\n\n // Convert Node.js stream to Web ReadableStream\n return new ReadableStream({\n start(controller) {\n stream.on(\"data\", (chunk) => {\n controller.enqueue(new Uint8Array(chunk));\n });\n\n stream.on(\"end\", () => {\n controller.close();\n });\n\n stream.on(\"error\", (error) => {\n controller.error(error);\n });\n },\n });\n },\n catch: (error) => {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const getObjectMetadata = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n const [metadata] = await file.getMetadata();\n\n const parseMetadata = (\n meta: Record<string, string | number | boolean | null> | undefined,\n ) => {\n if (!meta) return {};\n if (typeof meta.metadata === \"string\") {\n try {\n return JSON.parse(meta.metadata);\n } catch {\n return meta;\n }\n }\n return meta;\n };\n\n return {\n name: metadata.name,\n bucket: metadata.bucket,\n size: metadata.size\n ? 
Number.parseInt(`${metadata.size}`, 10)\n : undefined,\n contentType: metadata.contentType,\n metadata: parseMetadata(metadata.metadata),\n generation: metadata.generation,\n timeCreated: metadata.timeCreated,\n updated: metadata.updated,\n } as GCSObjectMetadata;\n },\n catch: (error) => {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const objectExists = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n const [exists] = await file.exists();\n return exists;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const putObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n return new Promise<string>((resolve, reject) => {\n const file = bucket.file(key);\n const options = {\n metadata: {\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n },\n };\n\n const stream = file.createWriteStream(options);\n\n stream.on(\"error\", reject);\n stream.on(\"finish\", () => {\n resolve(file.name);\n });\n\n stream.end(Buffer.from(body));\n });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const putObjectFromStream = (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n ) =>\n Effect.tryPromise({\n try: async () => {\n return new Promise<number>((resolve, reject) => {\n const file = bucket.file(key);\n const options = {\n metadata: {\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n },\n };\n\n const writeStream = file.createWriteStream(options);\n let bytesWritten = offset;\n\n const transform = new Transform({\n transform(\n chunk: Buffer,\n _: string,\n callback: (error?: Error | null, data?: Buffer) => void,\n ) {\n bytesWritten += chunk.length;\n onProgress?.(bytesWritten);\n callback(null, chunk);\n },\n });\n\n const nodeReadable = Readable.fromWeb(readableStream);\n\n pipeline(\n nodeReadable,\n transform,\n writeStream,\n (error: Error | null) => {\n if (error) {\n reject(\n UploadistaError.fromCode(\"FILE_WRITE_ERROR\", {\n cause: error,\n }),\n );\n } else {\n resolve(bytesWritten);\n }\n },\n );\n });\n },\n catch: (error) => {\n console.error(\"error putting object from stream\", error);\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const deleteObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n await file.delete({ ignoreNotFound: true });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const createResumableUpload = (context: GCSOperationContext) =>\n Effect.tryPromise({\n try: async () => {\n // For Node.js, we'll use a simplified approach\n // In production, you'd want to implement proper resumable uploads\n // Return a pseudo-URL that we can use to identify this upload\n return `resumable://nodejs/${context.bucket}/${context.key}`;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { 
cause: error });\n },\n });\n\n const uploadChunk = (\n uploadUrl: string,\n chunk: Uint8Array,\n start: number,\n total?: number,\n ) =>\n Effect.tryPromise({\n try: async () => {\n // Extract key from pseudo-URL\n const key = uploadUrl.split(\"/\").pop();\n if (!key) {\n throw new Error(\"Invalid upload URL\");\n }\n\n const file = bucket.file(key);\n\n return new Promise<{ completed: boolean; bytesUploaded: number }>(\n (resolve, reject) => {\n const stream = file.createWriteStream({\n resumable: true,\n offset: start,\n });\n\n stream.on(\"error\", reject);\n stream.on(\"finish\", () => {\n resolve({\n completed: total ? start + chunk.length >= total : false,\n bytesUploaded: start + chunk.length,\n });\n });\n\n stream.end(Buffer.from(chunk));\n },\n );\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const getUploadStatus = (uploadUrl: string) =>\n Effect.promise(async () => {\n try {\n const key = uploadUrl.split(\"/\").pop();\n if (!key) {\n throw new Error(\"Invalid upload URL\");\n }\n\n const file = bucket.file(key);\n const [metadata] = await file.getMetadata();\n\n return {\n bytesUploaded: metadata.size\n ? Number.parseInt(`${metadata.size}`, 10)\n : 0,\n completed: true, // Simplified for now\n };\n } catch (_error) {\n // If file doesn't exist, upload hasn't started\n return { bytesUploaded: 0, completed: false };\n }\n });\n\n const cancelUpload = (uploadUrl: string) =>\n Effect.tryPromise({\n try: async () => {\n const key = uploadUrl.split(\"/\").pop();\n if (!key) {\n throw new Error(\"Invalid upload URL\");\n }\n\n const file = bucket.file(key);\n await file.delete({ ignoreNotFound: true });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const composeObjects = (\n sourceKeys: string[],\n destinationKey: string,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const sources = sourceKeys.map((key) => bucket.file(key));\n const destination = bucket.file(destinationKey);\n\n await bucket.combine(sources, destination);\n\n if (context?.metadata) {\n await destination.setMetadata({\n metadata: context.metadata,\n });\n }\n\n return destinationKey;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const putObjectFromStreamWithPatching = (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n isAppend = false,\n ) =>\n Effect.gen(function* () {\n if (!isAppend) {\n // Direct upload for new files\n return yield* putObjectFromStream(\n key,\n offset,\n readableStream,\n context,\n onProgress,\n );\n }\n\n // For append operations, create a patch file and then combine\n const patchKey = `${key}_patch`;\n const bytesWritten = yield* putObjectFromStream(\n patchKey,\n offset,\n readableStream,\n context,\n onProgress,\n );\n\n // Combine original with patch\n yield* composeObjects([key, patchKey], key, context);\n\n // Clean up patch file\n yield* deleteObject(patchKey);\n\n return bytesWritten;\n });\n\n const putTemporaryObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => putObject(`${key}_tmp`, body, context);\n\n const getTemporaryObject = (key: string) =>\n Effect.gen(function* () {\n try {\n return yield* getObject(`${key}_tmp`);\n } catch {\n return 
undefined;\n }\n });\n\n const deleteTemporaryObject = (key: string) => deleteObject(`${key}_tmp`);\n\n const getObjectBuffer = (key: string) => {\n return Effect.tryPromise({\n try: async () => {\n const [buffer] = await bucket.file(key).download();\n return new Uint8Array(buffer);\n },\n catch: (error) => {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", {\n cause: error,\n });\n },\n });\n };\n\n return {\n bucket: config.bucket,\n getObject,\n getObjectBuffer,\n getObjectMetadata,\n objectExists,\n putObject,\n putObjectFromStream,\n putObjectFromStreamWithPatching,\n deleteObject,\n createResumableUpload,\n uploadChunk,\n getUploadStatus,\n cancelUpload,\n composeObjects,\n putTemporaryObject,\n getTemporaryObject,\n deleteTemporaryObject,\n };\n}\n\nexport const GCSClientNodeJSLayer = (config: GCSClientConfig) =>\n Layer.succeed(GCSClientService, createNodeJSGCSClient(config));\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport { Effect, Layer } from \"effect\";\nimport {\n type GCSClientConfig,\n GCSClientService,\n type GCSObjectMetadata,\n type GCSOperationContext,\n} from \"./gcs-client.service\";\n\nfunction createRESTGCSClient(config: GCSClientConfig) {\n if (!config.accessToken) {\n throw new Error(\"accessToken is required for REST API implementation\");\n }\n\n const baseUrl = `https://storage.googleapis.com/storage/v1/b/${config.bucket}`;\n const uploadUrl = `https://storage.googleapis.com/upload/storage/v1/b/${config.bucket}/o`;\n const accessToken = config.accessToken;\n\n const getAuthHeaders = () => ({\n Authorization: `Bearer ${accessToken}`,\n \"Content-Type\": \"application/json\",\n });\n\n const getObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}?alt=media`,\n {\n headers: {\n Authorization: `Bearer ${accessToken}`,\n },\n },\n );\n\n if (!response.ok) {\n if (response.status === 404) {\n throw new Error(\"File not found\");\n }\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n if (!response.body) {\n throw new Error(\"body not found\");\n }\n\n return response.body;\n },\n catch: (error) => {\n if (error instanceof Error && error.message.includes(\"not found\")) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const getObjectMetadata = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}`,\n {\n headers: getAuthHeaders(),\n },\n );\n\n if (!response.ok) {\n if (response.status === 404) {\n throw new Error(\"File not found\");\n }\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n const data = (await response.json()) as {\n name: string;\n bucket: string;\n size?: string;\n contentType?: string;\n metadata?: Record<string, string>;\n generation?: string;\n timeCreated?: string;\n updated?: string;\n };\n\n return {\n name: data.name,\n bucket: data.bucket,\n size: data.size ? 
Number.parseInt(data.size, 10) : undefined,\n contentType: data.contentType,\n metadata: data.metadata || {},\n generation: data.generation,\n timeCreated: data.timeCreated,\n updated: data.updated,\n } as GCSObjectMetadata;\n },\n catch: (error) => {\n if (error instanceof Error && error.message.includes(\"not found\")) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const objectExists = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}`,\n {\n method: \"HEAD\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n },\n },\n );\n\n return response.ok;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const putObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const metadata = {\n name: key,\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n };\n\n const response = await fetch(\n `${uploadUrl}?uploadType=media&name=${encodeURIComponent(key)}`,\n {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n \"Content-Type\": metadata.contentType,\n \"Content-Length\": body.length.toString(),\n },\n body: body,\n },\n );\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n return key;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const deleteObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}`,\n {\n method: \"DELETE\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n },\n },\n );\n\n // 404 is OK - object didn't exist\n if (!response.ok && response.status !== 404) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const createResumableUpload = (context: GCSOperationContext) =>\n Effect.tryPromise({\n try: async () => {\n const metadata = {\n name: context.key,\n contentType: context.contentType || \"application/octet-stream\",\n metadata: context.metadata || {},\n };\n\n const response = await fetch(\n `${uploadUrl}?uploadType=resumable&name=${encodeURIComponent(context.key)}`,\n {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(metadata),\n },\n );\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n const resumableUploadUrl = response.headers.get(\"Location\");\n if (!resumableUploadUrl) {\n throw new Error(\"No upload URL returned\");\n }\n\n return resumableUploadUrl;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const uploadChunk = (\n uploadUrl: string,\n chunk: Uint8Array,\n start: number,\n total?: number,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const end = start + chunk.length - 1;\n const contentRange = total\n ? 
`bytes ${start}-${end}/${total}`\n : `bytes ${start}-${end}/*`;\n\n const response = await fetch(uploadUrl, {\n method: \"PUT\",\n headers: {\n \"Content-Length\": chunk.length.toString(),\n \"Content-Range\": contentRange,\n },\n body: chunk,\n });\n\n // 308 means more data needed, 200/201 means complete\n const completed = response.status === 200 || response.status === 201;\n\n if (!completed && response.status !== 308) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n return {\n completed,\n bytesUploaded: end + 1,\n };\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const getUploadStatus = (uploadUrl: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(uploadUrl, {\n method: \"PUT\",\n headers: {\n \"Content-Range\": \"bytes */*\",\n },\n });\n\n if (response.status === 308) {\n // Upload incomplete\n const range = response.headers.get(\"Range\");\n const bytesUploaded = range\n ? Number.parseInt(range.split(\"-\")[1], 10) + 1\n : 0;\n\n return {\n bytesUploaded,\n completed: false,\n };\n } else if (response.status === 200 || response.status === 201) {\n // Upload complete\n return {\n bytesUploaded: 0, // We don't know the exact size\n completed: true,\n };\n } else {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const cancelUpload = (uploadUrl: string) =>\n Effect.tryPromise({\n try: async () => {\n // Cancel by sending DELETE to upload URL\n await fetch(uploadUrl, {\n method: \"DELETE\",\n });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const composeObjects = (\n sourceKeys: string[],\n destinationKey: string,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const composeRequest = {\n kind: \"storage#composeRequest\",\n sourceObjects: sourceKeys.map((key) => ({ name: key })),\n destination: {\n name: destinationKey,\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n },\n };\n\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(destinationKey)}/compose`,\n {\n method: \"POST\",\n headers: getAuthHeaders(),\n body: JSON.stringify(composeRequest),\n },\n );\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n return destinationKey;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const putTemporaryObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => putObject(`${key}_tmp`, body, context);\n\n const getTemporaryObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n try {\n return await getObject(`${key}_tmp`).pipe(Effect.runPromise);\n } catch {\n return undefined;\n }\n },\n catch: () => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\");\n },\n });\n\n const deleteTemporaryObject = (key: string) => deleteObject(`${key}_tmp`);\n\n const getObjectBuffer = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}?alt=media`,\n {\n headers: getAuthHeaders(),\n },\n );\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n return new 
Uint8Array(await response.arrayBuffer());\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", { cause: error });\n },\n });\n\n return {\n bucket: config.bucket,\n getObject,\n getObjectBuffer,\n getObjectMetadata,\n objectExists,\n putObject,\n deleteObject,\n createResumableUpload,\n uploadChunk,\n getUploadStatus,\n cancelUpload,\n composeObjects,\n putTemporaryObject,\n getTemporaryObject,\n deleteTemporaryObject,\n };\n}\n\nexport const GCSClientRESTLayer = (config: GCSClientConfig) =>\n Layer.succeed(GCSClientService, createRESTGCSClient(config));\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n type DataStore,\n type DataStoreCapabilities,\n type DataStoreWriteOptions,\n type KvStore,\n type UploadFile,\n UploadFileKVStore,\n type UploadStrategy,\n} from \"@uploadista/core/types\";\nimport { Effect, Stream } from \"effect\";\nimport type {\n GCSClient,\n GCSClientConfig,\n GCSOperationContext,\n} from \"./services\";\nimport {\n GCSClientNodeJSLayer,\n GCSClientRESTLayer,\n GCSClientService,\n} from \"./services\";\n\nexport type GCSStoreOptions = GCSClientConfig;\n\n/**\n * Convert the Upload object to a format that can be stored in GCS metadata.\n */\nfunction stringifyUploadKeys(\n upload: UploadFile,\n): Record<string, string | null> {\n return {\n size: upload.size?.toString() ?? null,\n sizeIsDeferred: `${upload.sizeIsDeferred}`,\n offset: upload.offset?.toString() ?? \"0\",\n metadata: JSON.stringify(upload.metadata),\n storage: JSON.stringify(upload.storage),\n };\n}\n\nconst getUpload = (\n id: string,\n kvStore: KvStore<UploadFile>,\n gcsClient: GCSClient,\n) => {\n return Effect.gen(function* () {\n try {\n const metadata = yield* gcsClient.getObjectMetadata(id);\n const file = yield* kvStore.get(id);\n\n return {\n id,\n size: metadata.size,\n offset: metadata.size || 0,\n metadata: metadata.metadata,\n storage: {\n id: file.storage.id,\n type: file.storage.type,\n path: id,\n bucket: gcsClient.bucket,\n },\n };\n } catch (error) {\n if (error instanceof UploadistaError && error.code === \"FILE_NOT_FOUND\") {\n return yield* Effect.fail(error);\n }\n throw error;\n }\n });\n};\n\nexport function createGCSStore() {\n return Effect.gen(function* () {\n const gcsClient = yield* GCSClientService;\n const kvStore = yield* UploadFileKVStore;\n\n const getCapabilities = (): DataStoreCapabilities => {\n return {\n supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3\n supportsConcatenation: true, // Can combine files using compose\n supportsDeferredLength: true,\n supportsResumableUploads: true, // Through resumable uploads\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1, // Sequential operations\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB default\n requiresOrderedChunks: true, // Due to compose operation\n };\n };\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n\n const result = (() => {\n switch (strategy) {\n case \"parallel\":\n return capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n })();\n\n return Effect.succeed(result);\n };\n\n return {\n bucket: gcsClient.bucket,\n create: (file: UploadFile) => {\n return Effect.gen(function* () {\n if (!file.id) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FILE_NOT_FOUND\"),\n );\n }\n\n 
file.storage = {\n id: file.storage.id,\n type: file.storage.type,\n path: file.id,\n bucket: gcsClient.bucket,\n };\n\n // Create empty file\n const context = {\n bucket: gcsClient.bucket,\n key: file.id,\n contentType:\n file.metadata?.contentType?.toString() ||\n \"application/octet-stream\",\n metadata: stringifyUploadKeys(file),\n };\n\n yield* gcsClient.putObject(file.id, new Uint8Array(0), context);\n return file;\n });\n },\n\n remove: (file_id: string) => {\n return gcsClient.deleteObject(file_id);\n },\n\n write: (\n options: DataStoreWriteOptions,\n dependencies: {\n onProgress?: (chunkSize: number) => void;\n },\n ) => {\n return Effect.gen(function* () {\n const { file_id, offset, stream: effectStream } = options;\n const { onProgress } = dependencies;\n\n // Get current upload metadata\n const upload = yield* getUpload(file_id, kvStore, gcsClient);\n\n upload.offset = offset;\n // Persist the updated offset\n yield* kvStore.set(file_id, upload as UploadFile);\n\n const context = {\n bucket: gcsClient.bucket,\n key: file_id,\n contentType:\n upload.metadata?.contentType || \"application/octet-stream\",\n metadata: stringifyUploadKeys(upload as UploadFile),\n } satisfies Partial<GCSOperationContext>;\n\n // Convert Effect Stream to ReadableStream\n const readableStream = Stream.toReadableStream(effectStream);\n\n // Use native streams if available (Node.js implementation)\n if (gcsClient.putObjectFromStreamWithPatching) {\n const isAppend = upload.offset > 0; // Check original file size, not write offset\n\n return yield* gcsClient.putObjectFromStreamWithPatching(\n file_id,\n upload.offset,\n readableStream,\n context,\n onProgress,\n isAppend,\n );\n } else {\n // Fallback to chunk-based approach for REST implementation\n const reader = readableStream.getReader();\n const chunks: Uint8Array[] = [];\n let totalBytes = 0;\n\n // Read all chunks\n while (true) {\n const { done, value } = yield* Effect.promise(() =>\n reader.read(),\n );\n if (done) break;\n\n chunks.push(value);\n const chunkSize = value.byteLength;\n totalBytes += chunkSize;\n onProgress?.(totalBytes);\n }\n\n // Combine all chunks\n const combinedArray = new Uint8Array(totalBytes);\n let position = 0;\n for (const chunk of chunks) {\n combinedArray.set(chunk, position);\n position += chunk.byteLength;\n }\n\n // Check if we need to handle patches (append data)\n if (upload.offset === 0) {\n // Direct upload\n yield* gcsClient.putObject(file_id, combinedArray, context);\n } else {\n // We need to combine with existing data\n const patchKey = `${file_id}_patch`;\n\n // Upload patch data\n yield* gcsClient.putTemporaryObject(\n patchKey,\n combinedArray,\n context,\n );\n\n // Combine original file with patch\n yield* gcsClient.composeObjects(\n [file_id, patchKey],\n file_id,\n context,\n );\n\n // Clean up patch file\n yield* gcsClient.deleteTemporaryObject(patchKey);\n }\n\n return totalBytes;\n }\n });\n },\n\n getCapabilities,\n validateUploadStrategy,\n read: (file_id: string) => {\n return Effect.gen(function* () {\n const buffer = yield* gcsClient.getObjectBuffer(file_id);\n return buffer;\n });\n },\n } as DataStore<UploadFile>;\n });\n}\n\nexport const gcsStoreRest = (config: GCSStoreOptions) =>\n createGCSStore().pipe(Effect.provide(GCSClientRESTLayer(config)));\n\nexport const gcsStoreNodejs = (config: GCSStoreOptions) =>\n 
createGCSStore().pipe(Effect.provide(GCSClientNodeJSLayer(config)));\n"],"mappings":"4oBAyCA,SAASA,EAAoB,EAAoB,CAC/C,MAAO,CACL,KAAM,EAAO,MAAQ,KACrB,eAAgB,GAAG,EAAO,iBAC1B,OAAQ,EAAO,OACf,SAAU,KAAK,UAAU,EAAO,SAAS,CACzC,QAAS,KAAK,UAAU,EAAO,QAAQ,CACxC,CAGH,MAAMC,GACJ,EACA,EACA,IAEO,EAAO,IAAI,WAAa,CAC7B,GAAI,CACF,GAAM,CAAC,GAAY,MAAO,EAAO,YAC/B,EAAO,KAAK,EAAG,CAAC,aAAa,CAC9B,CACK,CAAE,OAAM,SAAU,GAAS,EAC3B,EAAO,MAAO,EAAQ,IAAI,EAAG,CACnC,MAAO,CACL,KACA,KAAM,EAAO,OAAO,SAAS,GAAG,IAAQ,GAAG,CAAG,IAAA,GAC9C,OAAQ,EAAS,KAAO,OAAO,SAAS,GAAG,EAAS,OAAQ,GAAG,CAAG,EAClE,SAAU,GAA0C,IAAA,GACpD,QAAS,CACP,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EACN,OAAQ,EAAO,KAChB,CACF,OACM,EAAO,CACd,GACE,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAEf,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,CAGvE,MAAM,IAER,CAGJ,SAAgB,EAAe,CAC7B,cACA,cACA,cACmC,CACnC,OAAO,EAAO,IAAI,WAAa,CAE7B,OAAO,EAAS,CAAE,cAAa,cAAa,aAAY,QADxC,MAAO,EAC0C,CAAC,EAClE,CAGJ,SAAgB,EAAS,CACvB,cACA,cACA,aACA,WACyC,CAKzC,IAAM,EAJU,IAAI,EAClB,EAAc,CAAE,cAAa,CAAG,EAAc,CAAE,cAAa,CAAG,EAAE,CACnE,CAEsB,OAAO,EAAW,CAEnC,OACG,CACL,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,sBAAuB,GACvB,uBAAwB,GACxB,qBAAsB,EACtB,aAAc,IAAA,GACd,aAAc,IAAA,GACd,SAAU,IAAA,GACV,iBAAkB,EAAI,KAAO,KAC7B,sBAAuB,GACvB,2BAA4B,GAC5B,kBAAmB,IAAA,GACpB,EAsBH,MAAO,CACL,OAAQ,EAAO,KACf,OAAS,GACA,EAAO,IAAI,WAAa,CAK7B,GAJA,MAAOC,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOC,EAAkB,EAAO,QAAQ,EAAK,MAAQ,EAAE,CAAC,CAEpD,CAAC,EAAK,GAER,OADA,MAAOC,EAAkB,EAAO,QAAQ,EAAE,CAAC,CACpC,MAAO,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,CAGvE,IAAM,EAAW,EAAO,KAAK,EAAK,GAAG,CAErC,EAAK,QAAU,CACb,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EAAK,GACX,OAAQ,EAAO,KAChB,CAED,QAAQ,IAAI,OAAQ,EAAS,GAAG,CAEhC,IAAMC,EAAoC,CACxC,SAAU,CACR,SAAU,CACR,GAAGN,EAAoB,EAAK,CAC7B,CACF,CACF,CAKD,OAJI,EAAK,UAAU,cACjB,EAAQ,YAAc,EAAK,SAAS,YAAY,UAAU,EAGrD,MAAO,EAAO,WAAW,CAC9B,SACE,QAAQ,IAAI,gBAAiB,EAAS,GAAG,CAClC,IAAI,SAAqB,EAAS,IAAW,CAClD,IAAM,EAAc,IAAI,EACxB,EAAY,KAAK,CACjB,EACG,KAAK,EAAS,kBAAkB,EAAQ,CAAC,CACzC,GAAG,QAAS,EAAO,CACnB,GAAG,aAAgB,CAClB,EAAQ,EAAK,EACb,EACJ,EAEJ,MAAQ,IACN,QAAQ,MAAM,sBAAuB,EAAM,CAC3C,EAAO,QACL,EAAc,SAAU,EAAO,CAC7B,UAAW,EAAK,GAChB,OAAQ,EAAO,KAChB,CAAC,CACH,CACM,EAAgB,SAAS,mBAAoB,CAClD,MAAO,EACR,CAAC,EAEL,CAAC,EACF,CAEJ,KAAO,GACE,EAAO,WAAW,CACvB,IAAK,SAAY,CACf,GAAM,CAAC,GAAU,MAAM,EAAO,KAAK,EAAQ,CAAC,UAAU,CACtD,OAAO,IAAI,WAAW,EAAO,EAE/B,MAAQ,IACN,EAAO,QACL,EAAc,OAAQ,EAAO,CAC3B,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CAEC,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,kBAAmB,CACjD,MAAO,EACR,CAAC,EAEL,CAAC,CAUJ,YAAa,EAAiB,IAC5B,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAkB,CACtB,GAAG,EACH,GAAG,EACJ,CAGK,EAAO,EAAO,KAAK,EAAQ,CAC3B,CAAC,GAAU,MAAO,EAAO,WAAW,CACxC,QAAW,EAAK,QAAQ,CACxB,MAAQ,IACN,EAAO,QACL,EAAc,aAAc,EAAO,CACjC,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACM,EAAgB,SAAS,kBAAmB,CACjD,MAAO,EACR,CAAC,EAEL,CAAC,CAEF,GAAI,CAAC,EACH,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,CAIvE,IAAM,EAAa,EAAK,kBAAkB,CAG1C,OAAO,EAAO,MAAoC,GAAS,CACzD,IAAM,EAAY,EAAgB,UAC9B,EAAS,IAAI,WA2CjB,OAzCA,EAAW,GAAG,OAAS,GAAkB,CAEvC,IAAM,EAAW,IAAI,WAAW,EAAO,OAAS,EAAM,OAAO,CAM7D,IALA,EAAS,IAAI,EAAO,CACpB,EAAS,IAAI,IAAI,WAAW,EAAM,CAAE,EAAO,OAAO,CAClD,EAAS,EAGF,EAAO,QAAU,GAAW,CACjC,IAAM,EAAW,EAAO,MAAM,EAAG,EAAU,CAC3C,EAAS,EAAO,MAAM,EAAU,CAChC,EAAK,OAAO,EAAS,GAEvB,CAEF,EAAW,GAAG,UAAa,CAErB,EAAO,OAAS,GAClB,EAAK,OAAO,EAAO,CAErB,EAAK,KAAK,EACV,CAEF,EAAW,GAAG,QAAU,GAAiB,CACvC,EAAO,QACL,EAAc,aAAc,EAAO,CACjC,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACD,EAAK,KACH,IAAI,EAAgB,CAClB,KAAM,kBACN,OAAQ,IACR,KAAM,iCACN,QAAS,
2BAA2B,OAAO,EAAM,GAClD,CAAC,CACH,EACD,CAGK,EAAO,SAAW,CACvB,EAAW,SAAS,EACpB,EACF,EACF,CACJ,OAAS,GACA,EAAO,IAAI,WAAa,CAC7B,GAAI,CACF,MAAO,EAAO,YAAc,EAAO,KAAK,EAAQ,CAAC,QAAQ,CAAC,CAC1D,MAAOG,EAAmB,EAAO,QAAQ,GAAG,CAAC,OACtC,EAAO,CAOd,MANA,EAAO,QACL,EAAc,SAAU,EAAO,CAC7B,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACK,IAER,CAMJ,OACE,EACA,IAIOI,EACL,EAAQ,QACRC,EACEC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CACtB,CAAE,UAAS,SAAQ,OAAQ,GAAiB,EAClD,QAAQ,IAAI,QAAS,EAAS,EAAO,CACrC,GAAM,CAAE,cAAe,EAIjB,EAAS,MAAOR,EAAU,EAAQ,EAAS,EAAQ,CAGzD,OAFA,QAAQ,IAAI,SAAU,EAAO,CAEtB,MAAO,EAAO,YAEjB,IAAI,SAAiB,EAAS,IAAW,CACvC,IAAM,EAAO,EAAO,KAAK,EAAQ,CAC3B,EACJ,EAAO,SAAW,EACd,EACA,EAAO,KAAK,GAAG,EAAQ,QAAQ,CAErC,EAAO,OAAS,EAEhB,IAAM,EAAa,CACjB,SAAU,CACR,SAAU,CACR,GAAGD,EAAoB,EAAO,CAC/B,CACF,CACF,CACK,EACJ,EAAY,kBAAkB,EAAW,CAC3C,GAAI,CAAC,EAAc,CACjB,EAAO,QAAQK,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACpD,EAAO,EAAgB,SAAS,mBAAmB,CAAC,CACpD,OAGF,IAAI,EAAiB,EAAO,OAGtB,EAAiB,EAAO,iBAAiB,EAAa,CAEtD,EAAY,IAAI,EAAU,CAC9B,UACE,EACA,EACA,EACA,CACA,GAAkB,EAAM,OACxB,IAAa,EAAe,CAC5B,EAAS,KAAM,EAAM,EAExB,CAAC,CAIF,EAFqB,EAAS,QAAQ,EAAe,CAInD,EACA,EACA,KAAO,IAAoB,CACzB,GAAI,EAAG,CACL,QAAQ,MAAM,qBAAsB,EAAE,CACtC,EAAO,QACL,EAAc,QAAS,EAAG,CACxB,UAAW,EACX,OAAQ,EAAO,KACf,SACD,CAAC,CACH,CACD,GAAI,CACF,MAAM,EAAY,OAAO,CAAE,eAAgB,GAAM,CAAC,QAC1C,CACR,EAAO,EAAgB,SAAS,mBAAmB,CAAC,OAGtD,GAAI,CACE,IAAS,IACX,MAAM,EAAO,QAAQ,CAAC,EAAM,EAAY,CAAE,EAAK,CAC/C,MAAM,QAAQ,IAAI,CAChB,EAAK,YAAY,EAAW,SAAS,CACrC,EAAY,OAAO,CAAE,eAAgB,GAAM,CAAC,CAC7C,CAAC,EAIJ,EAAO,QACL,EAAuB,EAAS,CAC9B,SAAU,EAAO,MAAQ,EACzB,gBAAiB,KAAK,KAAK,CAAG,EAC9B,WAAY,EACZ,gBAAiB,EAAO,KACxB,eACG,EAAO,MAAQ,IAAM,KAAK,KAAK,CAAG,GACrC,WAAY,EACb,CAAC,CACH,CACD,EAAO,QAAQK,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACrD,EAAO,QACLP,EAAmB,EAAO,QAAQ,GAAG,CAAC,CACvC,CAED,EAAQ,EAAe,OAChB,EAAO,CACd,QAAQ,MAAM,EAAM,CACpB,EAAO,QACL,EAAc,QAAS,EAAO,CAC5B,UAAW,EACX,OAAQ,EAAO,KACf,UAAW,UACZ,CAAC,CACH,CACD,EAAO,EAAgB,SAAS,mBAAmB,CAAC,GAI3D,EACD,CACL,EACD,CACH,CACF,CAUH,aACE,EACA,IAEAK,EACEC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CAE5B,MAAO,EAAO,QAAQ,kCAAkC,CAAC,KACvD,EAAO,aAAa,CAClB,UAAW,EACX,OAAQ,EAAO,KACf,UAAW,EAAQ,SACpB,CAAC,CACH,CAED,MAAOP,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAE5C,IAAM,EAAO,EAAO,KAAK,EAAO,CAG1BQ,EAAuC,CAC3C,UAAW,GACX,SAAU,EAAQ,SACd,CAAE,SAAU,EAAQ,SAAU,CAC9B,IAAA,GACL,CAEG,EAAQ,cACV,EAAW,YAAc,EAAQ,aAInC,IAAM,EAAc,EAAK,kBAAkB,EAAW,CAGhD,EAAS,MAAO,EAAO,WAAW,CACtC,QACE,IAAI,SAAiB,EAAS,IAAW,CACvC,IAAI,EAAa,EAGX,EAAc,IAAI,EAExB,EAAY,GAAG,OAAS,GAAkB,CACxC,GAAc,EAAM,QACpB,CAGF,EAAY,KAAK,EAAY,CAE7B,EAAY,GAAG,QAAU,GAAiB,CACxC,EAAO,QACL,EAAc,cAAe,EAAO,CAClC,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACD,EAAO,EAAM,EACb,CAEF,EAAY,GAAG,aAAgB,CAC7B,EAAQ,EAAW,EACnB,CAGF,IAAM,EAAiB,EAAO,iBAAiB,EAAQ,OAAO,CACxD,EAAe,EAAS,QAAQ,EAAe,CAErD,EAAa,GAAG,QAAU,GAAiB,CACzC,EAAO,QACL,EAAc,cAAe,EAAO,CAClC,UAAW,EACX,OAAQ,EAAO,KACf,MAAO,OACR,CAAC,CACH,CACD,EAAY,QAAQ,EAAM,CAC1B,EAAO,EAAM,EACb,CAEF,EAAS,EAAc,EAAc,GAAU,CACzC,IACF,EAAO,QACL,EAAc,cAAe,EAAO,CAClC,UAAW,EACX,OAAQ,EAAO,KACf,MAAO,WACR,CAAC,CACH,CACD,EAAO,EAAM,GAEf,EACF,CACJ,MAAQ,IACN,EAAO,QAAQN,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACpD,EAAO,QAAQF,EAAmB,EAAO,QAAQ,GAAG,CAAC,CAAC,CAC/C,IAAI,EAAgB,CACzB,KAAM,mBACN,OAAQ,IACR,KAAM,gCACN,QAAS,+BAA+B,OAAO,EAAM,GACtD,CAAC,EAEL,CAAC,CAII,EADU,KAAK,KAAK,CACQ,EAyBlC,OArBA,MAAO,EAAuB,EAAQ,CACpC,SAAU,EACV,kBACA,WAAY,EACZ,gBAAiB,EACjB,cAPA,EAAkB,EAAK,EAAS,IAAQ,EAAkB,EAQ1D,WAAY,EACb,CAAC,CAEF,MAAOO,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOP,EAAmB,EAAO,QAAQ,GAAG,CAAC,CAC7C,MAAOC,EAAkB,EAAO,QAAQ,EAAO,CAAC,CAEhD,MAAO,EAAO,QAAQ,mCAAmC,CAAC,K
ACxD,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EACb,YAAa,EACd,CAAC,CACH,CAEM,CACL,GAAI,EACJ,KAAM,EACN,KAAM,EACN,OAAQ,EAAO,KAChB,EACD,CACH,CACH,kBACA,uBAzfA,GACkC,CAClC,IAAM,EAAe,GAAiB,CAEhC,OAAgB,CACpB,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAa,wBACtB,IAAK,SACH,MAAO,GACT,QACE,MAAO,OAET,CAEJ,OAAO,EAAO,QAAQ,EAAO,EA2e9B,CCzhBH,IAAa,EAAb,cAAsC,EAAQ,IAAI,mBAAmB,EAGlE,AAAC,GC/FJ,SAAS,EAAsB,EAAyB,CAStD,IAAMQ,EANU,IAAI,EAAQ,CAC1B,YAAa,EAAO,YACpB,YAAa,EAAO,YACpB,UAAW,EAAO,UACnB,CAAC,CAE6B,OAAO,EAAO,OAAO,CAE9C,EAAa,GACjB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,IAAM,EADO,EAAO,KAAK,EAAI,CACT,kBAAkB,CAGtC,OAAO,IAAI,eAAe,CACxB,MAAM,EAAY,CAChB,EAAO,GAAG,OAAS,GAAU,CAC3B,EAAW,QAAQ,IAAI,WAAW,EAAM,CAAC,EACzC,CAEF,EAAO,GAAG,UAAa,CACrB,EAAW,OAAO,EAClB,CAEF,EAAO,GAAG,QAAU,GAAU,CAC5B,EAAW,MAAM,EAAM,EACvB,EAEL,CAAC,EAEJ,MAAQ,GAEJ,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAqB,GACzB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,GAAM,CAAC,GAAY,MADN,EAAO,KAAK,EAAI,CACC,aAAa,CAgB3C,MAAO,CACL,KAAM,EAAS,KACf,OAAQ,EAAS,OACjB,KAAM,EAAS,KACX,OAAO,SAAS,GAAG,EAAS,OAAQ,GAAG,CACvC,IAAA,GACJ,YAAa,EAAS,YACtB,UApBA,GACG,CACH,GAAI,CAAC,EAAM,MAAO,EAAE,CACpB,GAAI,OAAO,EAAK,UAAa,SAC3B,GAAI,CACF,OAAO,KAAK,MAAM,EAAK,SAAS,MAC1B,CACN,OAAO,EAGX,OAAO,IAUiB,EAAS,SAAS,CAC1C,WAAY,EAAS,WACrB,YAAa,EAAS,YACtB,QAAS,EAAS,QACnB,EAEH,MAAQ,GAEJ,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,GAAM,CAAC,GAAU,MADJ,EAAO,KAAK,EAAI,CACD,QAAQ,CACpC,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,GACJ,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SACI,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,EAAO,EAAO,KAAK,EAAI,CACvB,EAAU,CACd,SAAU,CACR,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CACF,CAEK,EAAS,EAAK,kBAAkB,EAAQ,CAE9C,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,aAAgB,CACxB,EAAQ,EAAK,KAAK,EAClB,CAEF,EAAO,IAAI,OAAO,KAAK,EAAK,CAAC,EAC7B,CAEJ,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,GACJ,EACA,EACA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SACI,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,EAAO,EAAO,KAAK,EAAI,CACvB,EAAU,CACd,SAAU,CACR,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CACF,CAEK,EAAc,EAAK,kBAAkB,EAAQ,CAC/C,EAAe,EAEb,EAAY,IAAI,EAAU,CAC9B,UACE,EACA,EACA,EACA,CACA,GAAgB,EAAM,OACtB,IAAa,EAAa,CAC1B,EAAS,KAAM,EAAM,EAExB,CAAC,CAIF,EAFqB,EAAS,QAAQ,EAAe,CAInD,EACA,EACC,GAAwB,CACnB,EACF,EACE,EAAgB,SAAS,mBAAoB,CAC3C,MAAO,EACR,CAAC,CACH,CAED,EAAQ,EAAa,EAG1B,EACD,CAEJ,MAAQ,IACN,QAAQ,MAAM,mCAAoC,EAAM,CACjD,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,EAExE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,MADa,EAAO,KAAK,EAAI,CAClB,OAAO,CAAE,eAAgB,GAAM,CAAC,EAE7C,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAyB,GAC7B,EAAO,WAAW,CAChB,IAAK,SAII,sBAAsB,EAAQ,OAAO,GAAG,EAAQ,MAEzD,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,GACJ,EACA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,IAAM,EAAM,EAAU,MAAM,IAAI,CAAC,KAAK,CACtC,GAAI,CAAC,EACH,MAAU,MAAM,qBAAqB,CAGvC,IAAM,EAAO,EAAO,KAAK,EAAI,CAE7B,OAAO,IAAI,SACR,EAAS,IAAW,CACnB,IAAM,EAAS,EAAK,kBAAkB,CACpC,UAAW,GACX,OAAQ,EACT,CAAC,CAEF,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,aAAgB,CACxB,EAAQ,CACN,UAAW,EAAQ,EAAQ,EAAM,QAAU,EAAQ,GACnD,cAAe,EAAQ,EAAM,OAC9B,CAAC,EACF,CAEF,EAAO,IAAI,OAAO,KAAK,EAAM,CAAC,EAEjC,EAEH,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,EAAmB,GACvB,EAAO,QAAQ,SAAY,CACzB,GAAI,CACF,IAAM,EAAM,EAAU,MAAM,IAAI,CAAC,KAAK,CACtC,GAAI,CAAC,EACH,MAAU,MAAM,qBAAqB,CAIvC,GAAM,CAAC,GAAY,MADN,EAAO,KAAK,EAAI,CACC,aAAa,CAE3C
,MAAO,CACL,cAAe,EAAS,KACpB,OAAO,SAAS,GAAG,EAAS,OAAQ,GAAG,CACvC,EACJ,UAAW,GACZ,MACc,CAEf,MAAO,CAAE,cAAe,EAAG,UAAW,GAAO,GAE/C,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAM,EAAU,MAAM,IAAI,CAAC,KAAK,CACtC,GAAI,CAAC,EACH,MAAU,MAAM,qBAAqB,CAIvC,MADa,EAAO,KAAK,EAAI,CAClB,OAAO,CAAE,eAAgB,GAAM,CAAC,EAE7C,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,GACJ,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAU,EAAW,IAAK,GAAQ,EAAO,KAAK,EAAI,CAAC,CACnD,EAAc,EAAO,KAAK,EAAe,CAU/C,OARA,MAAM,EAAO,QAAQ,EAAS,EAAY,CAEtC,GAAS,UACX,MAAM,EAAY,YAAY,CAC5B,SAAU,EAAQ,SACnB,CAAC,CAGG,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAgFJ,MAAO,CACL,OAAQ,EAAO,OACf,YACA,gBAzBuB,GAChB,EAAO,WAAW,CACvB,IAAK,SAAY,CACf,GAAM,CAAC,GAAU,MAAM,EAAO,KAAK,EAAI,CAAC,UAAU,CAClD,OAAO,IAAI,WAAW,EAAO,EAE/B,MAAQ,GAEJ,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,kBAAmB,CACjD,MAAO,EACR,CAAC,CAEL,CAAC,CAOF,oBACA,eACA,YACA,sBACA,iCArFA,EACA,EACA,EACA,EACA,EACA,EAAW,KAEX,EAAO,IAAI,WAAa,CACtB,GAAI,CAAC,EAEH,OAAO,MAAO,EACZ,EACA,EACA,EACA,EACA,EACD,CAIH,IAAM,EAAW,GAAG,EAAI,QAClB,EAAe,MAAO,EAC1B,EACA,EACA,EACA,EACA,EACD,CAQD,OALA,MAAO,EAAe,CAAC,EAAK,EAAS,CAAE,EAAK,EAAQ,CAGpD,MAAO,EAAa,EAAS,CAEtB,GACP,CAkDF,eACA,wBACA,cACA,kBACA,eACA,iBACA,oBArDA,EACA,EACA,IACG,EAAU,GAAG,EAAI,MAAO,EAAM,EAAQ,CAmDzC,mBAjD0B,GAC1B,EAAO,IAAI,WAAa,CACtB,GAAI,CACF,OAAO,MAAO,EAAU,GAAG,EAAI,MAAM,MAC/B,CACN,SAEF,CA2CF,sBAzC6B,GAAgB,EAAa,GAAG,EAAI,MAAM,CA0CxE,CAGH,MAAa,EAAwB,GACnC,EAAM,QAAQ,EAAkB,EAAsB,EAAO,CAAC,CCzahE,SAAS,EAAoB,EAAyB,CACpD,GAAI,CAAC,EAAO,YACV,MAAU,MAAM,sDAAsD,CAGxE,IAAM,EAAU,+CAA+C,EAAO,SAChE,EAAY,sDAAsD,EAAO,OAAO,IAChF,EAAc,EAAO,YAErB,OAAwB,CAC5B,cAAe,UAAU,IACzB,eAAgB,mBACjB,EAEK,EAAa,GACjB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,CAAC,YACxC,CACE,QAAS,CACP,cAAe,UAAU,IAC1B,CACF,CACF,CAED,GAAI,CAAC,EAAS,GAIZ,MAHI,EAAS,SAAW,IACZ,MAAM,iBAAiB,CAEzB,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAEpE,GAAI,CAAC,EAAS,KACZ,MAAU,MAAM,iBAAiB,CAGnC,OAAO,EAAS,MAElB,MAAQ,GACF,aAAiB,OAAS,EAAM,QAAQ,SAAS,YAAY,CACxD,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAqB,GACzB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,GACvC,CACE,QAAS,GAAgB,CAC1B,CACF,CAED,GAAI,CAAC,EAAS,GAIZ,MAHI,EAAS,SAAW,IACZ,MAAM,iBAAiB,CAEzB,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,IAAM,EAAQ,MAAM,EAAS,MAAM,CAWnC,MAAO,CACL,KAAM,EAAK,KACX,OAAQ,EAAK,OACb,KAAM,EAAK,KAAO,OAAO,SAAS,EAAK,KAAM,GAAG,CAAG,IAAA,GACnD,YAAa,EAAK,YAClB,SAAU,EAAK,UAAY,EAAE,CAC7B,WAAY,EAAK,WACjB,YAAa,EAAK,YAClB,QAAS,EAAK,QACf,EAEH,MAAQ,GACF,aAAiB,OAAS,EAAM,QAAQ,SAAS,YAAY,CACxD,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,UACc,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,GACvC,CACE,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,IAC1B,CACF,CACF,EAEe,GAElB,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,GACJ,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,CACf,KAAM,EACN,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CAEK,EAAW,MAAM,MACrB,GAAG,EAAU,yBAAyB,mBAAmB,EAAI,GAC7D,CACE,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,IACzB,eAAgB,EAAS,YACzB,iBAAkB,EAAK,OAAO,UAAU,CACzC,CACK,OACP,CACF,CAED,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,GACvC,CACE,OAAQ,SACR,QAAS,CACP,cAAe,UAAU,IAC1B,CACF,CACF,CAGD,GAAI,CAAC,EAAS,IAAM,EAAS,SAAW,IACtC,MAA
U,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,EAGtE,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CA6MJ,MAAO,CACL,OAAQ,EAAO,OACf,YACA,gBAtBuB,GACvB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,CAAC,YACxC,CACE,QAAS,GAAgB,CAC1B,CACF,CACD,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAEpE,OAAO,IAAI,WAAW,MAAM,EAAS,aAAa,CAAC,EAErD,MAAQ,GACC,EAAgB,SAAS,kBAAmB,CAAE,MAAO,EAAO,CAAC,CAEvE,CAAC,CAMF,oBACA,eACA,YACA,eACA,sBAnN6B,GAC7B,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,CACf,KAAM,EAAQ,IACd,YAAa,EAAQ,aAAe,2BACpC,SAAU,EAAQ,UAAY,EAAE,CACjC,CAEK,EAAW,MAAM,MACrB,GAAG,EAAU,6BAA6B,mBAAmB,EAAQ,IAAI,GACzE,CACE,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,IACzB,eAAgB,mBACjB,CACD,KAAM,KAAK,UAAU,EAAS,CAC/B,CACF,CAED,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,IAAM,EAAqB,EAAS,QAAQ,IAAI,WAAW,CAC3D,GAAI,CAAC,EACH,MAAU,MAAM,yBAAyB,CAG3C,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAiLF,aA9KA,EACA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAM,EAAQ,EAAM,OAAS,EAC7B,EAAe,EACjB,SAAS,EAAM,GAAG,EAAI,GAAG,IACzB,SAAS,EAAM,GAAG,EAAI,IAEpB,EAAW,MAAM,MAAMC,EAAW,CACtC,OAAQ,MACR,QAAS,CACP,iBAAkB,EAAM,OAAO,UAAU,CACzC,gBAAiB,EAClB,CACD,KAAM,EACP,CAAC,CAGI,EAAY,EAAS,SAAW,KAAO,EAAS,SAAW,IAEjE,GAAI,CAAC,GAAa,EAAS,SAAW,IACpC,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,MAAO,CACL,YACA,cAAe,EAAM,EACtB,EAEH,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CA2IF,gBAzIuB,GACvB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MAAMA,EAAW,CACtC,OAAQ,MACR,QAAS,CACP,gBAAiB,YAClB,CACF,CAAC,CAEF,GAAI,EAAS,SAAW,IAAK,CAE3B,IAAM,EAAQ,EAAS,QAAQ,IAAI,QAAQ,CAK3C,MAAO,CACL,cALoB,EAClB,OAAO,SAAS,EAAM,MAAM,IAAI,CAAC,GAAI,GAAG,CAAG,EAC3C,EAIF,UAAW,GACZ,SACQ,EAAS,SAAW,KAAO,EAAS,SAAW,IAExD,MAAO,CACL,cAAe,EACf,UAAW,GACZ,MAED,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,EAGtE,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAwGF,aAtGoB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,MAAM,MAAMA,EAAW,CACrB,OAAQ,SACT,CAAC,EAEJ,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CA4FF,gBAzFA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAiB,CACrB,KAAM,yBACN,cAAe,EAAW,IAAK,IAAS,CAAE,KAAM,EAAK,EAAE,CACvD,YAAa,CACX,KAAM,EACN,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CACF,CAEK,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAe,CAAC,UACnD,CACE,OAAQ,OACR,QAAS,GAAgB,CACzB,KAAM,KAAK,UAAU,EAAe,CACrC,CACF,CAED,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAwDF,oBArDA,EACA,EACA,IACG,EAAU,GAAG,EAAI,MAAO,EAAM,EAAQ,CAmDzC,mBAjD0B,GAC1B,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,GAAI,CACF,OAAO,MAAM,EAAU,GAAG,EAAI,MAAM,CAAC,KAAK,EAAO,WAAW,MACtD,CACN,SAGJ,UACS,EAAgB,SAAS,gBAAgB,CAEnD,CAAC,CAsCF,sBApC6B,GAAgB,EAAa,GAAG,EAAI,MAAM,CAqCxE,CAGH,MAAa,EAAsB,GACjC,EAAM,QAAQ,EAAkB,EAAoB,EAAO,CAAC,CC1X9D,SAAS,EACP,EAC+B,CAC/B,MAAO,CACL,KAAM,EAAO,MAAM,UAAU,EAAI,KACjC,eAAgB,GAAG,EAAO,iBAC1B,OAAQ,EAAO,QAAQ,UAAU,EAAI,IACrC,SAAU,KAAK,UAAU,EAAO,SAAS,CACzC,QAAS,KAAK,UAAU,EAAO,QAAQ,CACxC,CAGH,MAAM,GACJ,EACA,EACA,IAEO,EAAO,IAAI,WAAa,CAC7B,GAAI,CACF,IAAM,EAAW,MAAO,EAAU,kBAAkB,EAAG,CACjD,EAAO,MAAO,EAAQ,IAAI,EAAG,CAEnC,MAAO,CACL,KACA,KAAM,EAAS,KACf,OAAQ,EAAS,MAAQ,EACzB,SAAU,EAAS,SACnB,QAAS,CACP,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EACN,OAAQ,EAAU,OACnB,CACF,OACM,EAAO,CACd,GAAI,aAAiB,GAAmB,EAAM,OAAS,iBACrD,OAAO,MAAO,EAAO,KAAK,EAAM,CAElC,MAAM,IAER,CAGJ,SAAgBC,GAAiB,CAC/B,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAY,MAAO,EACnB,EAAU,MAAO,EAEjB,OACG,CACL,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,qB
AAsB,EACtB,aAAc,IAAA,GACd,aAAc,IAAA,GACd,SAAU,IAAA,GACV,iBAAkB,EAAI,KAAO,KAC7B,sBAAuB,GACxB,EAsBH,MAAO,CACL,OAAQ,EAAU,OAClB,OAAS,GACA,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,EAAK,GACR,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAiB,CAC3C,CAGH,EAAK,QAAU,CACb,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EAAK,GACX,OAAQ,EAAU,OACnB,CAGD,IAAM,EAAU,CACd,OAAQ,EAAU,OAClB,IAAK,EAAK,GACV,YACE,EAAK,UAAU,aAAa,UAAU,EACtC,2BACF,SAAU,EAAoB,EAAK,CACpC,CAGD,OADA,MAAO,EAAU,UAAU,EAAK,GAAI,IAAI,WAAe,EAAQ,CACxD,GACP,CAGJ,OAAS,GACA,EAAU,aAAa,EAAQ,CAGxC,OACE,EACA,IAIO,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,UAAS,SAAQ,OAAQ,GAAiB,EAC5C,CAAE,cAAe,EAGjB,EAAS,MAAO,EAAU,EAAS,EAAS,EAAU,CAE5D,EAAO,OAAS,EAEhB,MAAO,EAAQ,IAAI,EAAS,EAAqB,CAEjD,IAAM,EAAU,CACd,OAAQ,EAAU,OAClB,IAAK,EACL,YACE,EAAO,UAAU,aAAe,2BAClC,SAAU,EAAoB,EAAqB,CACpD,CAGK,EAAiB,EAAO,iBAAiB,EAAa,CAG5D,GAAI,EAAU,gCAAiC,CAC7C,IAAM,EAAW,EAAO,OAAS,EAEjC,OAAO,MAAO,EAAU,gCACtB,EACA,EAAO,OACP,EACA,EACA,EACA,EACD,KACI,CAEL,IAAM,EAAS,EAAe,WAAW,CACnCC,EAAuB,EAAE,CAC3B,EAAa,EAGjB,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAO,EAAO,YACpC,EAAO,MAAM,CACd,CACD,GAAI,EAAM,MAEV,EAAO,KAAK,EAAM,CAClB,IAAM,EAAY,EAAM,WACxB,GAAc,EACd,IAAa,EAAW,CAI1B,IAAM,EAAgB,IAAI,WAAW,EAAW,CAC5C,EAAW,EACf,IAAK,IAAM,KAAS,EAClB,EAAc,IAAI,EAAO,EAAS,CAClC,GAAY,EAAM,WAIpB,GAAI,EAAO,SAAW,EAEpB,MAAO,EAAU,UAAU,EAAS,EAAe,EAAQ,KACtD,CAEL,IAAM,EAAW,GAAG,EAAQ,QAG5B,MAAO,EAAU,mBACf,EACA,EACA,EACD,CAGD,MAAO,EAAU,eACf,CAAC,EAAS,EAAS,CACnB,EACA,EACD,CAGD,MAAO,EAAU,sBAAsB,EAAS,CAGlD,OAAO,IAET,CAGJ,kBACA,uBAzJA,GACkC,CAClC,IAAM,EAAe,GAAiB,CAEhC,OAAgB,CACpB,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAa,wBACtB,IAAK,SACH,MAAO,GACT,QACE,MAAO,OAET,CAEJ,OAAO,EAAO,QAAQ,EAAO,EA2I7B,KAAO,GACE,EAAO,IAAI,WAAa,CAE7B,OADe,MAAO,EAAU,gBAAgB,EAAQ,EAExD,CAEL,EACD,CAGJ,MAAa,EAAgB,GAC3BD,GAAgB,CAAC,KAAK,EAAO,QAAQ,EAAmB,EAAO,CAAC,CAAC,CAEtD,EAAkB,GAC7BA,GAAgB,CAAC,KAAK,EAAO,QAAQ,EAAqB,EAAO,CAAC,CAAC"}
1
+
{"version":3,"file":"index.mjs","names":["stringifyUploadKeys","getUpload","uploadRequestsTotal","activeUploadsGauge","fileSizeHistogram","uploadErrorsTotal","withUploadMetrics","withTimingMetrics","uploadDurationHistogram","uploadSuccessTotal","uploadUrl","createGCSStore"],"sources":["../src/gcs-store.ts","../src/services/gcs-client.service.ts","../src/services/gcs-client-nodejs.service.ts","../src/services/gcs-client-rest.service.ts","../src/gcs-store-v2.ts"],"sourcesContent":["import { PassThrough, pipeline, Readable, Transform } from \"node:stream\";\nimport type { Bucket, CreateWriteStreamOptions } from \"@google-cloud/storage\";\nimport { Storage } from \"@google-cloud/storage\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n type DataStore,\n type DataStoreCapabilities,\n type DataStoreWriteOptions,\n DEFAULT_STREAMING_CONFIG,\n type KvStore,\n type StreamingConfig,\n type StreamWriteOptions,\n type StreamWriteResult,\n type UploadFile,\n UploadFileKVStore,\n type UploadStrategy,\n} from \"@uploadista/core/types\";\nimport {\n gcsActiveUploadsGauge as activeUploadsGauge,\n gcsFileSizeHistogram as fileSizeHistogram,\n logGCSUploadCompletion,\n trackGCSError,\n gcsUploadDurationHistogram as uploadDurationHistogram,\n gcsUploadErrorsTotal as uploadErrorsTotal,\n gcsUploadRequestsTotal as uploadRequestsTotal,\n gcsUploadSuccessTotal as uploadSuccessTotal,\n withGCSTimingMetrics as withTimingMetrics,\n withGCSUploadMetrics as withUploadMetrics,\n} from \"@uploadista/observability\";\nimport { Effect, Stream } from \"effect\";\n\nexport type GCSStoreOptions = {\n keyFilename?: string;\n credentials?: object;\n bucketName: string;\n kvStore: KvStore<UploadFile>;\n};\n\n/**\n * Convert the Upload object to a format that can be stored in GCS metadata.\n */\nfunction stringifyUploadKeys(upload: UploadFile) {\n return {\n size: upload.size ?? null,\n sizeIsDeferred: `${upload.sizeIsDeferred}`,\n offset: upload.offset,\n metadata: JSON.stringify(upload.metadata),\n storage: JSON.stringify(upload.storage),\n };\n}\n\nconst getUpload = (\n bucket: Bucket,\n id: string,\n kvStore: KvStore<UploadFile>,\n) => {\n return Effect.gen(function* () {\n try {\n const [metadata] = yield* Effect.promise(() =>\n bucket.file(id).getMetadata(),\n );\n const { size, metadata: meta } = metadata;\n const file = yield* kvStore.get(id);\n return {\n id,\n size: size ? Number.parseInt(`${size}`, 10) : undefined,\n offset: metadata.size ? Number.parseInt(`${metadata.size}`, 10) : 0, // `size` is set by GCS\n metadata: meta ? (meta as Record<string, string>) : undefined,\n storage: {\n id: file.storage.id,\n type: file.storage.type,\n path: id,\n bucket: bucket.name,\n },\n };\n } catch (error) {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return yield* Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n }\n\n throw error;\n }\n });\n};\n\nexport function createGCSStore({\n keyFilename,\n credentials,\n bucketName,\n}: Omit<GCSStoreOptions, \"kvStore\">) {\n return Effect.gen(function* () {\n const kvStore = yield* UploadFileKVStore;\n return gcsStore({ keyFilename, credentials, bucketName, kvStore });\n });\n}\n\nexport function gcsStore({\n keyFilename,\n credentials,\n bucketName,\n kvStore,\n}: GCSStoreOptions): DataStore<UploadFile> {\n const storage = new Storage(\n keyFilename ? { keyFilename } : credentials ? 
{ credentials } : {},\n );\n\n const bucket = storage.bucket(bucketName);\n\n const getCapabilities = (): DataStoreCapabilities => {\n return {\n supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3\n supportsConcatenation: true, // Can combine files using bucket.combine\n supportsDeferredLength: true,\n supportsResumableUploads: true, // Through patch files\n supportsTransactionalUploads: false,\n supportsStreamingRead: true, // Supports streaming reads via file.createReadStream\n supportsStreamingWrite: true, // Supports streaming writes via resumable uploads\n maxConcurrentUploads: 1, // Sequential operations\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB default\n requiresOrderedChunks: true, // Due to combine operation\n requiresMimeTypeValidation: true,\n maxValidationSize: undefined, // no size limit\n };\n };\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n\n const result = (() => {\n switch (strategy) {\n case \"parallel\":\n return capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n })();\n\n return Effect.succeed(result);\n };\n\n return {\n bucket: bucket.name,\n create: (file: UploadFile) => {\n return Effect.gen(function* () {\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n yield* fileSizeHistogram(Effect.succeed(file.size || 0));\n\n if (!file.id) {\n yield* uploadErrorsTotal(Effect.succeed(1));\n return yield* Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n }\n\n const gcs_file = bucket.file(file.id);\n\n file.storage = {\n id: file.storage.id,\n type: file.storage.type,\n path: file.id,\n bucket: bucket.name,\n };\n\n console.log(\"file\", gcs_file.id);\n\n const options: CreateWriteStreamOptions = {\n metadata: {\n metadata: {\n ...stringifyUploadKeys(file),\n },\n },\n };\n if (file.metadata?.contentType) {\n options.contentType = file.metadata.contentType.toString();\n }\n\n return yield* Effect.tryPromise({\n try: () => {\n console.log(\"creating file\", gcs_file.id);\n return new Promise<UploadFile>((resolve, reject) => {\n const fake_stream = new PassThrough();\n fake_stream.end();\n fake_stream\n .pipe(gcs_file.createWriteStream(options))\n .on(\"error\", reject)\n .on(\"finish\", () => {\n resolve(file);\n });\n });\n },\n catch: (error) => {\n console.error(\"error creating file\", error);\n Effect.runSync(\n trackGCSError(\"create\", error, {\n upload_id: file.id,\n bucket: bucket.name,\n }),\n );\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", {\n cause: error,\n });\n },\n });\n });\n },\n read: (file_id: string) => {\n return Effect.tryPromise({\n try: async () => {\n const [buffer] = await bucket.file(file_id).download();\n return new Uint8Array(buffer);\n },\n catch: (error) => {\n Effect.runSync(\n trackGCSError(\"read\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", {\n cause: error,\n });\n },\n });\n },\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Uses GCS file.createReadStream under the hood.\n *\n * @param file_id - The unique identifier of the file to 
read\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n */\n readStream: (file_id: string, config?: StreamingConfig) =>\n Effect.gen(function* () {\n // Merge config with defaults\n const effectiveConfig = {\n ...DEFAULT_STREAMING_CONFIG,\n ...config,\n };\n\n // Verify file exists\n const file = bucket.file(file_id);\n const [exists] = yield* Effect.tryPromise({\n try: () => file.exists(),\n catch: (error) => {\n Effect.runSync(\n trackGCSError(\"readStream\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", {\n cause: error,\n });\n },\n });\n\n if (!exists) {\n return yield* Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n }\n\n // Create a Node.js readable stream from GCS\n const nodeStream = file.createReadStream();\n\n // Convert Node.js stream to Effect Stream with chunking\n return Stream.async<Uint8Array, UploadistaError>((emit) => {\n const chunkSize = effectiveConfig.chunkSize;\n let buffer = new Uint8Array(0);\n\n nodeStream.on(\"data\", (chunk: Buffer) => {\n // Combine buffer with new data\n const combined = new Uint8Array(buffer.length + chunk.length);\n combined.set(buffer);\n combined.set(new Uint8Array(chunk), buffer.length);\n buffer = combined;\n\n // Emit chunks of the configured size\n while (buffer.length >= chunkSize) {\n const outChunk = buffer.slice(0, chunkSize);\n buffer = buffer.slice(chunkSize);\n emit.single(outChunk);\n }\n });\n\n nodeStream.on(\"end\", () => {\n // Emit any remaining data in buffer\n if (buffer.length > 0) {\n emit.single(buffer);\n }\n emit.end();\n });\n\n nodeStream.on(\"error\", (error: Error) => {\n Effect.runSync(\n trackGCSError(\"readStream\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n emit.fail(\n new UploadistaError({\n code: \"FILE_READ_ERROR\",\n status: 500,\n body: \"Failed to read GCS file stream\",\n details: `GCS stream read failed: ${String(error)}`,\n }),\n );\n });\n\n // Cleanup function when stream is interrupted\n return Effect.sync(() => {\n nodeStream.destroy();\n });\n });\n }),\n remove: (file_id: string) => {\n return Effect.gen(function* () {\n try {\n yield* Effect.promise(() => bucket.file(file_id).delete());\n yield* activeUploadsGauge(Effect.succeed(-1));\n } catch (error) {\n Effect.runSync(\n trackGCSError(\"remove\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n }),\n );\n throw error;\n }\n });\n },\n /**\n * Get the file metatata from the object in GCS, then upload a new version\n * passing through the metadata to the new version.\n */\n write: (\n options: DataStoreWriteOptions,\n dependencies: {\n onProgress?: (chunkSize: number) => void;\n },\n ) => {\n return withUploadMetrics(\n options.file_id,\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n const { file_id, offset, stream: effectStream } = options;\n console.log(\"write\", file_id, offset);\n const { onProgress } = dependencies;\n\n // GCS Doesn't persist metadata within versions,\n // get that metadata first\n const upload = yield* getUpload(bucket, file_id, kvStore);\n console.log(\"upload\", upload);\n\n return yield* Effect.promise(\n () =>\n new Promise<number>((resolve, reject) => {\n const file = bucket.file(file_id);\n const destination =\n upload.offset === 0\n ? 
file\n : bucket.file(`${file_id}_patch`);\n\n upload.offset = offset;\n\n const gcsOptions = {\n metadata: {\n metadata: {\n ...stringifyUploadKeys(upload),\n },\n },\n };\n const write_stream =\n destination.createWriteStream(gcsOptions);\n if (!write_stream) {\n Effect.runSync(uploadErrorsTotal(Effect.succeed(1)));\n reject(UploadistaError.fromCode(\"FILE_WRITE_ERROR\"));\n return;\n }\n\n let bytes_received = upload.offset;\n\n // Convert Effect Stream to ReadableStream\n const readableStream = Stream.toReadableStream(effectStream);\n\n const transform = new Transform({\n transform(\n chunk: Buffer,\n _: string,\n callback: (error?: Error | null, data?: Buffer) => void,\n ) {\n bytes_received += chunk.length;\n onProgress?.(bytes_received);\n callback(null, chunk);\n },\n });\n\n const nodeReadable = Readable.fromWeb(readableStream);\n\n pipeline(\n nodeReadable,\n transform,\n write_stream,\n async (e: Error | null) => {\n if (e) {\n console.error(\"error writing file\", e);\n Effect.runSync(\n trackGCSError(\"write\", e, {\n upload_id: file_id,\n bucket: bucket.name,\n offset,\n }),\n );\n try {\n await destination.delete({ ignoreNotFound: true });\n } finally {\n reject(UploadistaError.fromCode(\"FILE_WRITE_ERROR\"));\n }\n } else {\n try {\n if (file !== destination) {\n await bucket.combine([file, destination], file);\n await Promise.all([\n file.setMetadata(gcsOptions.metadata),\n destination.delete({ ignoreNotFound: true }),\n ]);\n }\n\n // Log completion\n Effect.runSync(\n logGCSUploadCompletion(file_id, {\n fileSize: upload.size || 0,\n totalDurationMs: Date.now() - startTime,\n partsCount: 1,\n averagePartSize: upload.size,\n throughputBps:\n (upload.size || 0) / (Date.now() - startTime),\n retryCount: 0,\n }),\n );\n Effect.runSync(uploadSuccessTotal(Effect.succeed(1)));\n Effect.runSync(\n activeUploadsGauge(Effect.succeed(-1)),\n );\n\n resolve(bytes_received);\n } catch (error) {\n console.error(error);\n Effect.runSync(\n trackGCSError(\"write\", error, {\n upload_id: file_id,\n bucket: bucket.name,\n operation: \"combine\",\n }),\n );\n reject(UploadistaError.fromCode(\"FILE_WRITE_ERROR\"));\n }\n }\n },\n );\n }),\n );\n }),\n ),\n );\n },\n /**\n * Writes file content from a stream without knowing the final size upfront.\n * Uses GCS resumable upload with streaming directly to the write stream.\n *\n * @param fileId - The unique identifier for the file\n * @param options - Stream write options including the Effect Stream\n * @returns StreamWriteResult with final size after stream completes\n */\n writeStream: (\n fileId: string,\n options: StreamWriteOptions,\n ): Effect.Effect<StreamWriteResult, UploadistaError> =>\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n\n yield* Effect.logInfo(\"Starting streaming write to GCS\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n bucket: bucket.name,\n size_hint: options.sizeHint,\n }),\n );\n\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n\n const file = bucket.file(fileId);\n\n // Set up write stream options\n const gcsOptions: CreateWriteStreamOptions = {\n resumable: true, // Enable resumable uploads for better reliability\n metadata: options.metadata\n ? 
{ metadata: options.metadata }\n : undefined,\n };\n\n if (options.contentType) {\n gcsOptions.contentType = options.contentType;\n }\n\n // Create the write stream\n const writeStream = file.createWriteStream(gcsOptions);\n\n // Stream the content and track bytes\n const result = yield* Effect.tryPromise({\n try: () =>\n new Promise<number>((resolve, reject) => {\n let totalBytes = 0;\n\n // Create a pass-through stream to track bytes\n const passThrough = new PassThrough();\n\n passThrough.on(\"data\", (chunk: Buffer) => {\n totalBytes += chunk.length;\n });\n\n // Pipe passThrough to GCS writeStream\n passThrough.pipe(writeStream);\n\n writeStream.on(\"error\", (error: Error) => {\n Effect.runSync(\n trackGCSError(\"writeStream\", error, {\n upload_id: fileId,\n bucket: bucket.name,\n }),\n );\n reject(error);\n });\n\n writeStream.on(\"finish\", () => {\n resolve(totalBytes);\n });\n\n // Convert Effect Stream to readable and pipe to passThrough\n const readableStream = Stream.toReadableStream(options.stream);\n const nodeReadable = Readable.fromWeb(readableStream);\n\n nodeReadable.on(\"error\", (error: Error) => {\n Effect.runSync(\n trackGCSError(\"writeStream\", error, {\n upload_id: fileId,\n bucket: bucket.name,\n phase: \"read\",\n }),\n );\n passThrough.destroy(error);\n reject(error);\n });\n\n pipeline(nodeReadable, passThrough, (error) => {\n if (error) {\n Effect.runSync(\n trackGCSError(\"writeStream\", error, {\n upload_id: fileId,\n bucket: bucket.name,\n phase: \"pipeline\",\n }),\n );\n reject(error);\n }\n });\n }),\n catch: (error) => {\n Effect.runSync(uploadErrorsTotal(Effect.succeed(1)));\n Effect.runSync(activeUploadsGauge(Effect.succeed(-1)));\n return new UploadistaError({\n code: \"FILE_WRITE_ERROR\",\n status: 500,\n body: \"Failed to write stream to GCS\",\n details: `GCS streaming write failed: ${String(error)}`,\n });\n },\n });\n\n // Log completion metrics\n const endTime = Date.now();\n const totalDurationMs = endTime - startTime;\n const throughputBps =\n totalDurationMs > 0 ? 
(result * 1000) / totalDurationMs : 0;\n\n yield* logGCSUploadCompletion(fileId, {\n fileSize: result,\n totalDurationMs,\n partsCount: 1,\n averagePartSize: result,\n throughputBps,\n retryCount: 0,\n });\n\n yield* uploadSuccessTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n yield* fileSizeHistogram(Effect.succeed(result));\n\n yield* Effect.logInfo(\"Streaming write to GCS completed\").pipe(\n Effect.annotateLogs({\n upload_id: fileId,\n total_bytes: result,\n duration_ms: totalDurationMs,\n }),\n );\n\n return {\n id: fileId,\n size: result,\n path: fileId,\n bucket: bucket.name,\n } satisfies StreamWriteResult;\n }),\n ),\n getCapabilities,\n validateUploadStrategy,\n };\n}\n","import type { UploadistaError } from \"@uploadista/core/errors\";\nimport { Context, type Effect } from \"effect\";\n\nexport interface GCSOperationContext {\n bucket: string;\n key: string;\n contentType?: string;\n metadata?: Record<string, string | null>;\n}\n\nexport interface GCSObjectMetadata {\n name: string;\n bucket: string;\n size?: number;\n contentType?: string;\n metadata?: Record<string, string | null>;\n generation?: string;\n timeCreated?: string;\n updated?: string;\n}\n\nexport type GCSClient = {\n readonly bucket: string;\n\n // Basic GCS operations\n readonly getObject: (\n key: string,\n ) => Effect.Effect<ReadableStream, UploadistaError>;\n readonly getObjectMetadata: (\n key: string,\n ) => Effect.Effect<GCSObjectMetadata, UploadistaError>;\n readonly getObjectBuffer: (\n key: string,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n readonly objectExists: (\n key: string,\n ) => Effect.Effect<boolean, UploadistaError>;\n readonly putObject: (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => Effect.Effect<string, UploadistaError>;\n readonly putObjectFromStream?: (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n ) => Effect.Effect<number, UploadistaError>;\n readonly putObjectFromStreamWithPatching?: (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n isAppend?: boolean,\n ) => Effect.Effect<number, UploadistaError>;\n readonly deleteObject: (key: string) => Effect.Effect<void, UploadistaError>;\n\n // Resumable upload operations\n readonly createResumableUpload: (\n context: GCSOperationContext,\n ) => Effect.Effect<string, UploadistaError>; // Returns upload URL\n readonly uploadChunk: (\n uploadUrl: string,\n chunk: Uint8Array,\n start: number,\n total?: number,\n ) => Effect.Effect<\n { completed: boolean; bytesUploaded: number },\n UploadistaError\n >;\n readonly getUploadStatus: (\n uploadUrl: string,\n ) => Effect.Effect<\n { bytesUploaded: number; completed: boolean },\n UploadistaError\n >;\n readonly cancelUpload: (\n uploadUrl: string,\n ) => Effect.Effect<void, UploadistaError>;\n\n // Compose operations (GCS specific - for combining files)\n readonly composeObjects: (\n sourceKeys: string[],\n destinationKey: string,\n context?: Partial<GCSOperationContext>,\n ) => Effect.Effect<string, UploadistaError>;\n\n // Temporary file operations (for patches)\n readonly putTemporaryObject: (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => Effect.Effect<string, UploadistaError>;\n readonly 
getTemporaryObject: (\n key: string,\n ) => Effect.Effect<ReadableStream | undefined, UploadistaError>;\n readonly deleteTemporaryObject: (\n key: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\nexport class GCSClientService extends Context.Tag(\"GCSClientService\")<\n GCSClientService,\n GCSClient\n>() {}\n\nexport interface GCSClientConfig {\n bucket: string;\n // For Node.js implementation\n keyFilename?: string;\n credentials?: object;\n projectId?: string;\n // For REST API implementation\n accessToken?: string;\n}\n","import { pipeline, Readable, Transform } from \"node:stream\";\nimport { type Bucket, Storage } from \"@google-cloud/storage\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport { Effect, Layer } from \"effect\";\nimport {\n type GCSClientConfig,\n GCSClientService,\n type GCSObjectMetadata,\n type GCSOperationContext,\n} from \"./gcs-client.service\";\n\nfunction createNodeJSGCSClient(config: GCSClientConfig) {\n // Dynamic import to avoid issues in non-Node environments\n\n const storage = new Storage({\n keyFilename: config.keyFilename,\n credentials: config.credentials,\n projectId: config.projectId,\n });\n\n const bucket: Bucket = storage.bucket(config.bucket);\n\n const getObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n const stream = file.createReadStream();\n\n // Convert Node.js stream to Web ReadableStream\n return new ReadableStream({\n start(controller) {\n stream.on(\"data\", (chunk) => {\n controller.enqueue(new Uint8Array(chunk));\n });\n\n stream.on(\"end\", () => {\n controller.close();\n });\n\n stream.on(\"error\", (error) => {\n controller.error(error);\n });\n },\n });\n },\n catch: (error) => {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const getObjectMetadata = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n const [metadata] = await file.getMetadata();\n\n const parseMetadata = (\n meta: Record<string, string | number | boolean | null> | undefined,\n ) => {\n if (!meta) return {};\n if (typeof meta.metadata === \"string\") {\n try {\n return JSON.parse(meta.metadata);\n } catch {\n return meta;\n }\n }\n return meta;\n };\n\n return {\n name: metadata.name,\n bucket: metadata.bucket,\n size: metadata.size\n ? 
Number.parseInt(`${metadata.size}`, 10)\n : undefined,\n contentType: metadata.contentType,\n metadata: parseMetadata(metadata.metadata),\n generation: metadata.generation,\n timeCreated: metadata.timeCreated,\n updated: metadata.updated,\n } as GCSObjectMetadata;\n },\n catch: (error) => {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const objectExists = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n const [exists] = await file.exists();\n return exists;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const putObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n return new Promise<string>((resolve, reject) => {\n const file = bucket.file(key);\n const options = {\n metadata: {\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n },\n };\n\n const stream = file.createWriteStream(options);\n\n stream.on(\"error\", reject);\n stream.on(\"finish\", () => {\n resolve(file.name);\n });\n\n stream.end(Buffer.from(body));\n });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const putObjectFromStream = (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n ) =>\n Effect.tryPromise({\n try: async () => {\n return new Promise<number>((resolve, reject) => {\n const file = bucket.file(key);\n const options = {\n metadata: {\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n },\n };\n\n const writeStream = file.createWriteStream(options);\n let bytesWritten = offset;\n\n const transform = new Transform({\n transform(\n chunk: Buffer,\n _: string,\n callback: (error?: Error | null, data?: Buffer) => void,\n ) {\n bytesWritten += chunk.length;\n onProgress?.(bytesWritten);\n callback(null, chunk);\n },\n });\n\n const nodeReadable = Readable.fromWeb(readableStream);\n\n pipeline(\n nodeReadable,\n transform,\n writeStream,\n (error: Error | null) => {\n if (error) {\n reject(\n UploadistaError.fromCode(\"FILE_WRITE_ERROR\", {\n cause: error,\n }),\n );\n } else {\n resolve(bytesWritten);\n }\n },\n );\n });\n },\n catch: (error) => {\n console.error(\"error putting object from stream\", error);\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const deleteObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const file = bucket.file(key);\n await file.delete({ ignoreNotFound: true });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const createResumableUpload = (context: GCSOperationContext) =>\n Effect.tryPromise({\n try: async () => {\n // For Node.js, we'll use a simplified approach\n // In production, you'd want to implement proper resumable uploads\n // Return a pseudo-URL that we can use to identify this upload\n return `resumable://nodejs/${context.bucket}/${context.key}`;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { 
cause: error });\n },\n });\n\n const uploadChunk = (\n uploadUrl: string,\n chunk: Uint8Array,\n start: number,\n total?: number,\n ) =>\n Effect.tryPromise({\n try: async () => {\n // Extract key from pseudo-URL\n const key = uploadUrl.split(\"/\").pop();\n if (!key) {\n throw new Error(\"Invalid upload URL\");\n }\n\n const file = bucket.file(key);\n\n return new Promise<{ completed: boolean; bytesUploaded: number }>(\n (resolve, reject) => {\n const stream = file.createWriteStream({\n resumable: true,\n offset: start,\n });\n\n stream.on(\"error\", reject);\n stream.on(\"finish\", () => {\n resolve({\n completed: total ? start + chunk.length >= total : false,\n bytesUploaded: start + chunk.length,\n });\n });\n\n stream.end(Buffer.from(chunk));\n },\n );\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const getUploadStatus = (uploadUrl: string) =>\n Effect.promise(async () => {\n try {\n const key = uploadUrl.split(\"/\").pop();\n if (!key) {\n throw new Error(\"Invalid upload URL\");\n }\n\n const file = bucket.file(key);\n const [metadata] = await file.getMetadata();\n\n return {\n bytesUploaded: metadata.size\n ? Number.parseInt(`${metadata.size}`, 10)\n : 0,\n completed: true, // Simplified for now\n };\n } catch (_error) {\n // If file doesn't exist, upload hasn't started\n return { bytesUploaded: 0, completed: false };\n }\n });\n\n const cancelUpload = (uploadUrl: string) =>\n Effect.tryPromise({\n try: async () => {\n const key = uploadUrl.split(\"/\").pop();\n if (!key) {\n throw new Error(\"Invalid upload URL\");\n }\n\n const file = bucket.file(key);\n await file.delete({ ignoreNotFound: true });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const composeObjects = (\n sourceKeys: string[],\n destinationKey: string,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const sources = sourceKeys.map((key) => bucket.file(key));\n const destination = bucket.file(destinationKey);\n\n await bucket.combine(sources, destination);\n\n if (context?.metadata) {\n await destination.setMetadata({\n metadata: context.metadata,\n });\n }\n\n return destinationKey;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const putObjectFromStreamWithPatching = (\n key: string,\n offset: number,\n readableStream: ReadableStream,\n context?: Partial<GCSOperationContext>,\n onProgress?: (chunkSize: number) => void, // Called with incremental bytes per chunk\n isAppend = false,\n ) =>\n Effect.gen(function* () {\n if (!isAppend) {\n // Direct upload for new files\n return yield* putObjectFromStream(\n key,\n offset,\n readableStream,\n context,\n onProgress,\n );\n }\n\n // For append operations, create a patch file and then combine\n const patchKey = `${key}_patch`;\n const bytesWritten = yield* putObjectFromStream(\n patchKey,\n offset,\n readableStream,\n context,\n onProgress,\n );\n\n // Combine original with patch\n yield* composeObjects([key, patchKey], key, context);\n\n // Clean up patch file\n yield* deleteObject(patchKey);\n\n return bytesWritten;\n });\n\n const putTemporaryObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => putObject(`${key}_tmp`, body, context);\n\n const getTemporaryObject = (key: string) =>\n Effect.gen(function* () {\n try {\n return yield* getObject(`${key}_tmp`);\n } catch {\n return 
undefined;\n }\n });\n\n const deleteTemporaryObject = (key: string) => deleteObject(`${key}_tmp`);\n\n const getObjectBuffer = (key: string) => {\n return Effect.tryPromise({\n try: async () => {\n const [buffer] = await bucket.file(key).download();\n return new Uint8Array(buffer);\n },\n catch: (error) => {\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === 404\n ) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", {\n cause: error,\n });\n },\n });\n };\n\n return {\n bucket: config.bucket,\n getObject,\n getObjectBuffer,\n getObjectMetadata,\n objectExists,\n putObject,\n putObjectFromStream,\n putObjectFromStreamWithPatching,\n deleteObject,\n createResumableUpload,\n uploadChunk,\n getUploadStatus,\n cancelUpload,\n composeObjects,\n putTemporaryObject,\n getTemporaryObject,\n deleteTemporaryObject,\n };\n}\n\nexport const GCSClientNodeJSLayer = (config: GCSClientConfig) =>\n Layer.succeed(GCSClientService, createNodeJSGCSClient(config));\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport { Effect, Layer } from \"effect\";\nimport {\n type GCSClientConfig,\n GCSClientService,\n type GCSObjectMetadata,\n type GCSOperationContext,\n} from \"./gcs-client.service\";\n\nfunction createRESTGCSClient(config: GCSClientConfig) {\n if (!config.accessToken) {\n throw new Error(\"accessToken is required for REST API implementation\");\n }\n\n const baseUrl = `https://storage.googleapis.com/storage/v1/b/${config.bucket}`;\n const uploadUrl = `https://storage.googleapis.com/upload/storage/v1/b/${config.bucket}/o`;\n const accessToken = config.accessToken;\n\n const getAuthHeaders = () => ({\n Authorization: `Bearer ${accessToken}`,\n \"Content-Type\": \"application/json\",\n });\n\n const getObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}?alt=media`,\n {\n headers: {\n Authorization: `Bearer ${accessToken}`,\n },\n },\n );\n\n if (!response.ok) {\n if (response.status === 404) {\n throw new Error(\"File not found\");\n }\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n if (!response.body) {\n throw new Error(\"body not found\");\n }\n\n return response.body;\n },\n catch: (error) => {\n if (error instanceof Error && error.message.includes(\"not found\")) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const getObjectMetadata = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}`,\n {\n headers: getAuthHeaders(),\n },\n );\n\n if (!response.ok) {\n if (response.status === 404) {\n throw new Error(\"File not found\");\n }\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n const data = (await response.json()) as {\n name: string;\n bucket: string;\n size?: string;\n contentType?: string;\n metadata?: Record<string, string>;\n generation?: string;\n timeCreated?: string;\n updated?: string;\n };\n\n return {\n name: data.name,\n bucket: data.bucket,\n size: data.size ? 
Number.parseInt(data.size, 10) : undefined,\n contentType: data.contentType,\n metadata: data.metadata || {},\n generation: data.generation,\n timeCreated: data.timeCreated,\n updated: data.updated,\n } as GCSObjectMetadata;\n },\n catch: (error) => {\n if (error instanceof Error && error.message.includes(\"not found\")) {\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n }\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const objectExists = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}`,\n {\n method: \"HEAD\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n },\n },\n );\n\n return response.ok;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const putObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const metadata = {\n name: key,\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n };\n\n const response = await fetch(\n `${uploadUrl}?uploadType=media&name=${encodeURIComponent(key)}`,\n {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n \"Content-Type\": metadata.contentType,\n \"Content-Length\": body.length.toString(),\n },\n body: body,\n },\n );\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n return key;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const deleteObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}`,\n {\n method: \"DELETE\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n },\n },\n );\n\n // 404 is OK - object didn't exist\n if (!response.ok && response.status !== 404) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const createResumableUpload = (context: GCSOperationContext) =>\n Effect.tryPromise({\n try: async () => {\n const metadata = {\n name: context.key,\n contentType: context.contentType || \"application/octet-stream\",\n metadata: context.metadata || {},\n };\n\n const response = await fetch(\n `${uploadUrl}?uploadType=resumable&name=${encodeURIComponent(context.key)}`,\n {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${accessToken}`,\n \"Content-Type\": \"application/json\",\n },\n body: JSON.stringify(metadata),\n },\n );\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n const resumableUploadUrl = response.headers.get(\"Location\");\n if (!resumableUploadUrl) {\n throw new Error(\"No upload URL returned\");\n }\n\n return resumableUploadUrl;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const uploadChunk = (\n uploadUrl: string,\n chunk: Uint8Array,\n start: number,\n total?: number,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const end = start + chunk.length - 1;\n const contentRange = total\n ? 
`bytes ${start}-${end}/${total}`\n : `bytes ${start}-${end}/*`;\n\n const response = await fetch(uploadUrl, {\n method: \"PUT\",\n headers: {\n \"Content-Length\": chunk.length.toString(),\n \"Content-Range\": contentRange,\n },\n body: chunk,\n });\n\n // 308 means more data needed, 200/201 means complete\n const completed = response.status === 200 || response.status === 201;\n\n if (!completed && response.status !== 308) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n return {\n completed,\n bytesUploaded: end + 1,\n };\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const getUploadStatus = (uploadUrl: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(uploadUrl, {\n method: \"PUT\",\n headers: {\n \"Content-Range\": \"bytes */*\",\n },\n });\n\n if (response.status === 308) {\n // Upload incomplete\n const range = response.headers.get(\"Range\");\n const bytesUploaded = range\n ? Number.parseInt(range.split(\"-\")[1], 10) + 1\n : 0;\n\n return {\n bytesUploaded,\n completed: false,\n };\n } else if (response.status === 200 || response.status === 201) {\n // Upload complete\n return {\n bytesUploaded: 0, // We don't know the exact size\n completed: true,\n };\n } else {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const cancelUpload = (uploadUrl: string) =>\n Effect.tryPromise({\n try: async () => {\n // Cancel by sending DELETE to upload URL\n await fetch(uploadUrl, {\n method: \"DELETE\",\n });\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error });\n },\n });\n\n const composeObjects = (\n sourceKeys: string[],\n destinationKey: string,\n context?: Partial<GCSOperationContext>,\n ) =>\n Effect.tryPromise({\n try: async () => {\n const composeRequest = {\n kind: \"storage#composeRequest\",\n sourceObjects: sourceKeys.map((key) => ({ name: key })),\n destination: {\n name: destinationKey,\n contentType: context?.contentType || \"application/octet-stream\",\n metadata: context?.metadata || {},\n },\n };\n\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(destinationKey)}/compose`,\n {\n method: \"POST\",\n headers: getAuthHeaders(),\n body: JSON.stringify(composeRequest),\n },\n );\n\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n\n return destinationKey;\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error });\n },\n });\n\n const putTemporaryObject = (\n key: string,\n body: Uint8Array,\n context?: Partial<GCSOperationContext>,\n ) => putObject(`${key}_tmp`, body, context);\n\n const getTemporaryObject = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n try {\n return await getObject(`${key}_tmp`).pipe(Effect.runPromise);\n } catch {\n return undefined;\n }\n },\n catch: () => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\");\n },\n });\n\n const deleteTemporaryObject = (key: string) => deleteObject(`${key}_tmp`);\n\n const getObjectBuffer = (key: string) =>\n Effect.tryPromise({\n try: async () => {\n const response = await fetch(\n `${baseUrl}/o/${encodeURIComponent(key)}?alt=media`,\n {\n headers: getAuthHeaders(),\n },\n );\n if (!response.ok) {\n throw new Error(`HTTP ${response.status}: ${response.statusText}`);\n }\n return new 
Uint8Array(await response.arrayBuffer());\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"FILE_READ_ERROR\", { cause: error });\n },\n });\n\n return {\n bucket: config.bucket,\n getObject,\n getObjectBuffer,\n getObjectMetadata,\n objectExists,\n putObject,\n deleteObject,\n createResumableUpload,\n uploadChunk,\n getUploadStatus,\n cancelUpload,\n composeObjects,\n putTemporaryObject,\n getTemporaryObject,\n deleteTemporaryObject,\n };\n}\n\nexport const GCSClientRESTLayer = (config: GCSClientConfig) =>\n Layer.succeed(GCSClientService, createRESTGCSClient(config));\n","import { UploadistaError } from \"@uploadista/core/errors\";\nimport {\n type DataStore,\n type DataStoreCapabilities,\n type DataStoreWriteOptions,\n type KvStore,\n type UploadFile,\n UploadFileKVStore,\n type UploadStrategy,\n} from \"@uploadista/core/types\";\nimport { Effect, Stream } from \"effect\";\nimport type {\n GCSClient,\n GCSClientConfig,\n GCSOperationContext,\n} from \"./services\";\nimport {\n GCSClientNodeJSLayer,\n GCSClientRESTLayer,\n GCSClientService,\n} from \"./services\";\n\nexport type GCSStoreOptions = GCSClientConfig;\n\n/**\n * Convert the Upload object to a format that can be stored in GCS metadata.\n */\nfunction stringifyUploadKeys(\n upload: UploadFile,\n): Record<string, string | null> {\n return {\n size: upload.size?.toString() ?? null,\n sizeIsDeferred: `${upload.sizeIsDeferred}`,\n offset: upload.offset?.toString() ?? \"0\",\n metadata: JSON.stringify(upload.metadata),\n storage: JSON.stringify(upload.storage),\n };\n}\n\nconst getUpload = (\n id: string,\n kvStore: KvStore<UploadFile>,\n gcsClient: GCSClient,\n) => {\n return Effect.gen(function* () {\n try {\n const metadata = yield* gcsClient.getObjectMetadata(id);\n const file = yield* kvStore.get(id);\n\n return {\n id,\n size: metadata.size,\n offset: metadata.size || 0,\n metadata: metadata.metadata,\n storage: {\n id: file.storage.id,\n type: file.storage.type,\n path: id,\n bucket: gcsClient.bucket,\n },\n };\n } catch (error) {\n if (error instanceof UploadistaError && error.code === \"FILE_NOT_FOUND\") {\n return yield* Effect.fail(error);\n }\n throw error;\n }\n });\n};\n\nexport function createGCSStore() {\n return Effect.gen(function* () {\n const gcsClient = yield* GCSClientService;\n const kvStore = yield* UploadFileKVStore;\n\n const getCapabilities = (): DataStoreCapabilities => {\n return {\n supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3\n supportsConcatenation: true, // Can combine files using compose\n supportsDeferredLength: true,\n supportsResumableUploads: true, // Through resumable uploads\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1, // Sequential operations\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 8 * 1024 * 1024, // 8MB default\n requiresOrderedChunks: true, // Due to compose operation\n };\n };\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n\n const result = (() => {\n switch (strategy) {\n case \"parallel\":\n return capabilities.supportsParallelUploads;\n case \"single\":\n return true;\n default:\n return false;\n }\n })();\n\n return Effect.succeed(result);\n };\n\n return {\n bucket: gcsClient.bucket,\n create: (file: UploadFile) => {\n return Effect.gen(function* () {\n if (!file.id) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FILE_NOT_FOUND\"),\n );\n }\n\n 
file.storage = {\n id: file.storage.id,\n type: file.storage.type,\n path: file.id,\n bucket: gcsClient.bucket,\n };\n\n // Create empty file\n const context = {\n bucket: gcsClient.bucket,\n key: file.id,\n contentType:\n file.metadata?.contentType?.toString() ||\n \"application/octet-stream\",\n metadata: stringifyUploadKeys(file),\n };\n\n yield* gcsClient.putObject(file.id, new Uint8Array(0), context);\n return file;\n });\n },\n\n remove: (file_id: string) => {\n return gcsClient.deleteObject(file_id);\n },\n\n write: (\n options: DataStoreWriteOptions,\n dependencies: {\n onProgress?: (chunkSize: number) => void;\n },\n ) => {\n return Effect.gen(function* () {\n const { file_id, offset, stream: effectStream } = options;\n const { onProgress } = dependencies;\n\n // Get current upload metadata\n const upload = yield* getUpload(file_id, kvStore, gcsClient);\n\n upload.offset = offset;\n // Persist the updated offset\n yield* kvStore.set(file_id, upload as UploadFile);\n\n const context = {\n bucket: gcsClient.bucket,\n key: file_id,\n contentType:\n upload.metadata?.contentType || \"application/octet-stream\",\n metadata: stringifyUploadKeys(upload as UploadFile),\n } satisfies Partial<GCSOperationContext>;\n\n // Convert Effect Stream to ReadableStream\n const readableStream = Stream.toReadableStream(effectStream);\n\n // Use native streams if available (Node.js implementation)\n if (gcsClient.putObjectFromStreamWithPatching) {\n const isAppend = upload.offset > 0; // Check original file size, not write offset\n\n return yield* gcsClient.putObjectFromStreamWithPatching(\n file_id,\n upload.offset,\n readableStream,\n context,\n onProgress,\n isAppend,\n );\n } else {\n // Fallback to chunk-based approach for REST implementation\n const reader = readableStream.getReader();\n const chunks: Uint8Array[] = [];\n let totalBytes = 0;\n\n // Read all chunks\n while (true) {\n const { done, value } = yield* Effect.promise(() =>\n reader.read(),\n );\n if (done) break;\n\n chunks.push(value);\n const chunkSize = value.byteLength;\n totalBytes += chunkSize;\n onProgress?.(totalBytes);\n }\n\n // Combine all chunks\n const combinedArray = new Uint8Array(totalBytes);\n let position = 0;\n for (const chunk of chunks) {\n combinedArray.set(chunk, position);\n position += chunk.byteLength;\n }\n\n // Check if we need to handle patches (append data)\n if (upload.offset === 0) {\n // Direct upload\n yield* gcsClient.putObject(file_id, combinedArray, context);\n } else {\n // We need to combine with existing data\n const patchKey = `${file_id}_patch`;\n\n // Upload patch data\n yield* gcsClient.putTemporaryObject(\n patchKey,\n combinedArray,\n context,\n );\n\n // Combine original file with patch\n yield* gcsClient.composeObjects(\n [file_id, patchKey],\n file_id,\n context,\n );\n\n // Clean up patch file\n yield* gcsClient.deleteTemporaryObject(patchKey);\n }\n\n return totalBytes;\n }\n });\n },\n\n getCapabilities,\n validateUploadStrategy,\n read: (file_id: string) => {\n return Effect.gen(function* () {\n const buffer = yield* gcsClient.getObjectBuffer(file_id);\n return buffer;\n });\n },\n } as DataStore<UploadFile>;\n });\n}\n\nexport const gcsStoreRest = (config: GCSStoreOptions) =>\n createGCSStore().pipe(Effect.provide(GCSClientRESTLayer(config)));\n\nexport const gcsStoreNodejs = (config: GCSStoreOptions) =>\n 
createGCSStore().pipe(Effect.provide(GCSClientNodeJSLayer(config)));\n"],"mappings":"4oBAyCA,SAASA,EAAoB,EAAoB,CAC/C,MAAO,CACL,KAAM,EAAO,MAAQ,KACrB,eAAgB,GAAG,EAAO,iBAC1B,OAAQ,EAAO,OACf,SAAU,KAAK,UAAU,EAAO,SAAS,CACzC,QAAS,KAAK,UAAU,EAAO,QAAQ,CACxC,CAGH,MAAMC,GACJ,EACA,EACA,IAEO,EAAO,IAAI,WAAa,CAC7B,GAAI,CACF,GAAM,CAAC,GAAY,MAAO,EAAO,YAC/B,EAAO,KAAK,EAAG,CAAC,aAAa,CAC9B,CACK,CAAE,OAAM,SAAU,GAAS,EAC3B,EAAO,MAAO,EAAQ,IAAI,EAAG,CACnC,MAAO,CACL,KACA,KAAM,EAAO,OAAO,SAAS,GAAG,IAAQ,GAAG,CAAG,IAAA,GAC9C,OAAQ,EAAS,KAAO,OAAO,SAAS,GAAG,EAAS,OAAQ,GAAG,CAAG,EAClE,SAAU,GAA0C,IAAA,GACpD,QAAS,CACP,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EACN,OAAQ,EAAO,KAChB,CACF,OACM,EAAO,CACd,GACE,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAEf,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,CAGvE,MAAM,IAER,CAGJ,SAAgB,EAAe,CAC7B,cACA,cACA,cACmC,CACnC,OAAO,EAAO,IAAI,WAAa,CAE7B,OAAO,EAAS,CAAE,cAAa,cAAa,aAAY,QADxC,MAAO,EAC0C,CAAC,EAClE,CAGJ,SAAgB,EAAS,CACvB,cACA,cACA,aACA,WACyC,CAKzC,IAAM,EAJU,IAAI,EAClB,EAAc,CAAE,cAAa,CAAG,EAAc,CAAE,cAAa,CAAG,EAAE,CACnE,CAEsB,OAAO,EAAW,CAEnC,OACG,CACL,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,sBAAuB,GACvB,uBAAwB,GACxB,qBAAsB,EACtB,aAAc,IAAA,GACd,aAAc,IAAA,GACd,SAAU,IAAA,GACV,iBAAkB,EAAI,KAAO,KAC7B,sBAAuB,GACvB,2BAA4B,GAC5B,kBAAmB,IAAA,GACpB,EAsBH,MAAO,CACL,OAAQ,EAAO,KACf,OAAS,GACA,EAAO,IAAI,WAAa,CAK7B,GAJA,MAAOC,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOC,EAAkB,EAAO,QAAQ,EAAK,MAAQ,EAAE,CAAC,CAEpD,CAAC,EAAK,GAER,OADA,MAAOC,EAAkB,EAAO,QAAQ,EAAE,CAAC,CACpC,MAAO,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,CAGvE,IAAM,EAAW,EAAO,KAAK,EAAK,GAAG,CAErC,EAAK,QAAU,CACb,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EAAK,GACX,OAAQ,EAAO,KAChB,CAED,QAAQ,IAAI,OAAQ,EAAS,GAAG,CAEhC,IAAM,EAAoC,CACxC,SAAU,CACR,SAAU,CACR,GAAGL,EAAoB,EAAK,CAC7B,CACF,CACF,CAKD,OAJI,EAAK,UAAU,cACjB,EAAQ,YAAc,EAAK,SAAS,YAAY,UAAU,EAGrD,MAAO,EAAO,WAAW,CAC9B,SACE,QAAQ,IAAI,gBAAiB,EAAS,GAAG,CAClC,IAAI,SAAqB,EAAS,IAAW,CAClD,IAAM,EAAc,IAAI,EACxB,EAAY,KAAK,CACjB,EACG,KAAK,EAAS,kBAAkB,EAAQ,CAAC,CACzC,GAAG,QAAS,EAAO,CACnB,GAAG,aAAgB,CAClB,EAAQ,EAAK,EACb,EACJ,EAEJ,MAAQ,IACN,QAAQ,MAAM,sBAAuB,EAAM,CAC3C,EAAO,QACL,EAAc,SAAU,EAAO,CAC7B,UAAW,EAAK,GAChB,OAAQ,EAAO,KAChB,CAAC,CACH,CACM,EAAgB,SAAS,mBAAoB,CAClD,MAAO,EACR,CAAC,EAEL,CAAC,EACF,CAEJ,KAAO,GACE,EAAO,WAAW,CACvB,IAAK,SAAY,CACf,GAAM,CAAC,GAAU,MAAM,EAAO,KAAK,EAAQ,CAAC,UAAU,CACtD,OAAO,IAAI,WAAW,EAAO,EAE/B,MAAQ,IACN,EAAO,QACL,EAAc,OAAQ,EAAO,CAC3B,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CAEC,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,kBAAmB,CACjD,MAAO,EACR,CAAC,EAEL,CAAC,CAUJ,YAAa,EAAiB,IAC5B,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAkB,CACtB,GAAG,EACH,GAAG,EACJ,CAGK,EAAO,EAAO,KAAK,EAAQ,CAC3B,CAAC,GAAU,MAAO,EAAO,WAAW,CACxC,QAAW,EAAK,QAAQ,CACxB,MAAQ,IACN,EAAO,QACL,EAAc,aAAc,EAAO,CACjC,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACM,EAAgB,SAAS,kBAAmB,CACjD,MAAO,EACR,CAAC,EAEL,CAAC,CAEF,GAAI,CAAC,EACH,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,CAIvE,IAAM,EAAa,EAAK,kBAAkB,CAG1C,OAAO,EAAO,MAAoC,GAAS,CACzD,IAAM,EAAY,EAAgB,UAC9B,EAAS,IAAI,WA2CjB,OAzCA,EAAW,GAAG,OAAS,GAAkB,CAEvC,IAAM,EAAW,IAAI,WAAW,EAAO,OAAS,EAAM,OAAO,CAM7D,IALA,EAAS,IAAI,EAAO,CACpB,EAAS,IAAI,IAAI,WAAW,EAAM,CAAE,EAAO,OAAO,CAClD,EAAS,EAGF,EAAO,QAAU,GAAW,CACjC,IAAM,EAAW,EAAO,MAAM,EAAG,EAAU,CAC3C,EAAS,EAAO,MAAM,EAAU,CAChC,EAAK,OAAO,EAAS,GAEvB,CAEF,EAAW,GAAG,UAAa,CAErB,EAAO,OAAS,GAClB,EAAK,OAAO,EAAO,CAErB,EAAK,KAAK,EACV,CAEF,EAAW,GAAG,QAAU,GAAiB,CACvC,EAAO,QACL,EAAc,aAAc,EAAO,CACjC,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACD,EAAK,KACH,IAAI,EAAgB,CAClB,KAAM,kBACN,OAAQ,IACR,KAAM,iCACN,QAAS,2
BAA2B,OAAO,EAAM,GAClD,CAAC,CACH,EACD,CAGK,EAAO,SAAW,CACvB,EAAW,SAAS,EACpB,EACF,EACF,CACJ,OAAS,GACA,EAAO,IAAI,WAAa,CAC7B,GAAI,CACF,MAAO,EAAO,YAAc,EAAO,KAAK,EAAQ,CAAC,QAAQ,CAAC,CAC1D,MAAOG,EAAmB,EAAO,QAAQ,GAAG,CAAC,OACtC,EAAO,CAOd,MANA,EAAO,QACL,EAAc,SAAU,EAAO,CAC7B,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACK,IAER,CAMJ,OACE,EACA,IAIOG,EACL,EAAQ,QACRC,EACEC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CACtB,CAAE,UAAS,SAAQ,OAAQ,GAAiB,EAClD,QAAQ,IAAI,QAAS,EAAS,EAAO,CACrC,GAAM,CAAE,cAAe,EAIjB,EAAS,MAAOP,EAAU,EAAQ,EAAS,EAAQ,CAGzD,OAFA,QAAQ,IAAI,SAAU,EAAO,CAEtB,MAAO,EAAO,YAEjB,IAAI,SAAiB,EAAS,IAAW,CACvC,IAAM,EAAO,EAAO,KAAK,EAAQ,CAC3B,EACJ,EAAO,SAAW,EACd,EACA,EAAO,KAAK,GAAG,EAAQ,QAAQ,CAErC,EAAO,OAAS,EAEhB,IAAM,EAAa,CACjB,SAAU,CACR,SAAU,CACR,GAAGD,EAAoB,EAAO,CAC/B,CACF,CACF,CACK,EACJ,EAAY,kBAAkB,EAAW,CAC3C,GAAI,CAAC,EAAc,CACjB,EAAO,QAAQK,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACpD,EAAO,EAAgB,SAAS,mBAAmB,CAAC,CACpD,OAGF,IAAI,EAAiB,EAAO,OAGtB,EAAiB,EAAO,iBAAiB,EAAa,CAEtD,EAAY,IAAI,EAAU,CAC9B,UACE,EACA,EACA,EACA,CACA,GAAkB,EAAM,OACxB,IAAa,EAAe,CAC5B,EAAS,KAAM,EAAM,EAExB,CAAC,CAIF,EAFqB,EAAS,QAAQ,EAAe,CAInD,EACA,EACA,KAAO,IAAoB,CACzB,GAAI,EAAG,CACL,QAAQ,MAAM,qBAAsB,EAAE,CACtC,EAAO,QACL,EAAc,QAAS,EAAG,CACxB,UAAW,EACX,OAAQ,EAAO,KACf,SACD,CAAC,CACH,CACD,GAAI,CACF,MAAM,EAAY,OAAO,CAAE,eAAgB,GAAM,CAAC,QAC1C,CACR,EAAO,EAAgB,SAAS,mBAAmB,CAAC,OAGtD,GAAI,CACE,IAAS,IACX,MAAM,EAAO,QAAQ,CAAC,EAAM,EAAY,CAAE,EAAK,CAC/C,MAAM,QAAQ,IAAI,CAChB,EAAK,YAAY,EAAW,SAAS,CACrC,EAAY,OAAO,CAAE,eAAgB,GAAM,CAAC,CAC7C,CAAC,EAIJ,EAAO,QACL,EAAuB,EAAS,CAC9B,SAAU,EAAO,MAAQ,EACzB,gBAAiB,KAAK,KAAK,CAAG,EAC9B,WAAY,EACZ,gBAAiB,EAAO,KACxB,eACG,EAAO,MAAQ,IAAM,KAAK,KAAK,CAAG,GACrC,WAAY,EACb,CAAC,CACH,CACD,EAAO,QAAQI,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACrD,EAAO,QACLN,EAAmB,EAAO,QAAQ,GAAG,CAAC,CACvC,CAED,EAAQ,EAAe,OAChB,EAAO,CACd,QAAQ,MAAM,EAAM,CACpB,EAAO,QACL,EAAc,QAAS,EAAO,CAC5B,UAAW,EACX,OAAQ,EAAO,KACf,UAAW,UACZ,CAAC,CACH,CACD,EAAO,EAAgB,SAAS,mBAAmB,CAAC,GAI3D,EACD,CACL,EACD,CACH,CACF,CAUH,aACE,EACA,IAEAI,EACEC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CAE5B,MAAO,EAAO,QAAQ,kCAAkC,CAAC,KACvD,EAAO,aAAa,CAClB,UAAW,EACX,OAAQ,EAAO,KACf,UAAW,EAAQ,SACpB,CAAC,CACH,CAED,MAAON,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAE5C,IAAM,EAAO,EAAO,KAAK,EAAO,CAG1B,EAAuC,CAC3C,UAAW,GACX,SAAU,EAAQ,SACd,CAAE,SAAU,EAAQ,SAAU,CAC9B,IAAA,GACL,CAEG,EAAQ,cACV,EAAW,YAAc,EAAQ,aAInC,IAAM,EAAc,EAAK,kBAAkB,EAAW,CAGhD,EAAS,MAAO,EAAO,WAAW,CACtC,QACE,IAAI,SAAiB,EAAS,IAAW,CACvC,IAAI,EAAa,EAGX,EAAc,IAAI,EAExB,EAAY,GAAG,OAAS,GAAkB,CACxC,GAAc,EAAM,QACpB,CAGF,EAAY,KAAK,EAAY,CAE7B,EAAY,GAAG,QAAU,GAAiB,CACxC,EAAO,QACL,EAAc,cAAe,EAAO,CAClC,UAAW,EACX,OAAQ,EAAO,KAChB,CAAC,CACH,CACD,EAAO,EAAM,EACb,CAEF,EAAY,GAAG,aAAgB,CAC7B,EAAQ,EAAW,EACnB,CAGF,IAAM,EAAiB,EAAO,iBAAiB,EAAQ,OAAO,CACxD,EAAe,EAAS,QAAQ,EAAe,CAErD,EAAa,GAAG,QAAU,GAAiB,CACzC,EAAO,QACL,EAAc,cAAe,EAAO,CAClC,UAAW,EACX,OAAQ,EAAO,KACf,MAAO,OACR,CAAC,CACH,CACD,EAAY,QAAQ,EAAM,CAC1B,EAAO,EAAM,EACb,CAEF,EAAS,EAAc,EAAc,GAAU,CACzC,IACF,EAAO,QACL,EAAc,cAAe,EAAO,CAClC,UAAW,EACX,OAAQ,EAAO,KACf,MAAO,WACR,CAAC,CACH,CACD,EAAO,EAAM,GAEf,EACF,CACJ,MAAQ,IACN,EAAO,QAAQE,EAAkB,EAAO,QAAQ,EAAE,CAAC,CAAC,CACpD,EAAO,QAAQF,EAAmB,EAAO,QAAQ,GAAG,CAAC,CAAC,CAC/C,IAAI,EAAgB,CACzB,KAAM,mBACN,OAAQ,IACR,KAAM,gCACN,QAAS,+BAA+B,OAAO,EAAM,GACtD,CAAC,EAEL,CAAC,CAII,EADU,KAAK,KAAK,CACQ,EAyBlC,OArBA,MAAO,EAAuB,EAAQ,CACpC,SAAU,EACV,kBACA,WAAY,EACZ,gBAAiB,EACjB,cAPA,EAAkB,EAAK,EAAS,IAAQ,EAAkB,EAQ1D,WAAY,EACb,CAAC,CAEF,MAAOM,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAON,EAAmB,EAAO,QAAQ,GAAG,CAAC,CAC7C,MAAOC,EAAkB,EAAO,QAAQ,EAAO,CAAC,CAEhD,MAAO,EAAO,QAAQ,mCAAmC,CAAC,KAC
xD,EAAO,aAAa,CAClB,UAAW,EACX,YAAa,EACb,YAAa,EACd,CAAC,CACH,CAEM,CACL,GAAI,EACJ,KAAM,EACN,KAAM,EACN,OAAQ,EAAO,KAChB,EACD,CACH,CACH,kBACA,uBAzfA,GACkC,CAClC,IAAM,EAAe,GAAiB,CAEhC,OAAgB,CACpB,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAa,wBACtB,IAAK,SACH,MAAO,GACT,QACE,MAAO,OAET,CAEJ,OAAO,EAAO,QAAQ,EAAO,EA2e9B,CCzhBH,IAAa,EAAb,cAAsC,EAAQ,IAAI,mBAAmB,EAGlE,AAAC,GC/FJ,SAAS,EAAsB,EAAyB,CAStD,IAAM,EANU,IAAI,EAAQ,CAC1B,YAAa,EAAO,YACpB,YAAa,EAAO,YACpB,UAAW,EAAO,UACnB,CAAC,CAE6B,OAAO,EAAO,OAAO,CAE9C,EAAa,GACjB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,IAAM,EADO,EAAO,KAAK,EAAI,CACT,kBAAkB,CAGtC,OAAO,IAAI,eAAe,CACxB,MAAM,EAAY,CAChB,EAAO,GAAG,OAAS,GAAU,CAC3B,EAAW,QAAQ,IAAI,WAAW,EAAM,CAAC,EACzC,CAEF,EAAO,GAAG,UAAa,CACrB,EAAW,OAAO,EAClB,CAEF,EAAO,GAAG,QAAU,GAAU,CAC5B,EAAW,MAAM,EAAM,EACvB,EAEL,CAAC,EAEJ,MAAQ,GAEJ,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAqB,GACzB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,GAAM,CAAC,GAAY,MADN,EAAO,KAAK,EAAI,CACC,aAAa,CAgB3C,MAAO,CACL,KAAM,EAAS,KACf,OAAQ,EAAS,OACjB,KAAM,EAAS,KACX,OAAO,SAAS,GAAG,EAAS,OAAQ,GAAG,CACvC,IAAA,GACJ,YAAa,EAAS,YACtB,UApBA,GACG,CACH,GAAI,CAAC,EAAM,MAAO,EAAE,CACpB,GAAI,OAAO,EAAK,UAAa,SAC3B,GAAI,CACF,OAAO,KAAK,MAAM,EAAK,SAAS,MAC1B,CACN,OAAO,EAGX,OAAO,IAUiB,EAAS,SAAS,CAC1C,WAAY,EAAS,WACrB,YAAa,EAAS,YACtB,QAAS,EAAS,QACnB,EAEH,MAAQ,GAEJ,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,GAAM,CAAC,GAAU,MADJ,EAAO,KAAK,EAAI,CACD,QAAQ,CACpC,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,GACJ,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SACI,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,EAAO,EAAO,KAAK,EAAI,CACvB,EAAU,CACd,SAAU,CACR,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CACF,CAEK,EAAS,EAAK,kBAAkB,EAAQ,CAE9C,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,aAAgB,CACxB,EAAQ,EAAK,KAAK,EAClB,CAEF,EAAO,IAAI,OAAO,KAAK,EAAK,CAAC,EAC7B,CAEJ,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,GACJ,EACA,EACA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SACI,IAAI,SAAiB,EAAS,IAAW,CAC9C,IAAM,EAAO,EAAO,KAAK,EAAI,CACvB,EAAU,CACd,SAAU,CACR,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CACF,CAEK,EAAc,EAAK,kBAAkB,EAAQ,CAC/C,EAAe,EAEb,EAAY,IAAI,EAAU,CAC9B,UACE,EACA,EACA,EACA,CACA,GAAgB,EAAM,OACtB,IAAa,EAAa,CAC1B,EAAS,KAAM,EAAM,EAExB,CAAC,CAIF,EAFqB,EAAS,QAAQ,EAAe,CAInD,EACA,EACC,GAAwB,CACnB,EACF,EACE,EAAgB,SAAS,mBAAoB,CAC3C,MAAO,EACR,CAAC,CACH,CAED,EAAQ,EAAa,EAG1B,EACD,CAEJ,MAAQ,IACN,QAAQ,MAAM,mCAAoC,EAAM,CACjD,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,EAExE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,MADa,EAAO,KAAK,EAAI,CAClB,OAAO,CAAE,eAAgB,GAAM,CAAC,EAE7C,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAyB,GAC7B,EAAO,WAAW,CAChB,IAAK,SAII,sBAAsB,EAAQ,OAAO,GAAG,EAAQ,MAEzD,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,GACJ,EACA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,IAAM,EAAM,EAAU,MAAM,IAAI,CAAC,KAAK,CACtC,GAAI,CAAC,EACH,MAAU,MAAM,qBAAqB,CAGvC,IAAM,EAAO,EAAO,KAAK,EAAI,CAE7B,OAAO,IAAI,SACR,EAAS,IAAW,CACnB,IAAM,EAAS,EAAK,kBAAkB,CACpC,UAAW,GACX,OAAQ,EACT,CAAC,CAEF,EAAO,GAAG,QAAS,EAAO,CAC1B,EAAO,GAAG,aAAgB,CACxB,EAAQ,CACN,UAAW,EAAQ,EAAQ,EAAM,QAAU,EAAQ,GACnD,cAAe,EAAQ,EAAM,OAC9B,CAAC,EACF,CAEF,EAAO,IAAI,OAAO,KAAK,EAAM,CAAC,EAEjC,EAEH,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,EAAmB,GACvB,EAAO,QAAQ,SAAY,CACzB,GAAI,CACF,IAAM,EAAM,EAAU,MAAM,IAAI,CAAC,KAAK,CACtC,GAAI,CAAC,EACH,MAAU,MAAM,qBAAqB,CAIvC,GAAM,CAAC,GAAY,MADN,EAAO,KAAK,EAAI,CACC,aAAa,CAE3C,MA
AO,CACL,cAAe,EAAS,KACpB,OAAO,SAAS,GAAG,EAAS,OAAQ,GAAG,CACvC,EACJ,UAAW,GACZ,MACc,CAEf,MAAO,CAAE,cAAe,EAAG,UAAW,GAAO,GAE/C,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAM,EAAU,MAAM,IAAI,CAAC,KAAK,CACtC,GAAI,CAAC,EACH,MAAU,MAAM,qBAAqB,CAIvC,MADa,EAAO,KAAK,EAAI,CAClB,OAAO,CAAE,eAAgB,GAAM,CAAC,EAE7C,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,GACJ,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAU,EAAW,IAAK,GAAQ,EAAO,KAAK,EAAI,CAAC,CACnD,EAAc,EAAO,KAAK,EAAe,CAU/C,OARA,MAAM,EAAO,QAAQ,EAAS,EAAY,CAEtC,GAAS,UACX,MAAM,EAAY,YAAY,CAC5B,SAAU,EAAQ,SACnB,CAAC,CAGG,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAgFJ,MAAO,CACL,OAAQ,EAAO,OACf,YACA,gBAzBuB,GAChB,EAAO,WAAW,CACvB,IAAK,SAAY,CACf,GAAM,CAAC,GAAU,MAAM,EAAO,KAAK,EAAI,CAAC,UAAU,CAClD,OAAO,IAAI,WAAW,EAAO,EAE/B,MAAQ,GAEJ,GACA,OAAO,GAAU,UACjB,SAAU,GACV,EAAM,OAAS,IAER,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,kBAAmB,CACjD,MAAO,EACR,CAAC,CAEL,CAAC,CAOF,oBACA,eACA,YACA,sBACA,iCArFA,EACA,EACA,EACA,EACA,EACA,EAAW,KAEX,EAAO,IAAI,WAAa,CACtB,GAAI,CAAC,EAEH,OAAO,MAAO,EACZ,EACA,EACA,EACA,EACA,EACD,CAIH,IAAM,EAAW,GAAG,EAAI,QAClB,EAAe,MAAO,EAC1B,EACA,EACA,EACA,EACA,EACD,CAQD,OALA,MAAO,EAAe,CAAC,EAAK,EAAS,CAAE,EAAK,EAAQ,CAGpD,MAAO,EAAa,EAAS,CAEtB,GACP,CAkDF,eACA,wBACA,cACA,kBACA,eACA,iBACA,oBArDA,EACA,EACA,IACG,EAAU,GAAG,EAAI,MAAO,EAAM,EAAQ,CAmDzC,mBAjD0B,GAC1B,EAAO,IAAI,WAAa,CACtB,GAAI,CACF,OAAO,MAAO,EAAU,GAAG,EAAI,MAAM,MAC/B,CACN,SAEF,CA2CF,sBAzC6B,GAAgB,EAAa,GAAG,EAAI,MAAM,CA0CxE,CAGH,MAAa,EAAwB,GACnC,EAAM,QAAQ,EAAkB,EAAsB,EAAO,CAAC,CCzahE,SAAS,EAAoB,EAAyB,CACpD,GAAI,CAAC,EAAO,YACV,MAAU,MAAM,sDAAsD,CAGxE,IAAM,EAAU,+CAA+C,EAAO,SAChE,EAAY,sDAAsD,EAAO,OAAO,IAChF,EAAc,EAAO,YAErB,OAAwB,CAC5B,cAAe,UAAU,IACzB,eAAgB,mBACjB,EAEK,EAAa,GACjB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,CAAC,YACxC,CACE,QAAS,CACP,cAAe,UAAU,IAC1B,CACF,CACF,CAED,GAAI,CAAC,EAAS,GAIZ,MAHI,EAAS,SAAW,IACZ,MAAM,iBAAiB,CAEzB,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAEpE,GAAI,CAAC,EAAS,KACZ,MAAU,MAAM,iBAAiB,CAGnC,OAAO,EAAS,MAElB,MAAQ,GACF,aAAiB,OAAS,EAAM,QAAQ,SAAS,YAAY,CACxD,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAqB,GACzB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,GACvC,CACE,QAAS,GAAgB,CAC1B,CACF,CAED,GAAI,CAAC,EAAS,GAIZ,MAHI,EAAS,SAAW,IACZ,MAAM,iBAAiB,CAEzB,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,IAAM,EAAQ,MAAM,EAAS,MAAM,CAWnC,MAAO,CACL,KAAM,EAAK,KACX,OAAQ,EAAK,OACb,KAAM,EAAK,KAAO,OAAO,SAAS,EAAK,KAAM,GAAG,CAAG,IAAA,GACnD,YAAa,EAAK,YAClB,SAAU,EAAK,UAAY,EAAE,CAC7B,WAAY,EAAK,WACjB,YAAa,EAAK,YAClB,QAAS,EAAK,QACf,EAEH,MAAQ,GACF,aAAiB,OAAS,EAAM,QAAQ,SAAS,YAAY,CACxD,EAAgB,SAAS,iBAAiB,CAE5C,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,UACc,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,GACvC,CACE,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,IAC1B,CACF,CACF,EAEe,GAElB,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAEE,GACJ,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,CACf,KAAM,EACN,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CAEK,EAAW,MAAM,MACrB,GAAG,EAAU,yBAAyB,mBAAmB,EAAI,GAC7D,CACE,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,IACzB,eAAgB,EAAS,YACzB,iBAAkB,EAAK,OAAO,UAAU,CACzC,CACK,OACP,CACF,CAED,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAEE,EAAgB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,GACvC,CACE,OAAQ,SACR,QAAS,CACP,cAAe,UAAU,IAC1B,CACF,CACF,CAGD,GAAI,CAAC,EAAS,IAAM,EAAS,SAAW,IACtC,MAAU,M
AAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,EAGtE,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CA6MJ,MAAO,CACL,OAAQ,EAAO,OACf,YACA,gBAtBuB,GACvB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAI,CAAC,YACxC,CACE,QAAS,GAAgB,CAC1B,CACF,CACD,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAEpE,OAAO,IAAI,WAAW,MAAM,EAAS,aAAa,CAAC,EAErD,MAAQ,GACC,EAAgB,SAAS,kBAAmB,CAAE,MAAO,EAAO,CAAC,CAEvE,CAAC,CAMF,oBACA,eACA,YACA,eACA,sBAnN6B,GAC7B,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,CACf,KAAM,EAAQ,IACd,YAAa,EAAQ,aAAe,2BACpC,SAAU,EAAQ,UAAY,EAAE,CACjC,CAEK,EAAW,MAAM,MACrB,GAAG,EAAU,6BAA6B,mBAAmB,EAAQ,IAAI,GACzE,CACE,OAAQ,OACR,QAAS,CACP,cAAe,UAAU,IACzB,eAAgB,mBACjB,CACD,KAAM,KAAK,UAAU,EAAS,CAC/B,CACF,CAED,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,IAAM,EAAqB,EAAS,QAAQ,IAAI,WAAW,CAC3D,GAAI,CAAC,EACH,MAAU,MAAM,yBAAyB,CAG3C,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAiLF,aA9KA,EACA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAM,EAAQ,EAAM,OAAS,EAC7B,EAAe,EACjB,SAAS,EAAM,GAAG,EAAI,GAAG,IACzB,SAAS,EAAM,GAAG,EAAI,IAEpB,EAAW,MAAM,MAAMM,EAAW,CACtC,OAAQ,MACR,QAAS,CACP,iBAAkB,EAAM,OAAO,UAAU,CACzC,gBAAiB,EAClB,CACD,KAAM,EACP,CAAC,CAGI,EAAY,EAAS,SAAW,KAAO,EAAS,SAAW,IAEjE,GAAI,CAAC,GAAa,EAAS,SAAW,IACpC,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,MAAO,CACL,YACA,cAAe,EAAM,EACtB,EAEH,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CA2IF,gBAzIuB,GACvB,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAW,MAAM,MAAMA,EAAW,CACtC,OAAQ,MACR,QAAS,CACP,gBAAiB,YAClB,CACF,CAAC,CAEF,GAAI,EAAS,SAAW,IAAK,CAE3B,IAAM,EAAQ,EAAS,QAAQ,IAAI,QAAQ,CAK3C,MAAO,CACL,cALoB,EAClB,OAAO,SAAS,EAAM,MAAM,IAAI,CAAC,GAAI,GAAG,CAAG,EAC3C,EAIF,UAAW,GACZ,SACQ,EAAS,SAAW,KAAO,EAAS,SAAW,IAExD,MAAO,CACL,cAAe,EACf,UAAW,GACZ,MAED,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,EAGtE,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CAwGF,aAtGoB,GACpB,EAAO,WAAW,CAChB,IAAK,SAAY,CAEf,MAAM,MAAMA,EAAW,CACrB,OAAQ,SACT,CAAC,EAEJ,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAAE,MAAO,EAAO,CAAC,CAErE,CAAC,CA4FF,gBAzFA,EACA,EACA,IAEA,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,IAAM,EAAiB,CACrB,KAAM,yBACN,cAAe,EAAW,IAAK,IAAS,CAAE,KAAM,EAAK,EAAE,CACvD,YAAa,CACX,KAAM,EACN,YAAa,GAAS,aAAe,2BACrC,SAAU,GAAS,UAAY,EAAE,CAClC,CACF,CAEK,EAAW,MAAM,MACrB,GAAG,EAAQ,KAAK,mBAAmB,EAAe,CAAC,UACnD,CACE,OAAQ,OACR,QAAS,GAAgB,CACzB,KAAM,KAAK,UAAU,EAAe,CACrC,CACF,CAED,GAAI,CAAC,EAAS,GACZ,MAAU,MAAM,QAAQ,EAAS,OAAO,IAAI,EAAS,aAAa,CAGpE,OAAO,GAET,MAAQ,GACC,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAExE,CAAC,CAwDF,oBArDA,EACA,EACA,IACG,EAAU,GAAG,EAAI,MAAO,EAAM,EAAQ,CAmDzC,mBAjD0B,GAC1B,EAAO,WAAW,CAChB,IAAK,SAAY,CACf,GAAI,CACF,OAAO,MAAM,EAAU,GAAG,EAAI,MAAM,CAAC,KAAK,EAAO,WAAW,MACtD,CACN,SAGJ,UACS,EAAgB,SAAS,gBAAgB,CAEnD,CAAC,CAsCF,sBApC6B,GAAgB,EAAa,GAAG,EAAI,MAAM,CAqCxE,CAGH,MAAa,EAAsB,GACjC,EAAM,QAAQ,EAAkB,EAAoB,EAAO,CAAC,CC1X9D,SAAS,EACP,EAC+B,CAC/B,MAAO,CACL,KAAM,EAAO,MAAM,UAAU,EAAI,KACjC,eAAgB,GAAG,EAAO,iBAC1B,OAAQ,EAAO,QAAQ,UAAU,EAAI,IACrC,SAAU,KAAK,UAAU,EAAO,SAAS,CACzC,QAAS,KAAK,UAAU,EAAO,QAAQ,CACxC,CAGH,MAAM,GACJ,EACA,EACA,IAEO,EAAO,IAAI,WAAa,CAC7B,GAAI,CACF,IAAM,EAAW,MAAO,EAAU,kBAAkB,EAAG,CACjD,EAAO,MAAO,EAAQ,IAAI,EAAG,CAEnC,MAAO,CACL,KACA,KAAM,EAAS,KACf,OAAQ,EAAS,MAAQ,EACzB,SAAU,EAAS,SACnB,QAAS,CACP,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EACN,OAAQ,EAAU,OACnB,CACF,OACM,EAAO,CACd,GAAI,aAAiB,GAAmB,EAAM,OAAS,iBACrD,OAAO,MAAO,EAAO,KAAK,EAAM,CAElC,MAAM,IAER,CAGJ,SAAgBC,GAAiB,CAC/B,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAY,MAAO,EACnB,EAAU,MAAO,EAEjB,OACG,CACL,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,qBAAs
B,EACtB,aAAc,IAAA,GACd,aAAc,IAAA,GACd,SAAU,IAAA,GACV,iBAAkB,EAAI,KAAO,KAC7B,sBAAuB,GACxB,EAsBH,MAAO,CACL,OAAQ,EAAU,OAClB,OAAS,GACA,EAAO,IAAI,WAAa,CAC7B,GAAI,CAAC,EAAK,GACR,OAAO,MAAO,EAAO,KACnB,EAAgB,SAAS,iBAAiB,CAC3C,CAGH,EAAK,QAAU,CACb,GAAI,EAAK,QAAQ,GACjB,KAAM,EAAK,QAAQ,KACnB,KAAM,EAAK,GACX,OAAQ,EAAU,OACnB,CAGD,IAAM,EAAU,CACd,OAAQ,EAAU,OAClB,IAAK,EAAK,GACV,YACE,EAAK,UAAU,aAAa,UAAU,EACtC,2BACF,SAAU,EAAoB,EAAK,CACpC,CAGD,OADA,MAAO,EAAU,UAAU,EAAK,GAAI,IAAI,WAAe,EAAQ,CACxD,GACP,CAGJ,OAAS,GACA,EAAU,aAAa,EAAQ,CAGxC,OACE,EACA,IAIO,EAAO,IAAI,WAAa,CAC7B,GAAM,CAAE,UAAS,SAAQ,OAAQ,GAAiB,EAC5C,CAAE,cAAe,EAGjB,EAAS,MAAO,EAAU,EAAS,EAAS,EAAU,CAE5D,EAAO,OAAS,EAEhB,MAAO,EAAQ,IAAI,EAAS,EAAqB,CAEjD,IAAM,EAAU,CACd,OAAQ,EAAU,OAClB,IAAK,EACL,YACE,EAAO,UAAU,aAAe,2BAClC,SAAU,EAAoB,EAAqB,CACpD,CAGK,EAAiB,EAAO,iBAAiB,EAAa,CAG5D,GAAI,EAAU,gCAAiC,CAC7C,IAAM,EAAW,EAAO,OAAS,EAEjC,OAAO,MAAO,EAAU,gCACtB,EACA,EAAO,OACP,EACA,EACA,EACA,EACD,KACI,CAEL,IAAM,EAAS,EAAe,WAAW,CACnC,EAAuB,EAAE,CAC3B,EAAa,EAGjB,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAO,EAAO,YACpC,EAAO,MAAM,CACd,CACD,GAAI,EAAM,MAEV,EAAO,KAAK,EAAM,CAClB,IAAM,EAAY,EAAM,WACxB,GAAc,EACd,IAAa,EAAW,CAI1B,IAAM,EAAgB,IAAI,WAAW,EAAW,CAC5C,EAAW,EACf,IAAK,IAAM,KAAS,EAClB,EAAc,IAAI,EAAO,EAAS,CAClC,GAAY,EAAM,WAIpB,GAAI,EAAO,SAAW,EAEpB,MAAO,EAAU,UAAU,EAAS,EAAe,EAAQ,KACtD,CAEL,IAAM,EAAW,GAAG,EAAQ,QAG5B,MAAO,EAAU,mBACf,EACA,EACA,EACD,CAGD,MAAO,EAAU,eACf,CAAC,EAAS,EAAS,CACnB,EACA,EACD,CAGD,MAAO,EAAU,sBAAsB,EAAS,CAGlD,OAAO,IAET,CAGJ,kBACA,uBAzJA,GACkC,CAClC,IAAM,EAAe,GAAiB,CAEhC,OAAgB,CACpB,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAa,wBACtB,IAAK,SACH,MAAO,GACT,QACE,MAAO,OAET,CAEJ,OAAO,EAAO,QAAQ,EAAO,EA2I7B,KAAO,GACE,EAAO,IAAI,WAAa,CAE7B,OADe,MAAO,EAAU,gBAAgB,EAAQ,EAExD,CAEL,EACD,CAGJ,MAAa,EAAgB,GAC3BA,GAAgB,CAAC,KAAK,EAAO,QAAQ,EAAmB,EAAO,CAAC,CAAC,CAEtD,EAAkB,GAC7BA,GAAgB,CAAC,KAAK,EAAO,QAAQ,EAAqB,EAAO,CAAC,CAAC"}
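The tail of the updated source map above ends with the usage fragment embedded in the bundled sources, createGCSStore().pipe(Effect.provide(GCSClientNodeJSLayer(config))). The sketch below illustrates, under stated assumptions, how that wiring might look in a consuming Effect program; the GCSClientNodeJSLayer export, the config shape, and any additional required layers are inferred only from that fragment and are not verified against the published API.

import { Effect } from "effect";
// Assumed exports: only the names visible in the source-map fragment are used here.
import { createGCSStore, GCSClientNodeJSLayer } from "@uploadista/data-store-gcs";

// Hypothetical configuration; the real option names may differ.
const config = {
  bucketName: "my-uploads-bucket",
  keyFilename: "./service-account.json",
};

// Mirrors the fragment from the map: build the store effect and satisfy its
// GCS client requirement with the Node.js layer.
const storeEffect = createGCSStore().pipe(
  Effect.provide(GCSClientNodeJSLayer(config)),
);

// Running the effect would yield the data store instance (sketch only; other
// requirements, such as a key-value store layer, may still need to be provided).
const store = await Effect.runPromise(storeEffect);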
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@uploadista/data-store-gcs",
   "type": "module",
-  "version": "0.0
+  "version": "0.1.0-beta.5",
   "description": "Google Cloud Storage data store for Uploadista",
   "license": "MIT",
   "author": "Uploadista",
@@ -15,26 +15,26 @@
   },
   "dependencies": {
     "@google-cloud/storage": "7.18.0",
-    "@uploadista/
-    "@uploadista/
+    "@uploadista/core": "0.1.0-beta.5",
+    "@uploadista/observability": "0.1.0-beta.5"
   },
   "peerDependencies": {
     "effect": "^3.0.0"
   },
   "devDependencies": {
     "@effect/vitest": "0.27.0",
-    "effect": "3.19.
-    "tsdown": "0.
-    "vitest": "4.0.
-    "@uploadista/kv-store-memory": "0.0
-    "@uploadista/typescript-config": "0.0
+    "effect": "3.19.14",
+    "tsdown": "0.19.0",
+    "vitest": "4.0.17",
+    "@uploadista/kv-store-memory": "0.1.0-beta.5",
+    "@uploadista/typescript-config": "0.1.0-beta.5"
   },
   "scripts": {
     "build": "tsc --noEmit && tsdown",
     "check": "biome check --write ./src",
     "format": "biome format --write ./src",
     "lint": "biome lint --write ./src",
-    "test": "vitest",
+    "test": "vitest run",
     "test:run": "vitest run",
     "test:watch": "vitest --watch",
     "typecheck": "tsc --noEmit"