@uploadista/data-store-gcs 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/.turbo/turbo-build.log +5 -0
  2. package/.turbo/turbo-check.log +5 -0
  3. package/LICENSE +21 -0
  4. package/README.md +479 -0
  5. package/dist/examples.d.ts +44 -0
  6. package/dist/examples.d.ts.map +1 -0
  7. package/dist/examples.js +82 -0
  8. package/dist/gcs-store-rest.d.ts +16 -0
  9. package/dist/gcs-store-rest.d.ts.map +1 -0
  10. package/dist/gcs-store-rest.js +188 -0
  11. package/dist/gcs-store-v2.d.ts +13 -0
  12. package/dist/gcs-store-v2.d.ts.map +1 -0
  13. package/dist/gcs-store-v2.js +190 -0
  14. package/dist/gcs-store.d.ts +12 -0
  15. package/dist/gcs-store.d.ts.map +1 -0
  16. package/dist/gcs-store.js +282 -0
  17. package/dist/index.d.ts +4 -0
  18. package/dist/index.d.ts.map +1 -0
  19. package/dist/index.js +5 -0
  20. package/dist/services/gcs-client-nodejs.service.d.ts +4 -0
  21. package/dist/services/gcs-client-nodejs.service.d.ts.map +1 -0
  22. package/dist/services/gcs-client-nodejs.service.js +312 -0
  23. package/dist/services/gcs-client-rest.service.d.ts +4 -0
  24. package/dist/services/gcs-client-rest.service.d.ts.map +1 -0
  25. package/dist/services/gcs-client-rest.service.js +299 -0
  26. package/dist/services/gcs-client.service.d.ts +56 -0
  27. package/dist/services/gcs-client.service.d.ts.map +1 -0
  28. package/dist/services/gcs-client.service.js +3 -0
  29. package/dist/services/index.d.ts +4 -0
  30. package/dist/services/index.d.ts.map +1 -0
  31. package/dist/services/index.js +3 -0
  32. package/package.json +31 -0
  33. package/src/gcs-store-v2.ts +286 -0
  34. package/src/gcs-store.ts +398 -0
  35. package/src/index.ts +6 -0
  36. package/src/services/gcs-client-nodejs.service.ts +435 -0
  37. package/src/services/gcs-client-rest.service.ts +406 -0
  38. package/src/services/gcs-client.service.ts +117 -0
  39. package/src/services/index.ts +3 -0
  40. package/tsconfig.json +12 -0
  41. package/tsconfig.tsbuildinfo +1 -0
package/src/services/gcs-client-nodejs.service.ts
@@ -0,0 +1,435 @@
+ import { pipeline, Readable, Transform } from "node:stream";
+ import { type Bucket, Storage } from "@google-cloud/storage";
+ import { UploadistaError } from "@uploadista/core/errors";
+ import { Effect, Layer } from "effect";
+ import {
+   type GCSClientConfig,
+   GCSClientService,
+   type GCSObjectMetadata,
+   type GCSOperationContext,
+ } from "./gcs-client.service";
+
+ function createNodeJSGCSClient(config: GCSClientConfig) {
+   // Node.js-only implementation backed by the official @google-cloud/storage SDK
+
+   const storage = new Storage({
+     keyFilename: config.keyFilename,
+     credentials: config.credentials,
+     projectId: config.projectId,
+   });
+
+   const bucket: Bucket = storage.bucket(config.bucket);
+
+   const getObject = (key: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const file = bucket.file(key);
+         const stream = file.createReadStream();
+
+         // Convert Node.js stream to Web ReadableStream
+         return new ReadableStream({
+           start(controller) {
+             stream.on("data", (chunk) => {
+               controller.enqueue(new Uint8Array(chunk));
+             });
+
+             stream.on("end", () => {
+               controller.close();
+             });
+
+             stream.on("error", (error) => {
+               controller.error(error);
+             });
+           },
+         });
+       },
+       catch: (error) => {
+         if (
+           error &&
+           typeof error === "object" &&
+           "code" in error &&
+           error.code === 404
+         ) {
+           return UploadistaError.fromCode("FILE_NOT_FOUND");
+         }
+         return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+       },
+     });
+
+   const getObjectMetadata = (key: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const file = bucket.file(key);
+         const [metadata] = await file.getMetadata();
+
+         const parseMetadata = (
+           meta: Record<string, string | number | boolean | null> | undefined,
+         ) => {
+           if (!meta) return {};
+           if (typeof meta.metadata === "string") {
+             try {
+               return JSON.parse(meta.metadata);
+             } catch {
+               return meta;
+             }
+           }
+           return meta;
+         };
+
+         return {
+           name: metadata.name,
+           bucket: metadata.bucket,
+           size: metadata.size
+             ? Number.parseInt(`${metadata.size}`, 10)
+             : undefined,
+           contentType: metadata.contentType,
+           metadata: parseMetadata(metadata.metadata),
+           generation: metadata.generation,
+           timeCreated: metadata.timeCreated,
+           updated: metadata.updated,
+         } as GCSObjectMetadata;
+       },
+       catch: (error) => {
+         if (
+           error &&
+           typeof error === "object" &&
+           "code" in error &&
+           error.code === 404
+         ) {
+           return UploadistaError.fromCode("FILE_NOT_FOUND");
+         }
+         return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+       },
+     });
+
+   const objectExists = (key: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const file = bucket.file(key);
+         const [exists] = await file.exists();
+         return exists;
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+       },
+     });
+
+   const putObject = (
+     key: string,
+     body: Uint8Array,
+     context?: Partial<GCSOperationContext>,
+   ) =>
+     Effect.tryPromise({
+       try: async () => {
+         return new Promise<string>((resolve, reject) => {
+           const file = bucket.file(key);
+           const options = {
+             metadata: {
+               contentType: context?.contentType || "application/octet-stream",
+               metadata: context?.metadata || {},
+             },
+           };
+
+           const stream = file.createWriteStream(options);
+
+           stream.on("error", reject);
+           stream.on("finish", () => {
+             resolve(file.name);
+           });
+
+           stream.end(Buffer.from(body));
+         });
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+       },
+     });
+
+   const putObjectFromStream = (
+     key: string,
+     offset: number,
+     readableStream: ReadableStream,
+     context?: Partial<GCSOperationContext>,
+     onProgress?: (bytesWritten: number) => void, // Called with cumulative bytes written, including the starting offset
+   ) =>
+     Effect.tryPromise({
+       try: async () => {
+         return new Promise<number>((resolve, reject) => {
+           const file = bucket.file(key);
+           const options = {
+             metadata: {
+               contentType: context?.contentType || "application/octet-stream",
+               metadata: context?.metadata || {},
+             },
+           };
+
+           const writeStream = file.createWriteStream(options);
+           let bytesWritten = offset;
+
+           const transform = new Transform({
+             transform(
+               chunk: Buffer,
+               _: string,
+               callback: (error?: Error | null, data?: Buffer) => void,
+             ) {
+               bytesWritten += chunk.length;
+               onProgress?.(bytesWritten);
+               callback(null, chunk);
+             },
+           });
+
+           const nodeReadable = Readable.fromWeb(readableStream);
+
+           pipeline(
+             nodeReadable,
+             transform,
+             writeStream,
+             (error: Error | null) => {
+               if (error) {
+                 reject(
+                   UploadistaError.fromCode("FILE_WRITE_ERROR", {
+                     cause: error,
+                   }),
+                 );
+               } else {
+                 resolve(bytesWritten);
+               }
+             },
+           );
+         });
+       },
+       catch: (error) => {
+         console.error("error putting object from stream", error);
+         return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+       },
+     });
+
+   const deleteObject = (key: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const file = bucket.file(key);
+         await file.delete({ ignoreNotFound: true });
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+       },
+     });
+
+   const createResumableUpload = (context: GCSOperationContext) =>
+     Effect.tryPromise({
+       try: async () => {
+         // For Node.js, we'll use a simplified approach
+         // In production, you'd want to implement proper resumable uploads
+         // Return a pseudo-URL that we can use to identify this upload
+         return `resumable://nodejs/${context.bucket}/${context.key}`;
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+       },
+     });
+
+   const uploadChunk = (
+     uploadUrl: string,
+     chunk: Uint8Array,
+     start: number,
+     total?: number,
+   ) =>
+     Effect.tryPromise({
+       try: async () => {
+         // Extract key from pseudo-URL
+         const key = uploadUrl.split("/").pop();
+         if (!key) {
+           throw new Error("Invalid upload URL");
+         }
+
+         const file = bucket.file(key);
+
+         return new Promise<{ completed: boolean; bytesUploaded: number }>(
+           (resolve, reject) => {
+             const stream = file.createWriteStream({
+               resumable: true,
+               offset: start,
+             });
+
+             stream.on("error", reject);
+             stream.on("finish", () => {
+               resolve({
+                 completed: total ? start + chunk.length >= total : false,
+                 bytesUploaded: start + chunk.length,
+               });
+             });
+
+             stream.end(Buffer.from(chunk));
+           },
+         );
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+       },
+     });
+
+   const getUploadStatus = (uploadUrl: string) =>
+     Effect.promise(async () => {
+       try {
+         const key = uploadUrl.split("/").pop();
+         if (!key) {
+           throw new Error("Invalid upload URL");
+         }
+
+         const file = bucket.file(key);
+         const [metadata] = await file.getMetadata();
+
+         return {
+           bytesUploaded: metadata.size
+             ? Number.parseInt(`${metadata.size}`, 10)
+             : 0,
+           completed: true, // Simplified for now
+         };
+       } catch (_error) {
+         // If file doesn't exist, upload hasn't started
+         return { bytesUploaded: 0, completed: false };
+       }
+     });
+
+   const cancelUpload = (uploadUrl: string) =>
+     Effect.tryPromise({
+       try: async () => {
+         const key = uploadUrl.split("/").pop();
+         if (!key) {
+           throw new Error("Invalid upload URL");
+         }
+
+         const file = bucket.file(key);
+         await file.delete({ ignoreNotFound: true });
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+       },
+     });
+
+   const composeObjects = (
+     sourceKeys: string[],
+     destinationKey: string,
+     context?: Partial<GCSOperationContext>,
+   ) =>
+     Effect.tryPromise({
+       try: async () => {
+         const sources = sourceKeys.map((key) => bucket.file(key));
+         const destination = bucket.file(destinationKey);
+
+         await bucket.combine(sources, destination);
+
+         if (context?.metadata) {
+           await destination.setMetadata({
+             metadata: context.metadata,
+           });
+         }
+
+         return destinationKey;
+       },
+       catch: (error) => {
+         return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+       },
+     });
+
+   const putObjectFromStreamWithPatching = (
+     key: string,
+     offset: number,
+     readableStream: ReadableStream,
+     context?: Partial<GCSOperationContext>,
+     onProgress?: (bytesWritten: number) => void, // Called with cumulative bytes written, including the starting offset
+     isAppend = false,
+   ) =>
+     Effect.gen(function* () {
+       if (!isAppend) {
+         // Direct upload for new files
+         return yield* putObjectFromStream(
+           key,
+           offset,
+           readableStream,
+           context,
+           onProgress,
+         );
+       }
+
+       // For append operations, create a patch file and then combine
+       const patchKey = `${key}_patch`;
+       const bytesWritten = yield* putObjectFromStream(
+         patchKey,
+         offset,
+         readableStream,
+         context,
+         onProgress,
+       );
+
+       // Combine original with patch
+       yield* composeObjects([key, patchKey], key, context);
+
+       // Clean up patch file
+       yield* deleteObject(patchKey);
+
+       return bytesWritten;
+     });
+
+   const putTemporaryObject = (
+     key: string,
+     body: Uint8Array,
+     context?: Partial<GCSOperationContext>,
+   ) => putObject(`${key}_tmp`, body, context);
+
+   const getTemporaryObject = (key: string) =>
+     Effect.gen(function* () {
+       // A try/catch around yield* does not intercept Effect failures, so
+       // recover explicitly: a missing temporary object resolves to undefined.
+       return yield* getObject(`${key}_tmp`).pipe(
+         Effect.catchAll(() => Effect.succeed(undefined)),
+       );
+     });
+
+   const deleteTemporaryObject = (key: string) => deleteObject(`${key}_tmp`);
+
+   const getObjectBuffer = (key: string) => {
+     return Effect.tryPromise({
+       try: async () => {
+         const [buffer] = await bucket.file(key).download();
+         return new Uint8Array(buffer);
+       },
+       catch: (error) => {
+         if (
+           error &&
+           typeof error === "object" &&
+           "code" in error &&
+           error.code === 404
+         ) {
+           return UploadistaError.fromCode("FILE_NOT_FOUND");
+         }
+         return UploadistaError.fromCode("FILE_READ_ERROR", {
+           cause: error,
+         });
+       },
+     });
+   };
+
+   return {
+     bucket: config.bucket,
+     getObject,
+     getObjectBuffer,
+     getObjectMetadata,
+     objectExists,
+     putObject,
+     putObjectFromStream,
+     putObjectFromStreamWithPatching,
+     deleteObject,
+     createResumableUpload,
+     uploadChunk,
+     getUploadStatus,
+     cancelUpload,
+     composeObjects,
+     putTemporaryObject,
+     getTemporaryObject,
+     deleteTemporaryObject,
+   };
+ }
+
+ export const GCSClientNodeJSLayer = (config: GCSClientConfig) =>
+   Layer.succeed(GCSClientService, createNodeJSGCSClient(config));
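
A minimal usage sketch of wiring the layer into an Effect program, assuming GCSClientService is a standard Effect Context.Tag exported by ./gcs-client.service and that the relative import paths are used from inside the package source; the bucket name, project id, and key file path are placeholders:

import { Effect } from "effect";
import { GCSClientService } from "./gcs-client.service";
import { GCSClientNodeJSLayer } from "./gcs-client-nodejs.service";

// Placeholder configuration; supply real values for your project.
const GcsLayer = GCSClientNodeJSLayer({
  bucket: "my-upload-bucket",
  projectId: "my-gcp-project",
  keyFilename: "/path/to/service-account.json",
});

// Resolve the client from the Effect context and exercise a couple of methods.
const program = Effect.gen(function* () {
  const gcs = yield* GCSClientService;
  yield* gcs.putObject("hello.txt", new TextEncoder().encode("hello world"), {
    contentType: "text/plain",
  });
  const exists = yield* gcs.objectExists("hello.txt");
  console.log("object uploaded:", exists);
});

Effect.runPromise(program.pipe(Effect.provide(GcsLayer)));

Because the layer is built with Layer.succeed, the Storage client is constructed eagerly when GCSClientNodeJSLayer(config) is called, and the same instance is shared by every effect that depends on GCSClientService.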