@uploadista/data-store-gcs 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +5 -0
- package/LICENSE +21 -0
- package/README.md +479 -0
- package/dist/examples.d.ts +44 -0
- package/dist/examples.d.ts.map +1 -0
- package/dist/examples.js +82 -0
- package/dist/gcs-store-rest.d.ts +16 -0
- package/dist/gcs-store-rest.d.ts.map +1 -0
- package/dist/gcs-store-rest.js +188 -0
- package/dist/gcs-store-v2.d.ts +13 -0
- package/dist/gcs-store-v2.d.ts.map +1 -0
- package/dist/gcs-store-v2.js +190 -0
- package/dist/gcs-store.d.ts +12 -0
- package/dist/gcs-store.d.ts.map +1 -0
- package/dist/gcs-store.js +282 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/services/gcs-client-nodejs.service.d.ts +4 -0
- package/dist/services/gcs-client-nodejs.service.d.ts.map +1 -0
- package/dist/services/gcs-client-nodejs.service.js +312 -0
- package/dist/services/gcs-client-rest.service.d.ts +4 -0
- package/dist/services/gcs-client-rest.service.d.ts.map +1 -0
- package/dist/services/gcs-client-rest.service.js +299 -0
- package/dist/services/gcs-client.service.d.ts +56 -0
- package/dist/services/gcs-client.service.d.ts.map +1 -0
- package/dist/services/gcs-client.service.js +3 -0
- package/dist/services/index.d.ts +4 -0
- package/dist/services/index.d.ts.map +1 -0
- package/dist/services/index.js +3 -0
- package/package.json +31 -0
- package/src/gcs-store-v2.ts +286 -0
- package/src/gcs-store.ts +398 -0
- package/src/index.ts +6 -0
- package/src/services/gcs-client-nodejs.service.ts +435 -0
- package/src/services/gcs-client-rest.service.ts +406 -0
- package/src/services/gcs-client.service.ts +117 -0
- package/src/services/index.ts +3 -0
- package/tsconfig.json +12 -0
- package/tsconfig.tsbuildinfo +1 -0
package/dist/gcs-store.js
ADDED

@@ -0,0 +1,282 @@
+import { PassThrough, pipeline, Readable, Transform } from "node:stream";
+import { Storage } from "@google-cloud/storage";
+import { UploadistaError } from "@uploadista/core/errors";
+import { UploadFileDataStore, UploadFileKVStore, } from "@uploadista/core/types";
+import { gcsActiveUploadsGauge as activeUploadsGauge, gcsFileSizeHistogram as fileSizeHistogram, logGCSUploadCompletion, trackGCSError, gcsUploadDurationHistogram as uploadDurationHistogram, gcsUploadErrorsTotal as uploadErrorsTotal, gcsUploadRequestsTotal as uploadRequestsTotal, gcsUploadSuccessTotal as uploadSuccessTotal, withGCSTimingMetrics as withTimingMetrics, withGCSUploadMetrics as withUploadMetrics, } from "@uploadista/observability";
+import { Effect, Layer, Stream } from "effect";
+/**
+ * Convert the Upload object to a format that can be stored in GCS metadata.
+ */
+function stringifyUploadKeys(upload) {
+  return {
+    size: upload.size ?? null,
+    sizeIsDeferred: `${upload.sizeIsDeferred}`,
+    offset: upload.offset,
+    metadata: JSON.stringify(upload.metadata),
+    storage: JSON.stringify(upload.storage),
+  };
+}
+const getUpload = (bucket, id, kvStore) => {
+  return Effect.gen(function* () {
+    try {
+      const [metadata] = yield* Effect.promise(() => bucket.file(id).getMetadata());
+      const { size, metadata: meta } = metadata;
+      const file = yield* kvStore.get(id);
+      return {
+        id,
+        size: size ? Number.parseInt(`${size}`, 10) : undefined,
+        offset: metadata.size ? Number.parseInt(`${metadata.size}`, 10) : 0, // `size` is set by GCS
+        metadata: meta ? meta : undefined,
+        storage: {
+          id: file.storage.id,
+          type: file.storage.type,
+          path: id,
+          bucket: bucket.name,
+        },
+      };
+    }
+    catch (error) {
+      if (error &&
+        typeof error === "object" &&
+        "code" in error &&
+        error.code === 404) {
+        return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
+      }
+      throw error;
+    }
+  });
+};
+export function createGCSStore({ keyFilename, credentials, bucketName, }) {
+  return Effect.gen(function* () {
+    const kvStore = yield* UploadFileKVStore;
+    return gcsStore({ keyFilename, credentials, bucketName, kvStore });
+  });
+}
+export function gcsStore({ keyFilename, credentials, bucketName, kvStore, }) {
+  const storage = new Storage(keyFilename ? { keyFilename } : credentials ? { credentials } : {});
+  const bucket = storage.bucket(bucketName);
+  const getCapabilities = () => {
+    return {
+      supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3
+      supportsConcatenation: true, // Can combine files using bucket.combine
+      supportsDeferredLength: true,
+      supportsResumableUploads: true, // Through patch files
+      supportsTransactionalUploads: false,
+      maxConcurrentUploads: 1, // Sequential operations
+      minChunkSize: undefined,
+      maxChunkSize: undefined,
+      maxParts: undefined,
+      optimalChunkSize: 8 * 1024 * 1024, // 8MB default
+      requiresOrderedChunks: true, // Due to combine operation
+      requiresMimeTypeValidation: true,
+      maxValidationSize: undefined, // no size limit
+    };
+  };
+  const validateUploadStrategy = (strategy) => {
+    const capabilities = getCapabilities();
+    const result = (() => {
+      switch (strategy) {
+        case "parallel":
+          return capabilities.supportsParallelUploads;
+        case "single":
+          return true;
+        default:
+          return false;
+      }
+    })();
+    return Effect.succeed(result);
+  };
+  return {
+    bucket: bucket.name,
+    create: (file) => {
+      return Effect.gen(function* () {
+        yield* uploadRequestsTotal(Effect.succeed(1));
+        yield* activeUploadsGauge(Effect.succeed(1));
+        yield* fileSizeHistogram(Effect.succeed(file.size || 0));
+        if (!file.id) {
+          yield* uploadErrorsTotal(Effect.succeed(1));
+          return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
+        }
+        const gcs_file = bucket.file(file.id);
+        file.storage = {
+          id: file.storage.id,
+          type: file.storage.type,
+          path: file.id,
+          bucket: bucket.name,
+        };
+        console.log("file", gcs_file.id);
+        const options = {
+          metadata: {
+            metadata: {
+              ...stringifyUploadKeys(file),
+            },
+          },
+        };
+        if (file.metadata?.contentType) {
+          options.contentType = file.metadata.contentType.toString();
+        }
+        return yield* Effect.tryPromise({
+          try: () => {
+            console.log("creating file", gcs_file.id);
+            return new Promise((resolve, reject) => {
+              const fake_stream = new PassThrough();
+              fake_stream.end();
+              fake_stream
+                .pipe(gcs_file.createWriteStream(options))
+                .on("error", reject)
+                .on("finish", () => {
+                  resolve(file);
+                });
+            });
+          },
+          catch: (error) => {
+            console.error("error creating file", error);
+            Effect.runSync(trackGCSError("create", error, {
+              upload_id: file.id,
+              bucket: bucket.name,
+            }));
+            return UploadistaError.fromCode("FILE_WRITE_ERROR", {
+              cause: error,
+            });
+          },
+        });
+      });
+    },
+    read: (file_id) => {
+      return Effect.tryPromise({
+        try: async () => {
+          const [buffer] = await bucket.file(file_id).download();
+          return new Uint8Array(buffer);
+        },
+        catch: (error) => {
+          Effect.runSync(trackGCSError("read", error, {
+            upload_id: file_id,
+            bucket: bucket.name,
+          }));
+          if (error &&
+            typeof error === "object" &&
+            "code" in error &&
+            error.code === 404) {
+            return UploadistaError.fromCode("FILE_NOT_FOUND");
+          }
+          return UploadistaError.fromCode("FILE_READ_ERROR", {
+            cause: error,
+          });
+        },
+      });
+    },
+    remove: (file_id) => {
+      return Effect.gen(function* () {
+        try {
+          yield* Effect.promise(() => bucket.file(file_id).delete());
+          yield* activeUploadsGauge(Effect.succeed(-1));
+        }
+        catch (error) {
+          Effect.runSync(trackGCSError("remove", error, {
+            upload_id: file_id,
+            bucket: bucket.name,
+          }));
+          throw error;
+        }
+      });
+    },
+    /**
+     * Get the file metadata from the object in GCS, then upload a new version
+     * passing through the metadata to the new version.
+     */
+    write: (options, dependencies) => {
+      return withUploadMetrics(options.file_id, withTimingMetrics(uploadDurationHistogram, Effect.gen(function* () {
+        const startTime = Date.now();
+        const { file_id, offset, stream: effectStream } = options;
+        console.log("write", file_id, offset);
+        const { onProgress } = dependencies;
+        // GCS Doesn't persist metadata within versions,
+        // get that metadata first
+        const upload = yield* getUpload(bucket, file_id, kvStore);
+        console.log("upload", upload);
+        return yield* Effect.promise(() => new Promise((resolve, reject) => {
+          const file = bucket.file(file_id);
+          const destination = upload.offset === 0
+            ? file
+            : bucket.file(`${file_id}_patch`);
+          upload.offset = offset;
+          const gcsOptions = {
+            metadata: {
+              metadata: {
+                ...stringifyUploadKeys(upload),
+              },
+            },
+          };
+          const write_stream = destination.createWriteStream(gcsOptions);
+          if (!write_stream) {
+            Effect.runSync(uploadErrorsTotal(Effect.succeed(1)));
+            reject(UploadistaError.fromCode("FILE_WRITE_ERROR"));
+            return;
+          }
+          let bytes_received = upload.offset;
+          // Convert Effect Stream to ReadableStream
+          const readableStream = Stream.toReadableStream(effectStream);
+          const transform = new Transform({
+            transform(chunk, _, callback) {
+              bytes_received += chunk.length;
+              onProgress?.(bytes_received);
+              callback(null, chunk);
+            },
+          });
+          const nodeReadable = Readable.fromWeb(readableStream);
+          pipeline(nodeReadable, transform, write_stream, async (e) => {
+            if (e) {
+              console.error("error writing file", e);
+              Effect.runSync(trackGCSError("write", e, {
+                upload_id: file_id,
+                bucket: bucket.name,
+                offset,
+              }));
+              try {
+                await destination.delete({ ignoreNotFound: true });
+              }
+              finally {
+                reject(UploadistaError.fromCode("FILE_WRITE_ERROR"));
+              }
+            }
+            else {
+              try {
+                if (file !== destination) {
+                  await bucket.combine([file, destination], file);
+                  await Promise.all([
+                    file.setMetadata(gcsOptions.metadata),
+                    destination.delete({ ignoreNotFound: true }),
+                  ]);
+                }
+                // Log completion
+                Effect.runSync(logGCSUploadCompletion(file_id, {
+                  fileSize: upload.size || 0,
+                  totalDurationMs: Date.now() - startTime,
+                  partsCount: 1,
+                  averagePartSize: upload.size,
+                  throughputBps: (upload.size || 0) / (Date.now() - startTime),
+                  retryCount: 0,
+                }));
+                Effect.runSync(uploadSuccessTotal(Effect.succeed(1)));
+                Effect.runSync(activeUploadsGauge(Effect.succeed(-1)));
+                resolve(bytes_received);
+              }
+              catch (error) {
+                console.error(error);
+                Effect.runSync(trackGCSError("write", error, {
+                  upload_id: file_id,
+                  bucket: bucket.name,
+                  operation: "combine",
+                }));
+                reject(UploadistaError.fromCode("FILE_WRITE_ERROR"));
+              }
+            }
+          });
+        }));
+      })));
+    },
+    getCapabilities,
+    validateUploadStrategy,
+  };
+}
+export const GCSStoreLayer = (options) => Layer.effect(UploadFileDataStore, createGCSStore(options));
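Because GCSStoreLayer is built with Layer.effect, constructing the store is itself an Effect, which is what lets createGCSStore read UploadFileKVStore from the context; the layer only becomes runnable once a KV layer is provided alongside it. A minimal wiring sketch in TypeScript, assuming the package root re-exports GCSStoreLayer and that someKVStoreLayer is a hypothetical layer satisfying UploadFileKVStore (bucket name and key-file path are illustrative):

import { Effect, Layer } from "effect";
import { UploadFileDataStore, UploadFileKVStore } from "@uploadista/core/types";
import { GCSStoreLayer } from "@uploadista/data-store-gcs";

// Hypothetical layer supplying UploadFileKVStore (e.g. an in-memory map); not part of this package.
declare const someKVStoreLayer: Layer.Layer<UploadFileKVStore>;

// Satisfy the store's KV requirement up front.
const storeLayer = GCSStoreLayer({
  keyFilename: "/path/to/service-account.json", // or pass `credentials` inline
  bucketName: "my-uploads-bucket",
}).pipe(Layer.provide(someKVStoreLayer));

const program = Effect.gen(function* () {
  const store = yield* UploadFileDataStore;
  const caps = store.getCapabilities();
  // GCS reports no native multipart support, but can concatenate via bucket.combine.
  console.log(caps.supportsParallelUploads, caps.supportsConcatenation); // false, true
});

Effect.runPromise(program.pipe(Effect.provide(storeLayer)));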
package/dist/index.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,cAAc,aAAa,CAAC;AAG5B,OAAO,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9D,cAAc,YAAY,CAAC"}
package/dist/services/gcs-client-nodejs.service.d.ts
ADDED

@@ -0,0 +1,4 @@
+import { Layer } from "effect";
+import { type GCSClientConfig, GCSClientService } from "./gcs-client.service";
+export declare const GCSClientNodeJSLayer: (config: GCSClientConfig) => Layer.Layer<GCSClientService, never, never>;
+//# sourceMappingURL=gcs-client-nodejs.service.d.ts.map
package/dist/services/gcs-client-nodejs.service.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"gcs-client-nodejs.service.d.ts","sourceRoot":"","sources":["../../src/services/gcs-client-nodejs.service.ts"],"names":[],"mappings":"AAGA,OAAO,EAAU,KAAK,EAAE,MAAM,QAAQ,CAAC;AACvC,OAAO,EACL,KAAK,eAAe,EACpB,gBAAgB,EAGjB,MAAM,sBAAsB,CAAC;AAwa9B,eAAO,MAAM,oBAAoB,GAAI,QAAQ,eAAe,gDACI,CAAC"}
package/dist/services/gcs-client-nodejs.service.js
ADDED

@@ -0,0 +1,312 @@
+import { pipeline, Readable, Transform } from "node:stream";
+import { Storage } from "@google-cloud/storage";
+import { UploadistaError } from "@uploadista/core/errors";
+import { Effect, Layer } from "effect";
+import { GCSClientService, } from "./gcs-client.service";
+function createNodeJSGCSClient(config) {
+  // Dynamic import to avoid issues in non-Node environments
+  const storage = new Storage({
+    keyFilename: config.keyFilename,
+    credentials: config.credentials,
+    projectId: config.projectId,
+  });
+  const bucket = storage.bucket(config.bucket);
+  const getObject = (key) => Effect.tryPromise({
+    try: async () => {
+      const file = bucket.file(key);
+      const stream = file.createReadStream();
+      // Convert Node.js stream to Web ReadableStream
+      return new ReadableStream({
+        start(controller) {
+          stream.on("data", (chunk) => {
+            controller.enqueue(new Uint8Array(chunk));
+          });
+          stream.on("end", () => {
+            controller.close();
+          });
+          stream.on("error", (error) => {
+            controller.error(error);
+          });
+        },
+      });
+    },
+    catch: (error) => {
+      if (error &&
+        typeof error === "object" &&
+        "code" in error &&
+        error.code === 404) {
+        return UploadistaError.fromCode("FILE_NOT_FOUND");
+      }
+      return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+    },
+  });
+  const getObjectMetadata = (key) => Effect.tryPromise({
+    try: async () => {
+      const file = bucket.file(key);
+      const [metadata] = await file.getMetadata();
+      const parseMetadata = (meta) => {
+        if (!meta)
+          return {};
+        if (typeof meta.metadata === "string") {
+          try {
+            return JSON.parse(meta.metadata);
+          }
+          catch {
+            return meta;
+          }
+        }
+        return meta;
+      };
+      return {
+        name: metadata.name,
+        bucket: metadata.bucket,
+        size: metadata.size
+          ? Number.parseInt(`${metadata.size}`, 10)
+          : undefined,
+        contentType: metadata.contentType,
+        metadata: parseMetadata(metadata.metadata),
+        generation: metadata.generation,
+        timeCreated: metadata.timeCreated,
+        updated: metadata.updated,
+      };
+    },
+    catch: (error) => {
+      if (error &&
+        typeof error === "object" &&
+        "code" in error &&
+        error.code === 404) {
+        return UploadistaError.fromCode("FILE_NOT_FOUND");
+      }
+      return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+    },
+  });
+  const objectExists = (key) => Effect.tryPromise({
+    try: async () => {
+      const file = bucket.file(key);
+      const [exists] = await file.exists();
+      return exists;
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+    },
+  });
+  const putObject = (key, body, context) => Effect.tryPromise({
+    try: async () => {
+      return new Promise((resolve, reject) => {
+        const file = bucket.file(key);
+        const options = {
+          metadata: {
+            contentType: context?.contentType || "application/octet-stream",
+            metadata: context?.metadata || {},
+          },
+        };
+        const stream = file.createWriteStream(options);
+        stream.on("error", reject);
+        stream.on("finish", () => {
+          resolve(file.name);
+        });
+        stream.end(Buffer.from(body));
+      });
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+    },
+  });
+  const putObjectFromStream = (key, offset, readableStream, context, onProgress) => Effect.tryPromise({
+    try: async () => {
+      return new Promise((resolve, reject) => {
+        const file = bucket.file(key);
+        const options = {
+          metadata: {
+            contentType: context?.contentType || "application/octet-stream",
+            metadata: context?.metadata || {},
+          },
+        };
+        const writeStream = file.createWriteStream(options);
+        let bytesWritten = offset;
+        const transform = new Transform({
+          transform(chunk, _, callback) {
+            bytesWritten += chunk.length;
+            onProgress?.(bytesWritten);
+            callback(null, chunk);
+          },
+        });
+        const nodeReadable = Readable.fromWeb(readableStream);
+        pipeline(nodeReadable, transform, writeStream, (error) => {
+          if (error) {
+            reject(UploadistaError.fromCode("FILE_WRITE_ERROR", {
+              cause: error,
+            }));
+          }
+          else {
+            resolve(bytesWritten);
+          }
+        });
+      });
+    },
+    catch: (error) => {
+      console.error("error putting object from stream", error);
+      return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+    },
+  });
+  const deleteObject = (key) => Effect.tryPromise({
+    try: async () => {
+      const file = bucket.file(key);
+      await file.delete({ ignoreNotFound: true });
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+    },
+  });
+  const createResumableUpload = (context) => Effect.tryPromise({
+    try: async () => {
+      // For Node.js, we'll use a simplified approach
+      // In production, you'd want to implement proper resumable uploads
+      // Return a pseudo-URL that we can use to identify this upload
+      return `resumable://nodejs/${context.bucket}/${context.key}`;
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+    },
+  });
+  const uploadChunk = (uploadUrl, chunk, start, total) => Effect.tryPromise({
+    try: async () => {
+      // Extract key from pseudo-URL
+      const key = uploadUrl.split("/").pop();
+      if (!key) {
+        throw new Error("Invalid upload URL");
+      }
+      const file = bucket.file(key);
+      return new Promise((resolve, reject) => {
+        const stream = file.createWriteStream({
+          resumable: true,
+          offset: start,
+        });
+        stream.on("error", reject);
+        stream.on("finish", () => {
+          resolve({
+            completed: total ? start + chunk.length >= total : false,
+            bytesUploaded: start + chunk.length,
+          });
+        });
+        stream.end(Buffer.from(chunk));
+      });
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+    },
+  });
+  const getUploadStatus = (uploadUrl) => Effect.promise(async () => {
+    try {
+      const key = uploadUrl.split("/").pop();
+      if (!key) {
+        throw new Error("Invalid upload URL");
+      }
+      const file = bucket.file(key);
+      const [metadata] = await file.getMetadata();
+      return {
+        bytesUploaded: metadata.size
+          ? Number.parseInt(`${metadata.size}`, 10)
+          : 0,
+        completed: true, // Simplified for now
+      };
+    }
+    catch (_error) {
+      // If file doesn't exist, upload hasn't started
+      return { bytesUploaded: 0, completed: false };
+    }
+  });
+  const cancelUpload = (uploadUrl) => Effect.tryPromise({
+    try: async () => {
+      const key = uploadUrl.split("/").pop();
+      if (!key) {
+        throw new Error("Invalid upload URL");
+      }
+      const file = bucket.file(key);
+      await file.delete({ ignoreNotFound: true });
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("UNKNOWN_ERROR", { cause: error });
+    },
+  });
+  const composeObjects = (sourceKeys, destinationKey, context) => Effect.tryPromise({
+    try: async () => {
+      const sources = sourceKeys.map((key) => bucket.file(key));
+      const destination = bucket.file(destinationKey);
+      await bucket.combine(sources, destination);
+      if (context?.metadata) {
+        await destination.setMetadata({
+          metadata: context.metadata,
+        });
+      }
+      return destinationKey;
+    },
+    catch: (error) => {
+      return UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error });
+    },
+  });
+  const putObjectFromStreamWithPatching = (key, offset, readableStream, context, onProgress, // Called with incremental bytes per chunk
+  isAppend = false) => Effect.gen(function* () {
+    if (!isAppend) {
+      // Direct upload for new files
+      return yield* putObjectFromStream(key, offset, readableStream, context, onProgress);
+    }
+    // For append operations, create a patch file and then combine
+    const patchKey = `${key}_patch`;
+    const bytesWritten = yield* putObjectFromStream(patchKey, offset, readableStream, context, onProgress);
+    // Combine original with patch
+    yield* composeObjects([key, patchKey], key, context);
+    // Clean up patch file
+    yield* deleteObject(patchKey);
+    return bytesWritten;
+  });
+  const putTemporaryObject = (key, body, context) => putObject(`${key}_tmp`, body, context);
+  const getTemporaryObject = (key) => Effect.gen(function* () {
+    try {
+      return yield* getObject(`${key}_tmp`);
+    }
+    catch {
+      return undefined;
+    }
+  });
+  const deleteTemporaryObject = (key) => deleteObject(`${key}_tmp`);
+  const getObjectBuffer = (key) => {
+    return Effect.tryPromise({
+      try: async () => {
+        const [buffer] = await bucket.file(key).download();
+        return new Uint8Array(buffer);
+      },
+      catch: (error) => {
+        if (error &&
+          typeof error === "object" &&
+          "code" in error &&
+          error.code === 404) {
+          return UploadistaError.fromCode("FILE_NOT_FOUND");
+        }
+        return UploadistaError.fromCode("FILE_READ_ERROR", {
+          cause: error,
+        });
+      },
+    });
+  };
+  return {
+    bucket: config.bucket,
+    getObject,
+    getObjectBuffer,
+    getObjectMetadata,
+    objectExists,
+    putObject,
+    putObjectFromStream,
+    putObjectFromStreamWithPatching,
+    deleteObject,
+    createResumableUpload,
+    uploadChunk,
+    getUploadStatus,
+    cancelUpload,
+    composeObjects,
+    putTemporaryObject,
+    getTemporaryObject,
+    deleteTemporaryObject,
+  };
+}
+export const GCSClientNodeJSLayer = (config) => Layer.succeed(GCSClientService, createNodeJSGCSClient(config));
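GCS objects are immutable, so this client emulates append by streaming each new chunk to a `${key}_patch` object, merging it into the original with bucket.combine, and deleting the patch. A rough usage sketch of that flow, under the assumption that GCSClientService is a yieldable Effect service tag (the Layer signatures above suggest it is); the key, bucket, and credential path are illustrative:

import { Effect } from "effect";
import { GCSClientService } from "./gcs-client.service";
import { GCSClientNodeJSLayer } from "./gcs-client-nodejs.service";

const appendChunk = (key: string, offset: number, chunk: ReadableStream<Uint8Array>) =>
  Effect.gen(function* () {
    const client = yield* GCSClientService;
    // isAppend = true routes through the patch-and-combine path:
    // write `${key}_patch`, composeObjects([key, patch], key), delete the patch.
    return yield* client.putObjectFromStreamWithPatching(
      key,
      offset, // seeds the running byte counter reported to onProgress
      chunk,
      { contentType: "application/octet-stream" },
      (bytes) => console.log("bytes written so far:", bytes),
      true, // isAppend
    );
  });

const clientLayer = GCSClientNodeJSLayer({
  bucket: "my-uploads-bucket", // illustrative
  keyFilename: "/path/to/service-account.json",
});

// `someStream` is a placeholder for an incoming Web ReadableStream:
// Effect.runPromise(Effect.provide(appendChunk("uploads/file-1", 0, someStream), clientLayer));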
package/dist/services/gcs-client-rest.service.d.ts
ADDED

@@ -0,0 +1,4 @@
+import { Layer } from "effect";
+import { type GCSClientConfig, GCSClientService } from "./gcs-client.service";
+export declare const GCSClientRESTLayer: (config: GCSClientConfig) => Layer.Layer<GCSClientService, never, never>;
+//# sourceMappingURL=gcs-client-rest.service.d.ts.map
package/dist/services/gcs-client-rest.service.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"gcs-client-rest.service.d.ts","sourceRoot":"","sources":["../../src/services/gcs-client-rest.service.ts"],"names":[],"mappings":"AACA,OAAO,EAAU,KAAK,EAAE,MAAM,QAAQ,CAAC;AACvC,OAAO,EACL,KAAK,eAAe,EACpB,gBAAgB,EAGjB,MAAM,sBAAsB,CAAC;AA6Y9B,eAAO,MAAM,kBAAkB,GAAI,QAAQ,eAAe,gDACI,CAAC"}
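The NodeJS and REST declarations share the same shape: a function from GCSClientConfig to a Layer providing GCSClientService with no further requirements, which makes the backend swappable at composition time. A small selection sketch; the runtime-detection heuristic is an assumption of this example, not something the package prescribes:

import type { Layer } from "effect";
import { type GCSClientConfig, GCSClientService } from "./gcs-client.service";
import { GCSClientNodeJSLayer } from "./gcs-client-nodejs.service";
import { GCSClientRESTLayer } from "./gcs-client-rest.service";

// Pick the SDK-backed client under Node.js, the REST-based one elsewhere.
export const gcsClientLayer = (
  config: GCSClientConfig,
): Layer.Layer<GCSClientService, never, never> =>
  typeof process !== "undefined" && process.versions?.node != null
    ? GCSClientNodeJSLayer(config)
    : GCSClientRESTLayer(config);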