@uploadista/data-store-gcs 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +5 -0
- package/LICENSE +21 -0
- package/README.md +479 -0
- package/dist/examples.d.ts +44 -0
- package/dist/examples.d.ts.map +1 -0
- package/dist/examples.js +82 -0
- package/dist/gcs-store-rest.d.ts +16 -0
- package/dist/gcs-store-rest.d.ts.map +1 -0
- package/dist/gcs-store-rest.js +188 -0
- package/dist/gcs-store-v2.d.ts +13 -0
- package/dist/gcs-store-v2.d.ts.map +1 -0
- package/dist/gcs-store-v2.js +190 -0
- package/dist/gcs-store.d.ts +12 -0
- package/dist/gcs-store.d.ts.map +1 -0
- package/dist/gcs-store.js +282 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/services/gcs-client-nodejs.service.d.ts +4 -0
- package/dist/services/gcs-client-nodejs.service.d.ts.map +1 -0
- package/dist/services/gcs-client-nodejs.service.js +312 -0
- package/dist/services/gcs-client-rest.service.d.ts +4 -0
- package/dist/services/gcs-client-rest.service.d.ts.map +1 -0
- package/dist/services/gcs-client-rest.service.js +299 -0
- package/dist/services/gcs-client.service.d.ts +56 -0
- package/dist/services/gcs-client.service.d.ts.map +1 -0
- package/dist/services/gcs-client.service.js +3 -0
- package/dist/services/index.d.ts +4 -0
- package/dist/services/index.d.ts.map +1 -0
- package/dist/services/index.js +3 -0
- package/package.json +31 -0
- package/src/gcs-store-v2.ts +286 -0
- package/src/gcs-store.ts +398 -0
- package/src/index.ts +6 -0
- package/src/services/gcs-client-nodejs.service.ts +435 -0
- package/src/services/gcs-client-rest.service.ts +406 -0
- package/src/services/gcs-client.service.ts +117 -0
- package/src/services/index.ts +3 -0
- package/tsconfig.json +12 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
import { UploadistaError } from "@uploadista/core/errors";
|
|
2
|
+
import {
|
|
3
|
+
type DataStore,
|
|
4
|
+
type DataStoreCapabilities,
|
|
5
|
+
type DataStoreWriteOptions,
|
|
6
|
+
type KvStore,
|
|
7
|
+
type UploadFile,
|
|
8
|
+
UploadFileDataStore,
|
|
9
|
+
UploadFileKVStore,
|
|
10
|
+
type UploadStrategy,
|
|
11
|
+
} from "@uploadista/core/types";
|
|
12
|
+
import { Effect, Layer, Stream } from "effect";
|
|
13
|
+
import type {
|
|
14
|
+
GCSClient,
|
|
15
|
+
GCSClientConfig,
|
|
16
|
+
GCSOperationContext,
|
|
17
|
+
} from "./services";
|
|
18
|
+
import {
|
|
19
|
+
GCSClientNodeJSLayer,
|
|
20
|
+
GCSClientRESTLayer,
|
|
21
|
+
GCSClientService,
|
|
22
|
+
} from "./services";
|
|
23
|
+
|
|
24
|
+
export type GCSStoreOptions = {
|
|
25
|
+
kvStore: KvStore<UploadFile>;
|
|
26
|
+
} & GCSClientConfig;
|
|
27
|
+
|
|
28
|
+
/**
|
|
29
|
+
* Convert the Upload object to a format that can be stored in GCS metadata.
|
|
30
|
+
*/
|
|
31
|
+
function stringifyUploadKeys(
|
|
32
|
+
upload: UploadFile,
|
|
33
|
+
): Record<string, string | null> {
|
|
34
|
+
return {
|
|
35
|
+
size: upload.size?.toString() ?? null,
|
|
36
|
+
sizeIsDeferred: `${upload.sizeIsDeferred}`,
|
|
37
|
+
offset: upload.offset?.toString() ?? "0",
|
|
38
|
+
metadata: JSON.stringify(upload.metadata),
|
|
39
|
+
storage: JSON.stringify(upload.storage),
|
|
40
|
+
};
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
/**
 * Rebuild an UploadFile-shaped record for `id` by combining the object
 * metadata read from GCS with the storage descriptor persisted in the
 * KV store. `offset` is derived from the object's current size in GCS
 * (i.e. how many bytes have already been written).
 */
const getUpload = (
  id: string,
  kvStore: KvStore<UploadFile>,
  gcsClient: GCSClient,
) => {
  return Effect.gen(function* () {
    // NOTE(review): failures of the yielded effects travel through the
    // Effect error channel rather than being thrown synchronously, so this
    // try/catch most likely never fires — confirm, and consider
    // Effect.catchAll / Effect.catchTag for the FILE_NOT_FOUND case.
    try {
      const metadata = yield* gcsClient.getObjectMetadata(id);
      const file = yield* kvStore.get(id);

      return {
        id,
        size: metadata.size,
        // Bytes already stored in GCS double as the resume offset.
        offset: metadata.size || 0,
        metadata: metadata.metadata,
        storage: {
          id: file.storage.id,
          type: file.storage.type,
          path: id,
          bucket: gcsClient.bucket,
        },
      };
    } catch (error) {
      // Re-surface a typed FILE_NOT_FOUND failure; rethrow anything else.
      if (error instanceof UploadistaError && error.code === "FILE_NOT_FOUND") {
        return yield* Effect.fail(error);
      }
      throw error;
    }
  });
};
|
|
73
|
+
|
|
74
|
+
export function createGCSStore(options: Omit<GCSStoreOptions, "kvStore">) {
|
|
75
|
+
return Effect.gen(function* () {
|
|
76
|
+
const kvStore = yield* UploadFileKVStore;
|
|
77
|
+
return yield* createGCSStoreImplementation({ ...options, kvStore });
|
|
78
|
+
});
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
/**
 * Build a DataStore<UploadFile> backed by Google Cloud Storage, using
 * whichever GCSClient implementation is provided through GCSClientService
 * (Node.js SDK or plain REST).
 *
 * Resumable writes are emulated: appended bytes go to a temporary
 * `<id>_patch` object which is then merged into the original via compose.
 */
export function createGCSStoreImplementation(
  config: GCSStoreOptions,
): Effect.Effect<DataStore<UploadFile>, never, GCSClientService> {
  return Effect.gen(function* () {
    const gcsClient = yield* GCSClientService;
    const { kvStore } = config;

    // Static capability descriptor advertised to the upload engine.
    const getCapabilities = (): DataStoreCapabilities => {
      return {
        supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3
        supportsConcatenation: true, // Can combine files using compose
        supportsDeferredLength: true,
        supportsResumableUploads: true, // Through resumable uploads
        supportsTransactionalUploads: false,
        maxConcurrentUploads: 1, // Sequential operations
        minChunkSize: undefined,
        maxChunkSize: undefined,
        maxParts: undefined,
        optimalChunkSize: 8 * 1024 * 1024, // 8MB default
        requiresOrderedChunks: true, // Due to compose operation
      };
    };

    // Only the "single" strategy is supported; "parallel" depends on the
    // (absent) parallel-upload capability, anything else is rejected.
    const validateUploadStrategy = (
      strategy: UploadStrategy,
    ): Effect.Effect<boolean, never> => {
      const capabilities = getCapabilities();

      const result = (() => {
        switch (strategy) {
          case "parallel":
            return capabilities.supportsParallelUploads;
          case "single":
            return true;
          default:
            return false;
        }
      })();

      return Effect.succeed(result);
    };

    return {
      bucket: gcsClient.bucket,

      // Initialize an upload: record the storage location on the file and
      // create a zero-byte placeholder object carrying the upload metadata.
      create: (file: UploadFile) => {
        return Effect.gen(function* () {
          if (!file.id) {
            return yield* Effect.fail(
              UploadistaError.fromCode("FILE_NOT_FOUND"),
            );
          }

          file.storage = {
            id: file.storage.id,
            type: file.storage.type,
            path: file.id,
            bucket: gcsClient.bucket,
          };

          // Create empty file
          const context = {
            bucket: gcsClient.bucket,
            key: file.id,
            contentType:
              file.metadata?.contentType?.toString() ||
              "application/octet-stream",
            metadata: stringifyUploadKeys(file),
          };

          yield* gcsClient.putObject(file.id, new Uint8Array(0), context);
          return file;
        });
      },

      remove: (file_id: string) => {
        return gcsClient.deleteObject(file_id);
      },

      // Stream incoming bytes into the object at `offset`. Prefers the
      // client's native streaming/patching path; otherwise falls back to
      // buffer-then-compose.
      write: (
        options: DataStoreWriteOptions,
        dependencies: {
          onProgress?: (chunkSize: number) => void;
        },
      ) => {
        return Effect.gen(function* () {
          const { file_id, offset, stream: effectStream } = options;
          const { onProgress } = dependencies;

          // Get current upload metadata
          const upload = yield* getUpload(file_id, kvStore, gcsClient);

          upload.offset = offset;
          // Persist the updated offset
          yield* kvStore.set(file_id, upload as UploadFile);

          const context = {
            bucket: gcsClient.bucket,
            key: file_id,
            contentType:
              upload.metadata?.contentType || "application/octet-stream",
            metadata: stringifyUploadKeys(upload as UploadFile),
          } satisfies Partial<GCSOperationContext>;

          // Convert Effect Stream to ReadableStream
          const readableStream = Stream.toReadableStream(effectStream);

          // Use native streams if available (Node.js implementation)
          if (gcsClient.putObjectFromStreamWithPatching) {
            // NOTE(review): upload.offset was reassigned to the incoming
            // write offset a few lines above, so this condition checks the
            // write offset — the trailing comment below appears stale
            // (it claims the original file size is checked). Confirm intent.
            const isAppend = upload.offset > 0; // Check original file size, not write offset

            return yield* gcsClient.putObjectFromStreamWithPatching(
              file_id,
              upload.offset,
              readableStream,
              context,
              onProgress,
              isAppend,
            );
          } else {
            // Fallback to chunk-based approach for REST implementation
            // NOTE(review): this path buffers the entire stream in memory
            // before uploading — confirm acceptable for large uploads.
            const reader = readableStream.getReader();
            const chunks: Uint8Array[] = [];
            let totalBytes = 0;

            // Read all chunks
            while (true) {
              const { done, value } = yield* Effect.promise(() =>
                reader.read(),
              );
              if (done) break;

              chunks.push(value);
              const chunkSize = value.byteLength;
              totalBytes += chunkSize;
              // Progress reports the cumulative byte count, not the delta.
              onProgress?.(totalBytes);
            }

            // Combine all chunks
            const combinedArray = new Uint8Array(totalBytes);
            let position = 0;
            for (const chunk of chunks) {
              combinedArray.set(chunk, position);
              position += chunk.byteLength;
            }

            // Check if we need to handle patches (append data)
            if (upload.offset === 0) {
              // Direct upload
              yield* gcsClient.putObject(file_id, combinedArray, context);
            } else {
              // We need to combine with existing data
              const patchKey = `${file_id}_patch`;

              // Upload patch data
              yield* gcsClient.putTemporaryObject(
                patchKey,
                combinedArray,
                context,
              );

              // Combine original file with patch
              yield* gcsClient.composeObjects(
                [file_id, patchKey],
                file_id,
                context,
              );

              // Clean up patch file
              yield* gcsClient.deleteTemporaryObject(patchKey);
            }

            return totalBytes;
          }
        });
      },

      getCapabilities,
      validateUploadStrategy,

      // Read the whole object back as a single buffer.
      read: (file_id: string) => {
        return Effect.gen(function* () {
          const buffer = yield* gcsClient.getObjectBuffer(file_id);
          return buffer;
        });
      },
    };
  });
}
|
|
268
|
+
|
|
269
|
+
export const GCSStoreLayer = (options: Omit<GCSStoreOptions, "kvStore">) =>
|
|
270
|
+
Layer.effect(UploadFileDataStore, createGCSStore(options));
|
|
271
|
+
|
|
272
|
+
export const gcsStoreRest = (config: GCSStoreOptions) => {
|
|
273
|
+
return Effect.runPromise(
|
|
274
|
+
createGCSStoreImplementation(config).pipe(
|
|
275
|
+
Effect.provide(GCSClientRESTLayer(config)),
|
|
276
|
+
),
|
|
277
|
+
);
|
|
278
|
+
};
|
|
279
|
+
|
|
280
|
+
export const gcsStoreNodejs = (config: GCSStoreOptions) => {
|
|
281
|
+
return Effect.runPromise(
|
|
282
|
+
createGCSStoreImplementation(config).pipe(
|
|
283
|
+
Effect.provide(GCSClientNodeJSLayer(config)),
|
|
284
|
+
),
|
|
285
|
+
);
|
|
286
|
+
};
|
package/src/gcs-store.ts
ADDED
|
@@ -0,0 +1,398 @@
|
|
|
1
|
+
import { PassThrough, pipeline, Readable, Transform } from "node:stream";
|
|
2
|
+
import type { Bucket, CreateWriteStreamOptions } from "@google-cloud/storage";
|
|
3
|
+
import { Storage } from "@google-cloud/storage";
|
|
4
|
+
import { UploadistaError } from "@uploadista/core/errors";
|
|
5
|
+
import {
|
|
6
|
+
type DataStore,
|
|
7
|
+
type DataStoreCapabilities,
|
|
8
|
+
type DataStoreWriteOptions,
|
|
9
|
+
type KvStore,
|
|
10
|
+
type UploadFile,
|
|
11
|
+
UploadFileDataStore,
|
|
12
|
+
UploadFileKVStore,
|
|
13
|
+
type UploadStrategy,
|
|
14
|
+
} from "@uploadista/core/types";
|
|
15
|
+
import {
|
|
16
|
+
gcsActiveUploadsGauge as activeUploadsGauge,
|
|
17
|
+
gcsFileSizeHistogram as fileSizeHistogram,
|
|
18
|
+
logGCSUploadCompletion,
|
|
19
|
+
trackGCSError,
|
|
20
|
+
gcsUploadDurationHistogram as uploadDurationHistogram,
|
|
21
|
+
gcsUploadErrorsTotal as uploadErrorsTotal,
|
|
22
|
+
gcsUploadRequestsTotal as uploadRequestsTotal,
|
|
23
|
+
gcsUploadSuccessTotal as uploadSuccessTotal,
|
|
24
|
+
withGCSTimingMetrics as withTimingMetrics,
|
|
25
|
+
withGCSUploadMetrics as withUploadMetrics,
|
|
26
|
+
} from "@uploadista/observability";
|
|
27
|
+
import { Effect, Layer, Stream } from "effect";
|
|
28
|
+
|
|
29
|
+
export type GCSStoreOptions = {
|
|
30
|
+
keyFilename?: string;
|
|
31
|
+
credentials?: object;
|
|
32
|
+
bucketName: string;
|
|
33
|
+
kvStore: KvStore<UploadFile>;
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Convert the Upload object to a format that can be stored in GCS metadata.
|
|
38
|
+
*/
|
|
39
|
+
function stringifyUploadKeys(upload: UploadFile) {
|
|
40
|
+
return {
|
|
41
|
+
size: upload.size ?? null,
|
|
42
|
+
sizeIsDeferred: `${upload.sizeIsDeferred}`,
|
|
43
|
+
offset: upload.offset,
|
|
44
|
+
metadata: JSON.stringify(upload.metadata),
|
|
45
|
+
storage: JSON.stringify(upload.storage),
|
|
46
|
+
};
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
/**
 * Rebuild an UploadFile-shaped record for `id` by merging the GCS object
 * metadata with the storage descriptor persisted in the KV store. `offset`
 * is derived from the GCS-reported object size (bytes already written).
 */
const getUpload = (
  bucket: Bucket,
  id: string,
  kvStore: KvStore<UploadFile>,
) => {
  return Effect.gen(function* () {
    // NOTE(review): Effect.promise converts a rejected promise into a
    // defect rather than a thrown exception, so this try/catch — including
    // the 404 → FILE_NOT_FOUND mapping — probably never runs. Confirm and
    // consider Effect.tryPromise with a typed catch instead.
    try {
      const [metadata] = yield* Effect.promise(() =>
        bucket.file(id).getMetadata(),
      );
      const { size, metadata: meta } = metadata;
      const file = yield* kvStore.get(id);
      return {
        id,
        // GCS reports size as a string; normalize to a number.
        size: size ? Number.parseInt(`${size}`, 10) : undefined,
        offset: metadata.size ? Number.parseInt(`${metadata.size}`, 10) : 0, // `size` is set by GCS
        metadata: meta ? (meta as Record<string, string>) : undefined,
        storage: {
          id: file.storage.id,
          type: file.storage.type,
          path: id,
          bucket: bucket.name,
        },
      };
    } catch (error) {
      // Map a GCS 404 to the typed FILE_NOT_FOUND failure; rethrow the rest.
      if (
        error &&
        typeof error === "object" &&
        "code" in error &&
        error.code === 404
      ) {
        return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
      }

      throw error;
    }
  });
};
|
|
87
|
+
|
|
88
|
+
export function createGCSStore({
|
|
89
|
+
keyFilename,
|
|
90
|
+
credentials,
|
|
91
|
+
bucketName,
|
|
92
|
+
}: Omit<GCSStoreOptions, "kvStore">) {
|
|
93
|
+
return Effect.gen(function* () {
|
|
94
|
+
const kvStore = yield* UploadFileKVStore;
|
|
95
|
+
return gcsStore({ keyFilename, credentials, bucketName, kvStore });
|
|
96
|
+
});
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
/**
 * Build a DataStore<UploadFile> backed directly by the @google-cloud/storage
 * Node.js SDK. Authenticates with a key file if given, else inline
 * credentials, else application-default credentials. Appends are emulated by
 * writing to a temporary `<id>_patch` object and merging with bucket.combine.
 */
export function gcsStore({
  keyFilename,
  credentials,
  bucketName,
  kvStore,
}: GCSStoreOptions): DataStore<UploadFile> {
  const storage = new Storage(
    keyFilename ? { keyFilename } : credentials ? { credentials } : {},
  );

  const bucket = storage.bucket(bucketName);

  // Static capability descriptor advertised to the upload engine.
  const getCapabilities = (): DataStoreCapabilities => {
    return {
      supportsParallelUploads: false, // GCS doesn't have native multipart upload like S3
      supportsConcatenation: true, // Can combine files using bucket.combine
      supportsDeferredLength: true,
      supportsResumableUploads: true, // Through patch files
      supportsTransactionalUploads: false,
      maxConcurrentUploads: 1, // Sequential operations
      minChunkSize: undefined,
      maxChunkSize: undefined,
      maxParts: undefined,
      optimalChunkSize: 8 * 1024 * 1024, // 8MB default
      requiresOrderedChunks: true, // Due to combine operation
      requiresMimeTypeValidation: true,
      maxValidationSize: undefined, // no size limit
    };
  };

  // Only the "single" strategy is supported; "parallel" depends on the
  // (absent) parallel-upload capability, anything else is rejected.
  const validateUploadStrategy = (
    strategy: UploadStrategy,
  ): Effect.Effect<boolean, never> => {
    const capabilities = getCapabilities();

    const result = (() => {
      switch (strategy) {
        case "parallel":
          return capabilities.supportsParallelUploads;
        case "single":
          return true;
        default:
          return false;
      }
    })();

    return Effect.succeed(result);
  };

  return {
    bucket: bucket.name,

    // Initialize an upload: bump metrics, record the storage location on
    // the file, and create a zero-byte placeholder object in GCS by piping
    // an immediately-ended PassThrough into a write stream.
    create: (file: UploadFile) => {
      return Effect.gen(function* () {
        yield* uploadRequestsTotal(Effect.succeed(1));
        yield* activeUploadsGauge(Effect.succeed(1));
        yield* fileSizeHistogram(Effect.succeed(file.size || 0));

        if (!file.id) {
          yield* uploadErrorsTotal(Effect.succeed(1));
          return yield* Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
        }

        const gcs_file = bucket.file(file.id);

        file.storage = {
          id: file.storage.id,
          type: file.storage.type,
          path: file.id,
          bucket: bucket.name,
        };

        // NOTE(review): debug logging left in — consider removing or
        // routing through the observability layer.
        console.log("file", gcs_file.id);

        const options: CreateWriteStreamOptions = {
          metadata: {
            metadata: {
              ...stringifyUploadKeys(file),
            },
          },
        };
        if (file.metadata?.contentType) {
          options.contentType = file.metadata.contentType.toString();
        }

        return yield* Effect.tryPromise({
          try: () => {
            console.log("creating file", gcs_file.id);
            return new Promise<UploadFile>((resolve, reject) => {
              // Empty source stream: ending before piping yields a
              // zero-byte object carrying only the metadata above.
              const fake_stream = new PassThrough();
              fake_stream.end();
              fake_stream
                .pipe(gcs_file.createWriteStream(options))
                .on("error", reject)
                .on("finish", () => {
                  resolve(file);
                });
            });
          },
          catch: (error) => {
            console.error("error creating file", error);
            Effect.runSync(
              trackGCSError("create", error, {
                upload_id: file.id,
                bucket: bucket.name,
              }),
            );
            return UploadistaError.fromCode("FILE_WRITE_ERROR", {
              cause: error,
            });
          },
        });
      });
    },

    // Download the whole object as a single Uint8Array; a GCS 404 maps to
    // FILE_NOT_FOUND, anything else to FILE_READ_ERROR.
    read: (file_id: string) => {
      return Effect.tryPromise({
        try: async () => {
          const [buffer] = await bucket.file(file_id).download();
          return new Uint8Array(buffer);
        },
        catch: (error) => {
          Effect.runSync(
            trackGCSError("read", error, {
              upload_id: file_id,
              bucket: bucket.name,
            }),
          );
          if (
            error &&
            typeof error === "object" &&
            "code" in error &&
            error.code === 404
          ) {
            return UploadistaError.fromCode("FILE_NOT_FOUND");
          }
          return UploadistaError.fromCode("FILE_READ_ERROR", {
            cause: error,
          });
        },
      });
    },

    // Delete the object and decrement the active-uploads gauge.
    remove: (file_id: string) => {
      return Effect.gen(function* () {
        // NOTE(review): Effect.promise turns rejection into a defect, not a
        // thrown exception, so this catch probably never runs — confirm and
        // consider Effect.tryPromise if the error tracking matters.
        try {
          yield* Effect.promise(() => bucket.file(file_id).delete());
          yield* activeUploadsGauge(Effect.succeed(-1));
        } catch (error) {
          Effect.runSync(
            trackGCSError("remove", error, {
              upload_id: file_id,
              bucket: bucket.name,
            }),
          );
          throw error;
        }
      });
    },

    /**
     * Get the file metadata from the object in GCS, then upload a new version
     * passing through the metadata to the new version.
     *
     * First write (offset 0) streams straight into the target object; later
     * writes stream into `<id>_patch`, which is then merged into the target
     * with bucket.combine and deleted.
     */
    write: (
      options: DataStoreWriteOptions,
      dependencies: {
        onProgress?: (chunkSize: number) => void;
      },
    ) => {
      return withUploadMetrics(
        options.file_id,
        withTimingMetrics(
          uploadDurationHistogram,
          Effect.gen(function* () {
            const startTime = Date.now();
            const { file_id, offset, stream: effectStream } = options;
            // NOTE(review): debug logging left in — consider removing.
            console.log("write", file_id, offset);
            const { onProgress } = dependencies;

            // GCS Doesn't persist metadata within versions,
            // get that metadata first
            const upload = yield* getUpload(bucket, file_id, kvStore);
            console.log("upload", upload);

            return yield* Effect.promise(
              () =>
                new Promise<number>((resolve, reject) => {
                  const file = bucket.file(file_id);
                  // Destination is chosen from the GCS-reported size
                  // BEFORE upload.offset is overwritten below: first write
                  // goes straight to the object, later writes to a patch.
                  const destination =
                    upload.offset === 0
                      ? file
                      : bucket.file(`${file_id}_patch`);

                  upload.offset = offset;

                  const gcsOptions = {
                    metadata: {
                      metadata: {
                        ...stringifyUploadKeys(upload),
                      },
                    },
                  };
                  const write_stream =
                    destination.createWriteStream(gcsOptions);
                  if (!write_stream) {
                    Effect.runSync(uploadErrorsTotal(Effect.succeed(1)));
                    reject(UploadistaError.fromCode("FILE_WRITE_ERROR"));
                    return;
                  }

                  // Progress reports cumulative bytes, starting from the
                  // write offset.
                  let bytes_received = upload.offset;

                  // Convert Effect Stream to ReadableStream
                  const readableStream = Stream.toReadableStream(effectStream);

                  // Pass-through that only counts bytes for onProgress.
                  const transform = new Transform({
                    transform(
                      chunk: Buffer,
                      _: string,
                      callback: (error?: Error | null, data?: Buffer) => void,
                    ) {
                      bytes_received += chunk.length;
                      onProgress?.(bytes_received);
                      callback(null, chunk);
                    },
                  });

                  const nodeReadable = Readable.fromWeb(readableStream);

                  pipeline(
                    nodeReadable,
                    transform,
                    write_stream,
                    async (e: Error | null) => {
                      if (e) {
                        // Stream failed: best-effort cleanup of the partial
                        // destination, then reject with a typed error.
                        console.error("error writing file", e);
                        Effect.runSync(
                          trackGCSError("write", e, {
                            upload_id: file_id,
                            bucket: bucket.name,
                            offset,
                          }),
                        );
                        try {
                          await destination.delete({ ignoreNotFound: true });
                        } finally {
                          reject(UploadistaError.fromCode("FILE_WRITE_ERROR"));
                        }
                      } else {
                        try {
                          // Append case: merge patch into the original,
                          // restore metadata, drop the patch object.
                          if (file !== destination) {
                            await bucket.combine([file, destination], file);
                            await Promise.all([
                              file.setMetadata(gcsOptions.metadata),
                              destination.delete({ ignoreNotFound: true }),
                            ]);
                          }

                          // Log completion
                          Effect.runSync(
                            logGCSUploadCompletion(file_id, {
                              fileSize: upload.size || 0,
                              totalDurationMs: Date.now() - startTime,
                              partsCount: 1,
                              averagePartSize: upload.size,
                              throughputBps:
                                (upload.size || 0) / (Date.now() - startTime),
                              retryCount: 0,
                            }),
                          );
                          Effect.runSync(uploadSuccessTotal(Effect.succeed(1)));
                          Effect.runSync(
                            activeUploadsGauge(Effect.succeed(-1)),
                          );

                          resolve(bytes_received);
                        } catch (error) {
                          console.error(error);
                          Effect.runSync(
                            trackGCSError("write", error, {
                              upload_id: file_id,
                              bucket: bucket.name,
                              operation: "combine",
                            }),
                          );
                          reject(UploadistaError.fromCode("FILE_WRITE_ERROR"));
                        }
                      }
                    },
                  );
                }),
            );
          }),
        ),
      );
    },
    getCapabilities,
    validateUploadStrategy,
  };
}
|
|
396
|
+
|
|
397
|
+
export const GCSStoreLayer = (options: Omit<GCSStoreOptions, "kvStore">) =>
|
|
398
|
+
Layer.effect(UploadFileDataStore, createGCSStore(options));
|
package/src/index.ts
ADDED