@valentinkolb/filegate 2.3.4 → 2.3.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/handlers/files.ts +3 -1
- package/src/handlers/upload.ts +20 -19
package/package.json
CHANGED
package/src/handlers/files.ts
CHANGED
@@ -196,7 +196,9 @@ app.get(
       if (entry.isDirectory()) {
         await addDirectoryToArchive(fullPath, basePath);
       } else if (entry.isFile()) {
-        [old line 199 — content not captured in this rendering; presumably the lazy `Bun.file(fullPath)` assignment the new comment refers to — confirm against the 2.3.4 tarball]
+        // Read file content as Blob - Bun.file() is lazy and doesn't work reliably with Archive
+        const fileContent = await Bun.file(fullPath).arrayBuffer();
+        files[archivePath] = new Blob([fileContent]);
       }
     }
   };
package/src/handlers/upload.ts
CHANGED
@@ -272,32 +272,33 @@ app.post(
       return c.json({ error: `chunk index ${chunkIndex} exceeds total ${meta.totalChunks}` }, 400);
     }

-    [old line 275 — content not captured in this rendering — confirm against the 2.3.4 tarball]
-    [old line 276 — content not captured in this rendering — confirm against the 2.3.4 tarball]
+    // Read body as ArrayBuffer (more reliable than streaming)
+    let bodyBuffer: ArrayBuffer;
+    try {
+      bodyBuffer = await c.req.arrayBuffer();
+    } catch {
+      return c.json({ error: "failed to read request body" }, 400);
+    }

-    [old line 278 — content not captured in this rendering — confirm against the 2.3.4 tarball]
-    [old line 279 — content not captured in this rendering — confirm against the 2.3.4 tarball]
-    [old line 280 — content not captured in this rendering — confirm against the 2.3.4 tarball]
+    if (bodyBuffer.byteLength === 0) {
+      return c.json({ error: "missing body" }, 400);
+    }
+
+    if (bodyBuffer.byteLength > config.maxChunkBytes) {
+      return c.json({ error: `chunk size exceeds maximum (${config.maxChunkBytes / 1024 / 1024}MB)` }, 413);
+    }
+
+    const bodyData = new Uint8Array(bodyBuffer);
     const hasher = new Bun.CryptoHasher("sha256");
+    hasher.update(bodyData);

+    // Write chunk to temporary file
+    const tempChunkPath = chunkPath(uploadId, chunkIndex) + ".tmp";
     await mkdir(chunksDir(uploadId), { recursive: true });
-    const tempFile = Bun.file(tempChunkPath);
-    const writer = tempFile.writer();

     try {
-      [old line 288 — content not captured in this rendering; presumably the head of the streaming read loop over body chunks — confirm against the 2.3.4 tarball]
-        chunkSize += chunk.length;
-        if (chunkSize > config.maxChunkBytes) {
-          writer.end();
-          await rm(tempChunkPath).catch(() => {});
-          return c.json({ error: `chunk size exceeds maximum (${config.maxChunkBytes / 1024 / 1024}MB)` }, 413);
-        }
-        hasher.update(chunk);
-        writer.write(chunk);
-      }
-      await writer.end();
+      await Bun.write(tempChunkPath, bodyData);
     } catch (e) {
-      writer.end();
       await rm(tempChunkPath).catch(() => {});
       throw e;
     }