@muhgholy/next-drive 3.2.3 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -103,16 +103,6 @@ import { driveAPIHandler } from "@muhgholy/next-drive/server";
  import type { NextApiRequest, NextApiResponse } from "next";

  export default async function handler(req: NextApiRequest, res: NextApiResponse) {
-   // Parse JSON body manually (body parser is disabled)
-   if (req.headers["content-type"]?.includes("application/json")) {
-     const chunks: Buffer[] = [];
-     for await (const chunk of req) chunks.push(chunk);
-     const buffer = Buffer.concat(chunks);
-     req.body = buffer.length > 0 ? JSON.parse(buffer.toString()) : {};
-   } else {
-     req.body = req.body || {};
-   }
-
    return driveAPIHandler(req, res);
  }

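With the manual parsing removed, the Pages Router example reduces to a plain delegation; presumably driveAPIHandler now reads and parses the JSON body itself in 3.3.0. For reference, here is the example as it reads after this hunk, assembled only from the context lines shown above:

```ts
import { driveAPIHandler } from "@muhgholy/next-drive/server";
import type { NextApiRequest, NextApiResponse } from "next";

// The wrapper only delegates now; the JSON body-parsing block marked as removed
// above is no longer needed in user code (inference from this diff).
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  return driveAPIHandler(req, res);
}
```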
@@ -281,6 +271,16 @@ const file = await driveUpload(
      enforce: true, // Skip quota check
    }
  );
+
+ // Upload from Buffer
+ const buffer = Buffer.from("file content");
+ const file = await driveUpload(
+   buffer,
+   { userId: "123" },
+   {
+     name: "document.txt",
+   }
+ );
  ```

  **Options:**
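The added README lines document that driveUpload now accepts a Buffer as its first argument, alongside the existing file-path and stream sources. A slightly fuller usage sketch, assuming the same (source, key, options) signature shown above; the fs/promises read, the file names, and the import path for driveUpload are illustrative assumptions, not copied from the README:

```ts
import { readFile } from "fs/promises";
import { driveUpload } from "@muhgholy/next-drive/server"; // assumed entry point

// Read an existing file into memory and upload its bytes as a Buffer.
const buffer = await readFile("./report.pdf");
const file = await driveUpload(
  buffer,                 // Buffer source (new in 3.3.0)
  { userId: "123" },      // key, as in the README examples
  { name: "report.pdf" }  // options
);
```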
@@ -1,7 +1,7 @@
  import { __require } from './chunk-DGUM43GV.js';
  import formidable from 'formidable';
  import path3 from 'path';
- import fs2 from 'fs';
+ import fs4 from 'fs';
  import os2 from 'os';
  import mongoose2, { Schema, isValidObjectId } from 'mongoose';
  import crypto3 from 'crypto';
@@ -139,7 +139,7 @@ var validateMimeType = (mime, allowedTypes) => {
  };
  var computeFileHash = (filePath) => new Promise((resolve, reject) => {
    const hash = crypto3.createHash("sha256");
-   const stream = fs2.createReadStream(filePath);
+   const stream = fs4.createReadStream(filePath);
    stream.on("data", (data) => hash.update(data));
    stream.on("end", () => resolve(hash.digest("hex")));
    stream.on("error", reject);
@@ -270,11 +270,11 @@ var LocalStorageProvider = {
  openStream: async (item, accountId) => {
    if (item.information.type !== "FILE") throw new Error("Cannot stream folder");
    const filePath = path3.join(getDriveConfig().storage.path, item.information.path);
-   if (!fs2.existsSync(filePath)) {
+   if (!fs4.existsSync(filePath)) {
      throw new Error("File not found on disk");
    }
-   const stat = fs2.statSync(filePath);
-   const stream = fs2.createReadStream(filePath);
+   const stat = fs4.statSync(filePath);
+   const stream = fs4.createReadStream(filePath);
    return {
      stream,
      mime: item.information.mime,
@@ -286,11 +286,11 @@ var LocalStorageProvider = {
    const storagePath = getDriveConfig().storage.path;
    const originalPath = path3.join(storagePath, item.information.path);
    const thumbPath = path3.join(storagePath, "cache", "thumbnails", `${item._id.toString()}.webp`);
-   if (!fs2.existsSync(originalPath)) throw new Error("Original file not found");
-   if (fs2.existsSync(thumbPath)) {
-     return fs2.createReadStream(thumbPath);
+   if (!fs4.existsSync(originalPath)) throw new Error("Original file not found");
+   if (fs4.existsSync(thumbPath)) {
+     return fs4.createReadStream(thumbPath);
    }
-   if (!fs2.existsSync(path3.dirname(thumbPath))) fs2.mkdirSync(path3.dirname(thumbPath), { recursive: true });
+   if (!fs4.existsSync(path3.dirname(thumbPath))) fs4.mkdirSync(path3.dirname(thumbPath), { recursive: true });
    if (item.information.mime.startsWith("image/")) {
      await sharp(originalPath).resize(300, 300, { fit: "inside" }).toFormat("webp", { quality: 80 }).toFile(thumbPath);
    } else if (item.information.mime.startsWith("video/")) {
@@ -305,7 +305,7 @@ var LocalStorageProvider = {
    } else {
      throw new Error("Unsupported mime type for thumbnail");
    }
-   return fs2.createReadStream(thumbPath);
+   return fs4.createReadStream(thumbPath);
  },
  createFolder: async (name, parentId, owner, accountId) => {
    const getNextOrderValue2 = async (owner2) => {
@@ -329,28 +329,28 @@ var LocalStorageProvider = {
    const storagePath = getDriveConfig().storage.path;
    const destPath = path3.join(storagePath, drive.information.path);
    const dirPath = path3.dirname(destPath);
-   if (!fs2.existsSync(filePath)) {
+   if (!fs4.existsSync(filePath)) {
      throw new Error("Source file not found");
    }
-   if (!fs2.existsSync(dirPath)) {
-     fs2.mkdirSync(dirPath, { recursive: true });
+   if (!fs4.existsSync(dirPath)) {
+     fs4.mkdirSync(dirPath, { recursive: true });
    }
    try {
-     fs2.renameSync(filePath, destPath);
+     fs4.renameSync(filePath, destPath);
    } catch (err) {
      if (err instanceof Error && "code" in err && err.code === "EXDEV") {
-       fs2.copyFileSync(filePath, destPath);
-       fs2.unlinkSync(filePath);
+       fs4.copyFileSync(filePath, destPath);
+       fs4.unlinkSync(filePath);
      } else {
        throw err;
      }
    }
-   if (!fs2.existsSync(destPath)) {
+   if (!fs4.existsSync(destPath)) {
      throw new Error("Failed to write file to destination");
    }
-   const destStats = fs2.statSync(destPath);
+   const destStats = fs4.statSync(destPath);
    if (destStats.size !== drive.information.sizeInBytes) {
-     fs2.unlinkSync(destPath);
+     fs4.unlinkSync(destPath);
      throw new Error(`Destination file size mismatch: expected ${drive.information.sizeInBytes}, got ${destStats.size}`);
    }
    drive.status = "READY";
@@ -381,8 +381,8 @@ var LocalStorageProvider = {
    if (item.information.type === "FILE" && item.information.path) {
      const fullPath = path3.join(getDriveConfig().storage.path, item.information.path);
      const dirPath = path3.dirname(fullPath);
-     if (fs2.existsSync(dirPath)) {
-       fs2.rmSync(dirPath, { recursive: true, force: true });
+     if (fs4.existsSync(dirPath)) {
+       fs4.rmSync(dirPath, { recursive: true, force: true });
      }
    }
  }
@@ -691,7 +691,7 @@ var GoogleDriveProvider = {
    },
    media: {
      mimeType: drive.information.mime,
-     body: fs2.createReadStream(filePath)
+     body: fs4.createReadStream(filePath)
    },
    fields: "id, name, mimeType, webViewLink, iconLink, thumbnailLink, size"
  });
@@ -885,7 +885,7 @@ var driveFilePath = async (file) => {
  const providerType = drive.provider?.type || "LOCAL";
  if (providerType === "LOCAL") {
    const filePath = path3.join(STORAGE_PATH, drive.information.path);
-   if (!fs2.existsSync(filePath)) {
+   if (!fs4.existsSync(filePath)) {
      throw new Error(`Local file not found on disk: ${filePath}`);
    }
    return Object.freeze({
@@ -900,8 +900,8 @@ var driveFilePath = async (file) => {
  const libraryDir = path3.join(STORAGE_PATH, "library", "google");
  const fileName = `${drive._id}${path3.extname(drive.name)}`;
  const cachedFilePath = path3.join(libraryDir, fileName);
- if (fs2.existsSync(cachedFilePath)) {
-   const stats = fs2.statSync(cachedFilePath);
+ if (fs4.existsSync(cachedFilePath)) {
+   const stats = fs4.statSync(cachedFilePath);
    if (stats.size === drive.information.sizeInBytes) {
      return Object.freeze({
        path: cachedFilePath,
@@ -911,15 +911,15 @@ var driveFilePath = async (file) => {
        provider: "GOOGLE"
      });
    }
-   fs2.unlinkSync(cachedFilePath);
+   fs4.unlinkSync(cachedFilePath);
  }
  const accountId = drive.storageAccountId?.toString();
  const { stream } = await GoogleDriveProvider.openStream(drive, accountId);
- if (!fs2.existsSync(libraryDir)) {
-   fs2.mkdirSync(libraryDir, { recursive: true });
+ if (!fs4.existsSync(libraryDir)) {
+   fs4.mkdirSync(libraryDir, { recursive: true });
  }
  const tempPath = `${cachedFilePath}.tmp`;
- const writeStream = fs2.createWriteStream(tempPath);
+ const writeStream = fs4.createWriteStream(tempPath);
  await new Promise((resolve, reject) => {
    stream.pipe(writeStream);
    writeStream.on("finish", resolve);
@@ -927,11 +927,11 @@ var driveFilePath = async (file) => {
    stream.on("error", reject);
  });
  try {
-   fs2.renameSync(tempPath, cachedFilePath);
+   fs4.renameSync(tempPath, cachedFilePath);
  } catch (err) {
    if (err instanceof Error && "code" in err && err.code === "EXDEV") {
-     fs2.copyFileSync(tempPath, cachedFilePath);
-     fs2.unlinkSync(tempPath);
+     fs4.copyFileSync(tempPath, cachedFilePath);
+     fs4.unlinkSync(tempPath);
    } else {
      throw err;
    }
@@ -1022,19 +1022,28 @@ var driveUpload = async (source, key, options) => {
  let sourceFilePath;
  let fileSize;
  if (typeof source === "string") {
-   if (!fs2.existsSync(source)) {
+   if (!fs4.existsSync(source)) {
      throw new Error(`Source file not found: ${source}`);
    }
    sourceFilePath = source;
-   const stats = fs2.statSync(source);
+   const stats = fs4.statSync(source);
    fileSize = stats.size;
+ } else if (Buffer.isBuffer(source)) {
+   const tempDir = path3.join(os2.tmpdir(), "next-drive-uploads");
+   if (!fs4.existsSync(tempDir)) {
+     fs4.mkdirSync(tempDir, { recursive: true });
+   }
+   tempFilePath = path3.join(tempDir, `upload-${crypto3.randomUUID()}.tmp`);
+   fs4.writeFileSync(tempFilePath, source);
+   sourceFilePath = tempFilePath;
+   fileSize = source.length;
  } else {
    const tempDir = path3.join(os2.tmpdir(), "next-drive-uploads");
-   if (!fs2.existsSync(tempDir)) {
-     fs2.mkdirSync(tempDir, { recursive: true });
+   if (!fs4.existsSync(tempDir)) {
+     fs4.mkdirSync(tempDir, { recursive: true });
    }
    tempFilePath = path3.join(tempDir, `upload-${crypto3.randomUUID()}.tmp`);
-   const writeStream = fs2.createWriteStream(tempFilePath);
+   const writeStream = fs4.createWriteStream(tempFilePath);
    await new Promise((resolve, reject) => {
      source.pipe(writeStream);
      writeStream.on("finish", resolve);
@@ -1042,7 +1051,7 @@ var driveUpload = async (source, key, options) => {
      source.on("error", reject);
    });
    sourceFilePath = tempFilePath;
-   const stats = fs2.statSync(tempFilePath);
+   const stats = fs4.statSync(tempFilePath);
    fileSize = stats.size;
  }
  try {
@@ -1104,8 +1113,8 @@ var driveUpload = async (source, key, options) => {
      throw err;
    }
  } finally {
-   if (tempFilePath && fs2.existsSync(tempFilePath)) {
-     fs2.rmSync(tempFilePath, { force: true });
+   if (tempFilePath && fs4.existsSync(tempFilePath)) {
+     fs4.rmSync(tempFilePath, { force: true });
    }
  }
};
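Taken together, the driveUpload hunks above implement the Buffer support documented in the README: a Buffer.isBuffer branch writes the buffer to a temp file under os.tmpdir()/next-drive-uploads, the existing path-based pipeline then handles it, and the finally block removes the temp file. A minimal standalone sketch of that pattern (uploadFromBuffer and processFile are hypothetical names used for illustration, not the package's internals):

```ts
import fs from "fs";
import os from "os";
import path from "path";
import crypto from "crypto";

// Materialize a Buffer to a temp file, run a path-based pipeline on it, then clean up.
async function uploadFromBuffer(
  source: Buffer,
  processFile: (filePath: string) => Promise<void> // stand-in for the rest of the upload flow
): Promise<void> {
  const tempDir = path.join(os.tmpdir(), "next-drive-uploads");
  if (!fs.existsSync(tempDir)) fs.mkdirSync(tempDir, { recursive: true });
  const tempFilePath = path.join(tempDir, `upload-${crypto.randomUUID()}.tmp`);
  fs.writeFileSync(tempFilePath, source); // fileSize is simply source.length, as in the diff
  try {
    await processFile(tempFilePath); // same path-based handling as string sources
  } finally {
    // Mirrors the finally-block cleanup shown in the @@ -1104 hunk above.
    if (fs.existsSync(tempFilePath)) fs.rmSync(tempFilePath, { force: true });
  }
}
```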
@@ -1371,7 +1380,7 @@ var driveAPIHandler = async (req, res) => {
  case "upload": {
    if (req.method !== "POST") return res.status(405).json({ status: 405, message: "Only POST allowed" });
    const systemTmpDir = path3.join(os2.tmpdir(), "next-drive-uploads");
-   if (!fs2.existsSync(systemTmpDir)) fs2.mkdirSync(systemTmpDir, { recursive: true });
+   if (!fs4.existsSync(systemTmpDir)) fs4.mkdirSync(systemTmpDir, { recursive: true });
    const form = formidable({
      multiples: false,
      maxFileSize: config.security.maxUploadSizeInBytes * 2,
@@ -1386,7 +1395,7 @@ var driveAPIHandler = async (req, res) => {
    });
    const cleanupTempFiles = (files2) => {
      Object.values(files2).flat().forEach((file) => {
-       if (file && fs2.existsSync(file.filepath)) fs2.rmSync(file.filepath, { force: true });
+       if (file && fs4.existsSync(file.filepath)) fs4.rmSync(file.filepath, { force: true });
      });
    };
    const getString = (f) => Array.isArray(f) ? f[0] : f || "";
@@ -1420,7 +1429,7 @@ var driveAPIHandler = async (req, res) => {
      }
      currentUploadId = crypto.randomUUID();
      const uploadDir = path3.join(tempBaseDir, currentUploadId);
-     fs2.mkdirSync(uploadDir, { recursive: true });
+     fs4.mkdirSync(uploadDir, { recursive: true });
      const metadata = {
        owner,
        accountId,
@@ -1431,11 +1440,11 @@ var driveAPIHandler = async (req, res) => {
        mimeType: fileType,
        totalChunks
      };
-     fs2.writeFileSync(path3.join(uploadDir, "metadata.json"), JSON.stringify(metadata));
+     fs4.writeFileSync(path3.join(uploadDir, "metadata.json"), JSON.stringify(metadata));
    }
    if (currentUploadId) {
      const uploadDir = path3.join(tempBaseDir, currentUploadId);
-     if (!fs2.existsSync(uploadDir)) {
+     if (!fs4.existsSync(uploadDir)) {
        cleanupTempFiles(files);
        return res.status(404).json({ status: 404, message: "Upload session not found or expired" });
      }
@@ -1444,32 +1453,32 @@ var driveAPIHandler = async (req, res) => {
      if (!chunkFile) throw new Error("No chunk file received");
      const partPath = path3.join(uploadDir, `part_${chunkIndex}`);
      try {
-       fs2.renameSync(chunkFile.filepath, partPath);
+       fs4.renameSync(chunkFile.filepath, partPath);
      } catch (err) {
        if (err instanceof Error && "code" in err && err.code === "EXDEV") {
-         fs2.copyFileSync(chunkFile.filepath, partPath);
-         fs2.unlinkSync(chunkFile.filepath);
+         fs4.copyFileSync(chunkFile.filepath, partPath);
+         fs4.unlinkSync(chunkFile.filepath);
        } else {
          throw err;
        }
      }
-     const uploadedParts = fs2.readdirSync(uploadDir).filter((f) => f.startsWith("part_"));
+     const uploadedParts = fs4.readdirSync(uploadDir).filter((f) => f.startsWith("part_"));
      if (uploadedParts.length === totalChunks) {
        const metaPath = path3.join(uploadDir, "metadata.json");
-       const meta = JSON.parse(fs2.readFileSync(metaPath, "utf-8"));
+       const meta = JSON.parse(fs4.readFileSync(metaPath, "utf-8"));
        const finalTempPath = path3.join(uploadDir, "final.bin");
-       const writeStream = fs2.createWriteStream(finalTempPath);
+       const writeStream = fs4.createWriteStream(finalTempPath);
        await new Promise((resolve, reject) => {
          writeStream.on("open", () => resolve());
          writeStream.on("error", reject);
        });
        for (let i = 0; i < totalChunks; i++) {
          const pPath = path3.join(uploadDir, `part_${i}`);
-         if (!fs2.existsSync(pPath)) {
+         if (!fs4.existsSync(pPath)) {
            writeStream.destroy();
            throw new Error(`Missing chunk part: ${i}`);
          }
-         const data = fs2.readFileSync(pPath);
+         const data = fs4.readFileSync(pPath);
          writeStream.write(data);
        }
        await new Promise((resolve, reject) => {
@@ -1477,10 +1486,10 @@ var driveAPIHandler = async (req, res) => {
          writeStream.on("finish", resolve);
          writeStream.on("error", reject);
        });
-       if (!fs2.existsSync(finalTempPath)) {
+       if (!fs4.existsSync(finalTempPath)) {
          throw new Error("Failed to create merged file");
        }
-       const finalStats = fs2.statSync(finalTempPath);
+       const finalStats = fs4.statSync(finalTempPath);
        if (finalStats.size !== meta.fileSize) {
          throw new Error(`File size mismatch: expected ${meta.fileSize}, got ${finalStats.size}`);
        }
@@ -1506,7 +1515,7 @@ var driveAPIHandler = async (req, res) => {
        await drive.save();
        try {
          const item = await provider.uploadFile(drive, finalTempPath, meta.accountId);
-         fs2.rmSync(uploadDir, { recursive: true, force: true });
+         fs4.rmSync(uploadDir, { recursive: true, force: true });
          const newQuota = await provider.getQuota(meta.owner, meta.accountId, information.storage.quotaInBytes);
          res.status(200).json({ status: 200, message: "Upload complete", data: { type: "UPLOAD_COMPLETE", driveId: String(drive._id), item }, statistic: { storage: newQuota } });
        } catch (err) {
@@ -1536,9 +1545,9 @@ var driveAPIHandler = async (req, res) => {
    if (!cancelData.success) return res.status(400).json({ status: 400, message: "Invalid ID" });
    const { id } = cancelData.data;
    const tempUploadDir = path3.join(os2.tmpdir(), "next-drive-uploads", id);
-   if (fs2.existsSync(tempUploadDir)) {
+   if (fs4.existsSync(tempUploadDir)) {
      try {
-       fs2.rmSync(tempUploadDir, { recursive: true, force: true });
+       fs4.rmSync(tempUploadDir, { recursive: true, force: true });
      } catch (e) {
        console.error("Failed to cleanup temp upload:", e);
      }
@@ -1717,5 +1726,5 @@ var driveAPIHandler = async (req, res) => {
  };

  export { driveAPIHandler, driveConfiguration, driveDelete, driveFilePath, driveFileSchemaZod, driveGetUrl, driveList, driveReadFile, driveUpload, getDriveConfig, getDriveInformation };
- //# sourceMappingURL=chunk-WSOFHT7J.js.map
- //# sourceMappingURL=chunk-WSOFHT7J.js.map
+ //# sourceMappingURL=chunk-AID7MTCJ.js.map
+ //# sourceMappingURL=chunk-AID7MTCJ.js.map