@mr-aftab-ahmad-khan/upflow 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/LICENSE +15 -0
- package/README.md +414 -0
- package/dist/index.cjs +706 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +200 -0
- package/dist/index.d.ts +200 -0
- package/dist/index.js +655 -0
- package/dist/index.js.map +1 -0
- package/package.json +71 -0
package/dist/index.js
ADDED
@@ -0,0 +1,655 @@
// src/upflow.ts
import { EventEmitter } from "events";
import { PassThrough, Readable as Readable3 } from "stream";

// src/multipart.ts
import { Readable } from "stream";
async function* parseMultipart(body, boundary) {
  const buf = await collect(body);
  const parts = splitParts(buf, boundary);
  for (const p of parts) yield p;
}
async function collect(body) {
  const chunks = [];
  for await (const c of body) {
    chunks.push(Buffer.isBuffer(c) ? c : Buffer.from(c));
  }
  return Buffer.concat(chunks);
}
function splitParts(buf, boundary) {
  const out = [];
  const delim = Buffer.from(`--${boundary}`);
  const CRLF = Buffer.from("\r\n");
  let cursor = buf.indexOf(delim);
  if (cursor === -1) return out;
  cursor += delim.length;
  while (cursor < buf.length) {
    if (buf.slice(cursor, cursor + 2).equals(Buffer.from("--"))) return out;
    if (buf.slice(cursor, cursor + 2).equals(CRLF)) cursor += 2;
    const headerEnd = buf.indexOf(Buffer.from("\r\n\r\n"), cursor);
    if (headerEnd === -1) return out;
    const headerStr = buf.slice(cursor, headerEnd).toString("utf8");
    cursor = headerEnd + 4;
    const headers = {};
    for (const line of headerStr.split("\r\n")) {
      const c = line.indexOf(":");
      if (c === -1) continue;
      headers[line.slice(0, c).toLowerCase().trim()] = line.slice(c + 1).trim();
    }
    const sep = Buffer.concat([CRLF, delim]);
    const partEnd = buf.indexOf(sep, cursor);
    if (partEnd === -1) return out;
    const body = buf.slice(cursor, partEnd);
    cursor = partEnd + sep.length;
    const disposition = headers["content-disposition"] ?? "";
    const nameMatch = /name="([^"]*)"/.exec(disposition);
    const filenameMatch = /filename="([^"]*)"/.exec(disposition);
    const fieldName = nameMatch ? nameMatch[1] ?? "" : "";
    const filename = filenameMatch ? filenameMatch[1] ?? "" : "";
    const mimeType = headers["content-type"] ?? "application/octet-stream";
    if (filename) {
      out.push({
        type: "file",
        fieldName,
        filename,
        mimeType,
        stream: Readable.from(body)
      });
    } else {
      out.push({ type: "field", fieldName, value: body.toString("utf8") });
    }
  }
  return out;
}
function getBoundary(contentType) {
  if (!contentType) return void 0;
  const m = /boundary="?([^";]+)"?/i.exec(contentType);
  return m ? m[1] : void 0;
}
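
For reference, the module-internal `getBoundary` helper only extracts the boundary parameter of a Content-Type header; a minimal sketch of its behavior (the header values are illustrative, not from the package):

    // Hedged sketch of the internal helper's expected behavior.
    getBoundary('multipart/form-data; boundary="----WebKitFormBoundaryABC123"');
    // => "----WebKitFormBoundaryABC123"
    getBoundary("application/json"); // => undefined (no boundary parameter)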
|
|
69
|
+
|
|
70
|
+
// src/errors.ts
|
|
71
|
+
var UploadError = class _UploadError extends Error {
|
|
72
|
+
status;
|
|
73
|
+
constructor(message, status = 400) {
|
|
74
|
+
super(message);
|
|
75
|
+
this.name = "UploadError";
|
|
76
|
+
this.status = status;
|
|
77
|
+
Object.setPrototypeOf(this, _UploadError.prototype);
|
|
78
|
+
}
|
|
79
|
+
};
|
|
80
|
+
var FileTooLargeError = class _FileTooLargeError extends UploadError {
|
|
81
|
+
constructor(limit) {
|
|
82
|
+
super(`File exceeds maximum size of ${limit} bytes`, 413);
|
|
83
|
+
this.limit = limit;
|
|
84
|
+
this.name = "FileTooLargeError";
|
|
85
|
+
Object.setPrototypeOf(this, _FileTooLargeError.prototype);
|
|
86
|
+
}
|
|
87
|
+
limit;
|
|
88
|
+
};
|
|
89
|
+
var InvalidMimeTypeError = class _InvalidMimeTypeError extends UploadError {
|
|
90
|
+
constructor(mime, allowed) {
|
|
91
|
+
super(`Mime type "${mime}" is not in allowed list: ${allowed.join(", ")}`, 415);
|
|
92
|
+
this.mime = mime;
|
|
93
|
+
this.allowed = allowed;
|
|
94
|
+
this.name = "InvalidMimeTypeError";
|
|
95
|
+
Object.setPrototypeOf(this, _InvalidMimeTypeError.prototype);
|
|
96
|
+
}
|
|
97
|
+
mime;
|
|
98
|
+
allowed;
|
|
99
|
+
};
|
|
100
|
+
var StorageError = class _StorageError extends UploadError {
|
|
101
|
+
constructor(message, cause) {
|
|
102
|
+
super(message, 500);
|
|
103
|
+
this.cause = cause;
|
|
104
|
+
this.name = "StorageError";
|
|
105
|
+
Object.setPrototypeOf(this, _StorageError.prototype);
|
|
106
|
+
}
|
|
107
|
+
cause;
|
|
108
|
+
};
|
|
109
|
+
var UploadAbortedError = class _UploadAbortedError extends UploadError {
|
|
110
|
+
constructor() {
|
|
111
|
+
super("Upload was aborted by the client", 499);
|
|
112
|
+
this.name = "UploadAbortedError";
|
|
113
|
+
Object.setPrototypeOf(this, _UploadAbortedError.prototype);
|
|
114
|
+
}
|
|
115
|
+
};
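
Every error class here carries an HTTP `status` (400 by default, 413/415/499/500 for the subclasses), so a consumer can map them straight to responses. A minimal Express error-middleware sketch, assuming an Express app named `app` (the handler itself is not part of the package):

    import { UploadError } from "@mr-aftab-ahmad-khan/upflow";

    // Sketch: translate Upflow errors into HTTP responses.
    app.use((err, req, res, next) => {
      if (err instanceof UploadError) {
        return res.status(err.status).json({ error: err.message });
      }
      next(err);
    });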

// src/util.ts
import { Readable as Readable2 } from "stream";
import { randomUUID } from "crypto";
var RESERVED_WIN = /[<>:"/\\|?*\x00-\x1f]/g;
function sanitizeFilename(name) {
  let safe = name.replace(/\\/g, "/").split("/").pop() ?? "file";
  safe = safe.replace(RESERVED_WIN, "_");
  safe = safe.replace(/\.\./g, "_");
  safe = safe.replace(/\s+/g, "_");
  safe = safe.replace(/^[._-]+/, "");
  if (!safe || safe === ".") safe = "file";
  return safe.slice(0, 200);
}
function appendUuidSuffix(name) {
  const dot = name.lastIndexOf(".");
  const stem = dot > 0 ? name.slice(0, dot) : name;
  const ext = dot > 0 ? name.slice(dot) : "";
  return `${stem}-${randomUUID().slice(0, 8)}${ext}`;
}
var MAGIC_TABLE = [
  { bytes: [255, 216, 255], mime: "image/jpeg" },
  { bytes: [137, 80, 78, 71], mime: "image/png" },
  { bytes: [71, 73, 70, 56], mime: "image/gif" },
  { bytes: [66, 77], mime: "image/bmp" },
  { bytes: [82, 73, 70, 70], mime: "image/webp" },
  // also AVI/WAV — narrowed below
  { bytes: [102, 116, 121, 112], mime: "video/mp4", offset: 4 },
  { bytes: [37, 80, 68, 70], mime: "application/pdf" },
  { bytes: [80, 75, 3, 4], mime: "application/zip" },
  { bytes: [31, 139], mime: "application/gzip" }
];
function detectMimeFromBytes(buf) {
  for (const sig of MAGIC_TABLE) {
    const off = sig.offset ?? 0;
    if (buf.length < off + sig.bytes.length) continue;
    let ok = true;
    for (let i = 0; i < sig.bytes.length; i++) {
      if (buf[off + i] !== sig.bytes[i]) {
        ok = false;
        break;
      }
    }
    if (ok) {
      if (sig.mime === "image/webp" && buf.length >= 12) {
        const tag = buf.slice(8, 12).toString("ascii");
        if (tag === "WEBP") return "image/webp";
        if (tag === "WAVE") return "audio/wav";
        if (tag.startsWith("AVI")) return "video/x-msvideo";
        return void 0;
      }
      return sig.mime;
    }
  }
  return void 0;
}
function mimeMatchesAllowed(mime, allowed) {
  if (!allowed || allowed.length === 0) return true;
  for (const pattern of allowed) {
    if (pattern === mime) return true;
    if (pattern.endsWith("/*") && mime.startsWith(pattern.slice(0, -1))) return true;
    if (pattern === "*/*") return true;
  }
  return false;
}
function webStreamToNode(stream) {
  const reader = stream.getReader();
  return new Readable2({
    async read() {
      try {
        const { done, value } = await reader.read();
        if (done) this.push(null);
        else this.push(Buffer.from(value));
      } catch (err) {
        this.destroy(err);
      }
    }
  });
}
function applyTemplate(template, vars) {
  return template.replace(/\{([a-zA-Z0-9_-]+)\}/g, (_m, key) => vars[key] ?? "");
}
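
`sanitizeFilename`, `detectMimeFromBytes`, and `mimeMatchesAllowed` are exported (see the export block at the end of the file), so their behavior can be checked in isolation; a short sketch with illustrative inputs:

    import {
      sanitizeFilename, detectMimeFromBytes, mimeMatchesAllowed
    } from "@mr-aftab-ahmad-khan/upflow";

    sanitizeFilename("../../etc/passwd");   // => "passwd" (path parts dropped)
    sanitizeFilename("my photo (1).png");   // => "my_photo_(1).png"
    detectMimeFromBytes(Buffer.from([0x89, 0x50, 0x4e, 0x47])); // => "image/png"
    mimeMatchesAllowed("image/png", ["image/*"]);        // => true
    mimeMatchesAllowed("application/pdf", ["image/*"]);  // => false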

// src/upflow.ts
var Upflow = class extends EventEmitter {
  options;
  constructor(options) {
    super();
    this.options = options;
  }
  presign(opts) {
    if (!this.options.storage.presign) {
      throw new UploadError("Storage adapter does not support presigned uploads", 501);
    }
    return this.options.storage.presign(opts);
  }
  // -------------------- Express middleware --------------------
  single(fieldName) {
    return async (req, res, next) => {
      try {
        const files = await this.handleNodeRequest(req, { single: fieldName });
        req.file = files[0];
        next();
      } catch (err) {
        next(err);
      }
    };
  }
  array(fieldName, maxCount) {
    return async (req, _res, next) => {
      try {
        const files = await this.handleNodeRequest(req, { array: fieldName, maxCount });
        req.files = files;
        next();
      } catch (err) {
        next(err);
      }
    };
  }
  // -------------------- Generic fetch Request handler --------------------
  handler() {
    return async (request) => {
      const contentType = request.headers.get("content-type") ?? "";
      const boundary = getBoundary(contentType);
      if (!boundary) throw new UploadError("Missing multipart boundary");
      const body = request.body;
      if (!body) throw new UploadError("Missing request body");
      const nodeStream = webStreamToNode(body);
      const files = await this.consumeMultipart(nodeStream, boundary, request);
      return { files };
    };
  }
  // -------------------- Hono middleware --------------------
  hono() {
    return async (c, next) => {
      const result = await this.handler()(c.req.raw);
      c.set("uploadedFiles", result.files);
      await next();
    };
  }
  // -------------------- Fastify plugin --------------------
  fastify() {
    return async (instance) => {
      instance.addContentTypeParser(
        "multipart/form-data",
        { parseAs: "buffer" },
        async (req, payload) => {
          const boundary = getBoundary(req.headers["content-type"]);
          if (!boundary) throw new UploadError("Missing multipart boundary");
          const stream = Readable3.from(payload);
          return { files: await this.consumeMultipart(stream, boundary, req) };
        }
      );
    };
  }
  // -------------------- Next.js App Router --------------------
  nextjs() {
    return async (request) => {
      const result = await this.handler()(request);
      return new Response(JSON.stringify({ files: result.files }), {
        status: 200,
        headers: { "content-type": "application/json" }
      });
    };
  }
  // -------------------- Core --------------------
  async handleNodeRequest(req, opts) {
    const contentType = req.headers["content-type"] ?? "";
    const boundary = getBoundary(contentType);
    if (!boundary) throw new UploadError("Missing multipart boundary");
    const files = await this.consumeMultipart(req, boundary, req);
    const allowed = opts.single ?? opts.array;
    const matched = files.filter((f) => f.fieldName === allowed);
    if (opts.single) {
      if (matched.length === 0) throw new UploadError(`Expected file under field "${allowed}"`);
      return [matched[0]];
    }
    if (opts.maxCount && matched.length > opts.maxCount) {
      throw new UploadError(
        `Too many files for field "${allowed}" (got ${matched.length}, max ${opts.maxCount})`
      );
    }
    return matched;
  }
  async consumeMultipart(body, boundary, req) {
    const limits = this.options.limits ?? {};
    const files = [];
    let fileCount = 0;
    for await (const part of parseMultipart(body, boundary)) {
      if (part.type !== "file") continue;
      fileCount += 1;
      if (limits.files && fileCount > limits.files) {
        throw new UploadError(`Too many files (max ${limits.files})`, 413);
      }
      const safeOriginal = sanitizeFilename(part.filename);
      const filename = appendUuidSuffix(safeOriginal);
      await this.options.hooks?.onUploadStart?.({ filename, mimeType: part.mimeType }, req);
      const validated = await this.validateAndPipe(part, filename, limits);
      const stored = await this.options.storage.upload({
        stream: validated.stream,
        filename,
        mimeType: validated.mimeType
      });
      const result = {
        fieldName: part.fieldName,
        filename,
        originalName: part.filename,
        mimeType: validated.mimeType,
        size: stored.size,
        storageKey: stored.key,
        ...stored.url !== void 0 ? { url: stored.url } : {},
        metadata: {}
      };
      await this.options.hooks?.onUploadComplete?.(result, req);
      files.push(result);
    }
    return files;
  }
  async validateAndPipe(part, filename, limits) {
    const out = new PassThrough();
    const maxSize = limits.fileSize ?? Number.MAX_SAFE_INTEGER;
    let received = 0;
    let detected = part.mimeType;
    let head = Buffer.alloc(0);
    let validated = false;
    part.stream.on("error", (err) => out.destroy(err));
    part.stream.on("data", (chunk) => {
      received += chunk.length;
      if (received > maxSize) {
        const err = new FileTooLargeError(maxSize);
        out.destroy(err);
        part.stream.destroy(err);
        return;
      }
      if (!validated) {
        head = Buffer.concat([head, chunk]);
        if (head.length >= 16) {
          const sniffed = detectMimeFromBytes(head);
          if (sniffed) detected = sniffed;
          if (!mimeMatchesAllowed(detected, limits.allowedMimeTypes)) {
            const err = new InvalidMimeTypeError(detected, limits.allowedMimeTypes ?? []);
            out.destroy(err);
            part.stream.destroy(err);
            return;
          }
          validated = true;
          out.write(head);
          head = Buffer.alloc(0);
          return;
        }
        return;
      }
      out.write(chunk);
      this.emit("progress", {
        filename,
        bytesReceived: received,
        bytesTotal: null,
        percent: null
      });
    });
    part.stream.on("end", () => {
      if (!validated && head.length > 0) {
        const sniffed = detectMimeFromBytes(head) ?? detected;
        if (!mimeMatchesAllowed(sniffed, limits.allowedMimeTypes)) {
          out.destroy(new InvalidMimeTypeError(sniffed, limits.allowedMimeTypes ?? []));
          return;
        }
        detected = sniffed;
        out.write(head);
      }
      out.end();
    });
    part.stream.on("close", () => {
      if (!part.stream.readableEnded) out.destroy(new UploadAbortedError());
    });
    return { stream: out, mimeType: detected };
  }
};
function upflow(options) {
  return new Upflow(options);
}
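
Taken together, `upflow(options)` plus `.single()`/`.array()` is the Express path through this bundle: files land on `req.file`/`req.files`, and validation failures flow to the error handler via `next(err)`. A minimal wiring sketch, with the route path and field name (`/avatar`, `"avatar"`) as placeholders:

    import express from "express";
    import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";

    const uploads = upflow({
      storage: new DiskStorage({ root: "./uploads" }),
      limits: { fileSize: 5 * 1024 * 1024, allowedMimeTypes: ["image/*"] }
    });

    const app = express();
    // On success the middleware populates req.file with
    // { fieldName, filename, originalName, mimeType, size, storageKey, ... }.
    app.post("/avatar", uploads.single("avatar"), (req, res) => {
      res.json({ key: req.file.storageKey, size: req.file.size });
    });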

// src/storage/disk.ts
import { createWriteStream, existsSync, mkdirSync } from "fs";
import { dirname, join } from "path";
import { randomUUID as randomUUID2 } from "crypto";
import { pipeline } from "stream/promises";
var DiskStorage = class {
  root;
  pathTemplate;
  publicUrlPrefix;
  constructor(opts) {
    this.root = opts.root;
    this.pathTemplate = opts.pathTemplate ?? "{date}/{uuid}-{filename}";
    if (opts.publicUrlPrefix !== void 0) this.publicUrlPrefix = opts.publicUrlPrefix;
  }
  async upload(input) {
    const safe = sanitizeFilename(input.filename);
    const date = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
    const dot = safe.lastIndexOf(".");
    const ext = dot > 0 ? safe.slice(dot + 1) : "";
    const key = applyTemplate(this.pathTemplate, {
      date,
      uuid: randomUUID2(),
      filename: safe,
      ext
    });
    const fullPath = join(this.root, key);
    if (!existsSync(dirname(fullPath))) mkdirSync(dirname(fullPath), { recursive: true });
    let size = 0;
    input.stream.on("data", (c) => {
      size += c.length;
    });
    await pipeline(input.stream, createWriteStream(fullPath));
    const result = { key, size };
    if (this.publicUrlPrefix) {
      result.url = `${this.publicUrlPrefix.replace(/\/$/, "")}/${key}`;
    }
    return result;
  }
  async presign(_opts) {
    throw new Error("DiskStorage does not support presigned uploads");
  }
};
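
`pathTemplate` is expanded by `applyTemplate` with the `{date}`, `{uuid}`, `{filename}`, and `{ext}` variables seen above; a construction sketch with an illustrative layout and a hypothetical CDN prefix:

    // Stores files as e.g. "img/2024-01-15/ab12cd34-....png" under ./data.
    const storage = new DiskStorage({
      root: "./data",
      pathTemplate: "img/{date}/{uuid}.{ext}",
      publicUrlPrefix: "https://cdn.example.com" // placeholder host
    });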

// src/storage/s3.ts
import { randomUUID as randomUUID3, createHmac, createHash } from "crypto";
var DEFAULT_PART_SIZE = 8 * 1024 * 1024;
var DEFAULT_THRESHOLD = 5 * 1024 * 1024;
var S3Storage = class {
  bucket;
  region;
  endpoint;
  partSize;
  multipartThreshold;
  publicUrlPrefix;
  accessKeyId;
  secretAccessKey;
  client;
  constructor(opts) {
    this.bucket = opts.bucket;
    this.region = opts.region;
    this.endpoint = opts.endpoint ?? `https://s3.${opts.region}.amazonaws.com`;
    this.partSize = opts.partSize ?? DEFAULT_PART_SIZE;
    this.multipartThreshold = opts.multipartThreshold ?? DEFAULT_THRESHOLD;
    if (opts.publicUrlPrefix !== void 0) this.publicUrlPrefix = opts.publicUrlPrefix;
    if (opts.accessKeyId !== void 0) this.accessKeyId = opts.accessKeyId;
    if (opts.secretAccessKey !== void 0) this.secretAccessKey = opts.secretAccessKey;
    if (opts.client !== void 0) this.client = opts.client;
  }
  buildKey(filename) {
    const safe = sanitizeFilename(filename);
    return `${(/* @__PURE__ */ new Date()).toISOString().slice(0, 10)}/${randomUUID3()}-${safe}`;
  }
  buildUrl(key) {
    if (this.publicUrlPrefix) return `${this.publicUrlPrefix.replace(/\/$/, "")}/${key}`;
    return void 0;
  }
  async upload(input) {
    const key = this.buildKey(input.filename);
    if (this.client && hasSdk(this.client)) {
      try {
        await this.client.send(new (await loadCmd("PutObjectCommand"))({
          Bucket: this.bucket,
          Key: key,
          Body: input.stream,
          ContentType: input.mimeType
        }));
      } catch (err) {
        throw new StorageError("S3 PutObject failed", err);
      }
      const url2 = this.buildUrl(key);
      const result2 = { key, size: input.size ?? 0 };
      if (url2) result2.url = url2;
      return result2;
    }
    const chunks = [];
    let size = 0;
    for await (const c of input.stream) {
      const b = Buffer.isBuffer(c) ? c : Buffer.from(c);
      chunks.push(b);
      size += b.length;
    }
    const body = Buffer.concat(chunks);
    const url = `${this.endpoint}/${this.bucket}/${encodeURI(key)}`;
    const headers = await this.signRequest("PUT", url, body, input.mimeType);
    const res = await fetch(url, { method: "PUT", headers, body });
    if (!res.ok) throw new StorageError(`S3 PUT failed with ${res.status}`);
    const result = { key, size };
    const publicUrl = this.buildUrl(key);
    if (publicUrl) result.url = publicUrl;
    return result;
  }
  async presign(opts) {
    const key = this.buildKey(opts.filename);
    const expires = opts.expiresInSeconds ?? 600;
    const date = /* @__PURE__ */ new Date();
    const dateStamp = date.toISOString().slice(0, 10).replace(/-/g, "");
    const amzDate = date.toISOString().replace(/[-:]/g, "").replace(/\.\d{3}/, "");
    const credential = `${this.accessKeyId ?? ""}/${dateStamp}/${this.region}/s3/aws4_request`;
    const policy = Buffer.from(JSON.stringify({
      expiration: new Date(Date.now() + expires * 1e3).toISOString(),
      conditions: [
        { bucket: this.bucket },
        ["starts-with", "$key", key.split("/")[0] ?? ""],
        { "content-type": opts.contentType },
        ["content-length-range", 0, opts.maxSizeBytes ?? 100 * 1024 * 1024],
        { "x-amz-credential": credential },
        { "x-amz-algorithm": "AWS4-HMAC-SHA256" },
        { "x-amz-date": amzDate }
      ]
    })).toString("base64");
    const signingKey = await this.getSigningKey(dateStamp);
    const signature = createHmac("sha256", signingKey).update(policy).digest("hex");
    return {
      url: `${this.endpoint}/${this.bucket}`,
      fields: {
        key,
        "Content-Type": opts.contentType,
        "x-amz-credential": credential,
        "x-amz-algorithm": "AWS4-HMAC-SHA256",
        "x-amz-date": amzDate,
        Policy: policy,
        "x-amz-signature": signature
      },
      storageKey: key,
      expiresAt: new Date(Date.now() + expires * 1e3).toISOString()
    };
  }
  async signRequest(method, url, body, contentType) {
    const u = new URL(url);
    const date = /* @__PURE__ */ new Date();
    const amzDate = date.toISOString().replace(/[-:]/g, "").replace(/\.\d{3}/, "");
    const dateStamp = amzDate.slice(0, 8);
    const payloadHash = createHash("sha256").update(body).digest("hex");
    const canonicalUri = u.pathname;
    const canonicalQuery = "";
    const canonicalHeaders = `content-type:${contentType}
host:${u.host}
x-amz-content-sha256:${payloadHash}
x-amz-date:${amzDate}
`;
    const signedHeaders = "content-type;host;x-amz-content-sha256;x-amz-date";
    const canonicalReq = `${method}
${canonicalUri}
${canonicalQuery}
${canonicalHeaders}
${signedHeaders}
${payloadHash}`;
    const credentialScope = `${dateStamp}/${this.region}/s3/aws4_request`;
    const stringToSign = `AWS4-HMAC-SHA256
${amzDate}
${credentialScope}
${createHash("sha256").update(canonicalReq).digest("hex")}`;
    const signingKey = await this.getSigningKey(dateStamp);
    const signature = createHmac("sha256", signingKey).update(stringToSign).digest("hex");
    return {
      Authorization: `AWS4-HMAC-SHA256 Credential=${this.accessKeyId}/${credentialScope}, SignedHeaders=${signedHeaders}, Signature=${signature}`,
      "Content-Type": contentType,
      "x-amz-date": amzDate,
      "x-amz-content-sha256": payloadHash,
      host: u.host
    };
  }
  async getSigningKey(dateStamp) {
    const kDate = createHmac("sha256", `AWS4${this.secretAccessKey ?? ""}`).update(dateStamp).digest();
    const kRegion = createHmac("sha256", kDate).update(this.region).digest();
    const kService = createHmac("sha256", kRegion).update("s3").digest();
    return createHmac("sha256", kService).update("aws4_request").digest();
  }
};
function hasSdk(client) {
  return typeof client?.send === "function";
}
async function loadCmd(name) {
  try {
    const mod = await import(
      /* @vite-ignore */
      "@aws-sdk/client-s3"
    );
    return mod[name];
  } catch (err) {
    throw new StorageError(
      "@aws-sdk/client-s3 is required when passing a `client` to S3Storage; install it as a peer dependency.",
      err
    );
  }
}
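
`presign()` returns a POST-policy payload (`url`, `fields`, `storageKey`, `expiresAt`) meant to be forwarded to a client, which then POSTs the file directly to the bucket before the policy expires; a server-side sketch with placeholder bucket and environment-variable credentials:

    const s3 = new S3Storage({
      bucket: "my-bucket",   // placeholder
      region: "us-east-1",
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
    });

    // Run inside an async context. The client builds a multipart form
    // from `fields` plus the file and POSTs it to `url`.
    const { url, fields, storageKey, expiresAt } = await s3.presign({
      filename: "photo.png",
      contentType: "image/png",
      maxSizeBytes: 10 * 1024 * 1024
    });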

// src/storage/r2.ts
var R2Storage = class extends S3Storage {
  constructor(opts) {
    super({
      ...opts,
      region: "auto",
      endpoint: opts.endpoint ?? `https://${opts.accountId}.r2.cloudflarestorage.com`
    });
  }
};
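
Since R2 is S3-compatible, the subclass only pins `region: "auto"` and derives the endpoint from the account ID; a construction sketch with placeholder values:

    const r2 = new R2Storage({
      accountId: "0123456789abcdef",  // placeholder Cloudflare account ID
      bucket: "uploads",
      accessKeyId: process.env.R2_ACCESS_KEY_ID,
      secretAccessKey: process.env.R2_SECRET_ACCESS_KEY
    });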

// src/storage/memory.ts
import { randomUUID as randomUUID4 } from "crypto";
var MemoryStorage = class {
  store = /* @__PURE__ */ new Map();
  async upload(input) {
    const chunks = [];
    for await (const c of input.stream) chunks.push(Buffer.isBuffer(c) ? c : Buffer.from(c));
    const buffer = Buffer.concat(chunks);
    const key = `${randomUUID4()}-${input.filename}`;
    this.store.set(key, { buffer, mimeType: input.mimeType });
    return { key, size: buffer.length };
  }
  read(key) {
    return this.store.get(key)?.buffer;
  }
  list() {
    return [...this.store.keys()];
  }
  clear() {
    this.store.clear();
  }
};
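
`MemoryStorage` buffers uploads into a `Map`, which makes it convenient for tests; a standalone sketch (the stream source is illustrative, run inside an async context):

    import { Readable } from "stream";

    const mem = new MemoryStorage();
    const { key, size } = await mem.upload({
      stream: Readable.from(Buffer.from("hello")),
      filename: "hello.txt",
      mimeType: "text/plain"
    });
    console.log(size, mem.read(key)?.toString(), mem.list().length);
    // -> 5 hello 1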
export {
  DiskStorage,
  FileTooLargeError,
  InvalidMimeTypeError,
  MemoryStorage,
  R2Storage,
  S3Storage,
  StorageError,
  Upflow,
  UploadAbortedError,
  UploadError,
  appendUuidSuffix,
  detectMimeFromBytes,
  mimeMatchesAllowed,
  sanitizeFilename,
  upflow
};
//# sourceMappingURL=index.js.map