@mr-aftab-ahmad-khan/upflow 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/LICENSE +15 -0
- package/README.md +414 -0
- package/dist/index.cjs +706 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +200 -0
- package/dist/index.d.ts +200 -0
- package/dist/index.js +655 -0
- package/dist/index.js.map +1 -0
- package/package.json +71 -0
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
# Changelog
|
|
2
|
+
|
|
3
|
+
## [0.1.0] — 2026-05-15
|
|
4
|
+
|
|
5
|
+
### Added
|
|
6
|
+
|
|
7
|
+
- `Upflow` class with Express, Hono, Fastify, and Next.js App Router adapters.
|
|
8
|
+
- Pluggable storage: `DiskStorage`, `S3Storage`, `R2Storage`, `MemoryStorage`.
|
|
9
|
+
- Magic-byte MIME validation, filename sanitization, size limits, and progress hooks.
|
|
10
|
+
- Built-in SigV4 signer when no `@aws-sdk/client-s3` is available.
|
package/LICENSE
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 upflow contributors
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND.
|
package/README.md
ADDED
|
@@ -0,0 +1,414 @@
|
|
|
1
|
+
# upflow
|
|
2
|
+
|
|
3
|
+
[](https://www.npmjs.com/package/@mr-aftab-ahmad-khan/upflow)
|
|
4
|
+
[](https://bundlephobia.com/package/@mr-aftab-ahmad-khan/upflow)
|
|
5
|
+
[](./LICENSE)
|
|
6
|
+
[](https://www.typescriptlang.org/)
|
|
7
|
+
|
|
8
|
+
**Multer hasn't had a major release since 2019.** No S3 streaming. No TypeScript. No chunked uploads. No Hono. `upflow` is what Multer should have become — a modern, type-safe upload handler for **Express, Hono, Fastify, and Next.js App Router** with **direct streaming to S3/R2**, **multipart uploads**, **presigned URLs**, **MIME-byte validation**, and **progress events**.
|
|
9
|
+
|
|
10
|
+
---
|
|
11
|
+
|
|
12
|
+
## Installation
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
npm install @mr-aftab-ahmad-khan/upflow
|
|
16
|
+
pnpm add @mr-aftab-ahmad-khan/upflow
|
|
17
|
+
yarn add @mr-aftab-ahmad-khan/upflow
|
|
18
|
+
# Optional for S3:
|
|
19
|
+
npm install @aws-sdk/client-s3
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
---
|
|
23
|
+
|
|
24
|
+
## Quick Start
|
|
25
|
+
|
|
26
|
+
```ts
|
|
27
|
+
import express from "express";
|
|
28
|
+
import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
29
|
+
|
|
30
|
+
const upload = upflow({
|
|
31
|
+
storage: new DiskStorage({ root: "./uploads" }),
|
|
32
|
+
limits: { fileSize: 10 * 1024 * 1024 },
|
|
33
|
+
});
|
|
34
|
+
|
|
35
|
+
const app = express();
|
|
36
|
+
app.post("/upload", upload.single("file"), (req: any, res) => {
|
|
37
|
+
res.json(req.file);
|
|
38
|
+
});
|
|
39
|
+
app.listen(3000);
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
---
|
|
43
|
+
|
|
44
|
+
## Core Usage Examples
|
|
45
|
+
|
|
46
|
+
### 1. Single file upload to disk with Express
|
|
47
|
+
|
|
48
|
+
```ts
|
|
49
|
+
import express from "express";
|
|
50
|
+
import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
51
|
+
|
|
52
|
+
const upload = upflow({ storage: new DiskStorage({ root: "./uploads" }) });
|
|
53
|
+
|
|
54
|
+
const app = express();
|
|
55
|
+
app.post("/upload", upload.single("file"), (req: any, res) => res.json(req.file));
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
### 2. Multiple file upload to S3
|
|
59
|
+
|
|
60
|
+
```ts
|
|
61
|
+
import express from "express";
|
|
62
|
+
import { S3Client } from "@aws-sdk/client-s3";
|
|
63
|
+
import { upflow, S3Storage } from "@mr-aftab-ahmad-khan/upflow";
|
|
64
|
+
|
|
65
|
+
const upload = upflow({
|
|
66
|
+
storage: new S3Storage({
|
|
67
|
+
bucket: "uploads",
|
|
68
|
+
region: "us-east-1",
|
|
69
|
+
client: new S3Client({ region: "us-east-1" }),
|
|
70
|
+
}),
|
|
71
|
+
});
|
|
72
|
+
|
|
73
|
+
const app = express();
|
|
74
|
+
app.post("/upload", upload.array("files", 5), (req: any, res) => res.json(req.files));
|
|
75
|
+
```
|
|
76
|
+
|
|
77
|
+
### 3. Validate type and size
|
|
78
|
+
|
|
79
|
+
```ts
|
|
80
|
+
import { upflow, DiskStorage, FileTooLargeError, InvalidMimeTypeError } from "@mr-aftab-ahmad-khan/upflow";
|
|
81
|
+
|
|
82
|
+
const upload = upflow({
|
|
83
|
+
storage: new DiskStorage({ root: "./uploads" }),
|
|
84
|
+
limits: {
|
|
85
|
+
fileSize: 5 * 1024 * 1024,
|
|
86
|
+
allowedMimeTypes: ["image/jpeg", "image/png", "image/webp"],
|
|
87
|
+
},
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
app.post("/upload", upload.single("photo"), (_req, res) => res.json({ ok: true }), (err: any, _req: any, res: any, _next: any) => {
|
|
91
|
+
if (err instanceof FileTooLargeError) return res.status(413).json({ error: "too large" });
|
|
92
|
+
if (err instanceof InvalidMimeTypeError) return res.status(415).json({ error: "bad mime" });
|
|
93
|
+
res.status(500).json({ error: "upload failed" });
|
|
94
|
+
});
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### 4. Track upload progress
|
|
98
|
+
|
|
99
|
+
```ts
|
|
100
|
+
import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
101
|
+
|
|
102
|
+
const upload = upflow({ storage: new DiskStorage({ root: "./uploads" }) });
|
|
103
|
+
upload.on("progress", (e) => {
|
|
104
|
+
console.log(`${e.filename}: ${e.bytesReceived} bytes`);
|
|
105
|
+
});
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
### 5. Presigned URLs for direct browser upload
|
|
109
|
+
|
|
110
|
+
```ts
|
|
111
|
+
import { upflow, S3Storage } from "@mr-aftab-ahmad-khan/upflow";
|
|
112
|
+
|
|
113
|
+
const upload = upflow({
|
|
114
|
+
storage: new S3Storage({
|
|
115
|
+
bucket: "uploads",
|
|
116
|
+
region: "us-east-1",
|
|
117
|
+
accessKeyId: process.env.AWS_KEY!,
|
|
118
|
+
secretAccessKey: process.env.AWS_SECRET!,
|
|
119
|
+
}),
|
|
120
|
+
});
|
|
121
|
+
|
|
122
|
+
app.post("/presign", express.json(), async (req, res) => {
|
|
123
|
+
const result = await upload.presign({
|
|
124
|
+
filename: req.body.filename,
|
|
125
|
+
contentType: req.body.contentType,
|
|
126
|
+
maxSizeBytes: 50 * 1024 * 1024,
|
|
127
|
+
expiresInSeconds: 600,
|
|
128
|
+
});
|
|
129
|
+
res.json(result);
|
|
130
|
+
});
|
|
131
|
+
```
|
|
132
|
+
|
|
133
|
+
### 6. MemoryStorage for tests
|
|
134
|
+
|
|
135
|
+
```ts
|
|
136
|
+
import { describe, it, expect } from "vitest";
|
|
137
|
+
import { upflow, MemoryStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
138
|
+
|
|
139
|
+
it("uploads", async () => {
|
|
140
|
+
const storage = new MemoryStorage();
|
|
141
|
+
const upload = upflow({ storage });
|
|
142
|
+
// ... drive `upload.handler()` with a fetch Request and assert
|
|
143
|
+
});
|
|
144
|
+
```
|
|
145
|
+
|
|
146
|
+
---
|
|
147
|
+
|
|
148
|
+
## Framework Integration Examples
|
|
149
|
+
|
|
150
|
+
### Express
|
|
151
|
+
|
|
152
|
+
```ts
|
|
153
|
+
import express from "express";
|
|
154
|
+
import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
155
|
+
|
|
156
|
+
const upload = upflow({ storage: new DiskStorage({ root: "./uploads" }) });
|
|
157
|
+
|
|
158
|
+
const app = express();
|
|
159
|
+
app.post("/single", upload.single("file"), (req: any, res) => res.json(req.file));
|
|
160
|
+
app.post("/multiple", upload.array("files", 5), (req: any, res) => res.json(req.files));
|
|
161
|
+
```
|
|
162
|
+
|
|
163
|
+
### Hono (Cloudflare R2)
|
|
164
|
+
|
|
165
|
+
```ts
|
|
166
|
+
import { Hono } from "hono";
|
|
167
|
+
import { upflow, R2Storage } from "@mr-aftab-ahmad-khan/upflow";
|
|
168
|
+
|
|
169
|
+
const upload = upflow({
|
|
170
|
+
storage: new R2Storage({
|
|
171
|
+
accountId: "your-account",
|
|
172
|
+
bucket: "uploads",
|
|
173
|
+
accessKeyId: process.env.R2_KEY!,
|
|
174
|
+
secretAccessKey: process.env.R2_SECRET!,
|
|
175
|
+
}),
|
|
176
|
+
});
|
|
177
|
+
|
|
178
|
+
const app = new Hono();
|
|
179
|
+
app.post("/upload", upload.hono(), (c) => c.json({ files: c.get("uploadedFiles") }));
|
|
180
|
+
```
|
|
181
|
+
|
|
182
|
+
### Fastify
|
|
183
|
+
|
|
184
|
+
```ts
|
|
185
|
+
import Fastify from "fastify";
|
|
186
|
+
import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
187
|
+
|
|
188
|
+
const fastify = Fastify();
|
|
189
|
+
const upload = upflow({ storage: new DiskStorage({ root: "./uploads" }) });
|
|
190
|
+
await upload.fastify()(fastify);
|
|
191
|
+
|
|
192
|
+
fastify.post("/upload", async (req) => (req.body as { files: unknown[] }).files);
|
|
193
|
+
```
|
|
194
|
+
|
|
195
|
+
### Next.js App Router
|
|
196
|
+
|
|
197
|
+
```ts
|
|
198
|
+
// app/api/upload/route.ts
|
|
199
|
+
import { upflow, S3Storage } from "@mr-aftab-ahmad-khan/upflow";
|
|
200
|
+
|
|
201
|
+
const upload = upflow({
|
|
202
|
+
storage: new S3Storage({
|
|
203
|
+
bucket: "uploads",
|
|
204
|
+
region: "us-east-1",
|
|
205
|
+
accessKeyId: process.env.AWS_KEY!,
|
|
206
|
+
secretAccessKey: process.env.AWS_SECRET!,
|
|
207
|
+
}),
|
|
208
|
+
});
|
|
209
|
+
|
|
210
|
+
export const POST = upload.nextjs();
|
|
211
|
+
```
|
|
212
|
+
|
|
213
|
+
---
|
|
214
|
+
|
|
215
|
+
## Storage Adapter Reference
|
|
216
|
+
|
|
217
|
+
### DiskStorage
|
|
218
|
+
|
|
219
|
+
| Option | Type | Default | Description |
|
|
220
|
+
| ----------------- | -------- | ------------------------------------ | -------------------------- |
|
|
221
|
+
| `root` | `string` | — | Root upload directory |
|
|
222
|
+
| `pathTemplate`    | `string` | `"{date}/{uuid}-{filename}"`         | Variables: `date`, `uuid`, `filename`, `ext` |
|
|
223
|
+
| `publicUrlPrefix` | `string?`| `undefined` | If set, files get a `url` |
|
|
224
|
+
|
|
225
|
+
### S3Storage
|
|
226
|
+
|
|
227
|
+
| Option | Type | Default | Description |
|
|
228
|
+
| ------------------- | --------- | ---------------------------------------- | ------------------------------------------ |
|
|
229
|
+
| `bucket` | `string` | — | Bucket name |
|
|
230
|
+
| `region` | `string` | — | AWS region |
|
|
231
|
+
| `endpoint` | `string` | `https://s3.<region>.amazonaws.com` | For custom S3-compatible providers |
|
|
232
|
+
| `accessKeyId` | `string?` | env credentials | When `client` is not provided |
|
|
233
|
+
| `secretAccessKey` | `string?` | env credentials | When `client` is not provided |
|
|
234
|
+
| `partSize` | `number` | `8388608` (8 MB) | Multipart part size |
|
|
235
|
+
| `multipartThreshold`| `number` | `5242880` (5 MB) | Files larger than this use multipart |
|
|
236
|
+
| `publicUrlPrefix` | `string?` | `undefined` | CDN/public URL prefix |
|
|
237
|
+
| `client` | `S3Client?` | `undefined` | Optional `@aws-sdk/client-s3` instance |
|
|
238
|
+
|
|
239
|
+
### R2Storage
|
|
240
|
+
|
|
241
|
+
Same as `S3Storage` plus `accountId`. Region defaults to `"auto"`; endpoint defaults to `https://<accountId>.r2.cloudflarestorage.com`.
|
|
242
|
+
|
|
243
|
+
### MemoryStorage
|
|
244
|
+
|
|
245
|
+
No options. Useful in tests. Exposes `read(key)`, `list()`, `clear()`.
|
|
246
|
+
|
|
247
|
+
---
|
|
248
|
+
|
|
249
|
+
## Error Handling
|
|
250
|
+
|
|
251
|
+
```ts
|
|
252
|
+
import {
|
|
253
|
+
UploadError,
|
|
254
|
+
FileTooLargeError,
|
|
255
|
+
InvalidMimeTypeError,
|
|
256
|
+
StorageError,
|
|
257
|
+
UploadAbortedError,
|
|
258
|
+
} from "@mr-aftab-ahmad-khan/upflow";
|
|
259
|
+
|
|
260
|
+
app.use((err: any, _req: any, res: any, next: any) => {
|
|
261
|
+
if (err instanceof FileTooLargeError) return res.status(413).json({ error: err.message });
|
|
262
|
+
if (err instanceof InvalidMimeTypeError) return res.status(415).json({ error: err.message });
|
|
263
|
+
if (err instanceof StorageError) return res.status(500).json({ error: "storage failed" });
|
|
264
|
+
if (err instanceof UploadAbortedError) return res.status(499).end();
|
|
265
|
+
if (err instanceof UploadError) return res.status(err.status).json({ error: err.message });
|
|
266
|
+
next(err);
|
|
267
|
+
});
|
|
268
|
+
```
|
|
269
|
+
|
|
270
|
+
---
|
|
271
|
+
|
|
272
|
+
## TypeScript Types
|
|
273
|
+
|
|
274
|
+
```ts
|
|
275
|
+
import type {
|
|
276
|
+
UploadedFile,
|
|
277
|
+
UploadOptions,
|
|
278
|
+
StorageAdapter,
|
|
279
|
+
ProgressEvent,
|
|
280
|
+
PresignResult,
|
|
281
|
+
PresignOptions,
|
|
282
|
+
} from "@mr-aftab-ahmad-khan/upflow";
|
|
283
|
+
|
|
284
|
+
class GcsStorage implements StorageAdapter {
|
|
285
|
+
async upload(input: { stream: NodeJS.ReadableStream; filename: string; mimeType: string }) {
|
|
286
|
+
return { key: "...", size: 0 };
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
```
|
|
290
|
+
|
|
291
|
+
---
|
|
292
|
+
|
|
293
|
+
## Security Hardening
|
|
294
|
+
|
|
295
|
+
`upflow` ships with the following defaults:
|
|
296
|
+
|
|
297
|
+
- **Path traversal prevention** — `..`, backslashes, control characters stripped
|
|
298
|
+
- **MIME spoofing protection** — `Content-Type` ignored when bytes disagree
|
|
299
|
+
- **Filename sanitization** — UUID suffix prevents collision-based overwrites
|
|
300
|
+
- **Per-request limits** — `fileSize`, `files`, `allowedMimeTypes`
|
|
301
|
+
- **Aborted streams** raise `UploadAbortedError`
|
|
302
|
+
|
|
303
|
+
Recommended additions:
|
|
304
|
+
|
|
305
|
+
- Rate-limit upload endpoints with `express-rate-limit` or your gateway
|
|
306
|
+
- Cap concurrent uploads per IP with `p-limit`
|
|
307
|
+
- Run uploads through a virus scanner in `onUploadComplete` for user-generated content
|
|
308
|
+
- Set a strong `Content-Security-Policy` on the page making cross-origin uploads
|
|
309
|
+
|
|
310
|
+
---
|
|
311
|
+
|
|
312
|
+
## Real-World Recipe — Full Image Upload Service
|
|
313
|
+
|
|
314
|
+
```ts
|
|
315
|
+
import { Hono } from "hono";
|
|
316
|
+
import sharp from "sharp";
|
|
317
|
+
import { upflow, R2Storage } from "@mr-aftab-ahmad-khan/upflow";
|
|
318
|
+
|
|
319
|
+
const r2 = new R2Storage({
|
|
320
|
+
accountId: process.env.R2_ACCOUNT_ID!,
|
|
321
|
+
bucket: "img",
|
|
322
|
+
accessKeyId: process.env.R2_KEY!,
|
|
323
|
+
secretAccessKey: process.env.R2_SECRET!,
|
|
324
|
+
publicUrlPrefix: "https://cdn.example.com",
|
|
325
|
+
});
|
|
326
|
+
|
|
327
|
+
const upload = upflow({
|
|
328
|
+
storage: r2,
|
|
329
|
+
limits: {
|
|
330
|
+
fileSize: 20 * 1024 * 1024,
|
|
331
|
+
allowedMimeTypes: ["image/jpeg", "image/png", "image/webp"],
|
|
332
|
+
},
|
|
333
|
+
hooks: {
|
|
334
|
+
async onUploadComplete(file) {
|
|
335
|
+
const buffer = Buffer.alloc(0); // pulled from r2.get(file.storageKey)
|
|
336
|
+
const thumb = await sharp(buffer).resize(300).webp().toBuffer();
|
|
337
|
+
// write thumb to a `${file.storageKey}.thumb.webp` key (omitted for brevity)
|
|
338
|
+
console.log("thumb ready for", file.storageKey);
|
|
339
|
+
},
|
|
340
|
+
},
|
|
341
|
+
});
|
|
342
|
+
|
|
343
|
+
const app = new Hono();
|
|
344
|
+
app.post("/upload", upload.hono(), (c) => {
|
|
345
|
+
const files = c.get("uploadedFiles");
|
|
346
|
+
return c.json({ files });
|
|
347
|
+
});
|
|
348
|
+
```
|
|
349
|
+
|
|
350
|
+
```html
|
|
351
|
+
<!-- Browser frontend (presigned URL flow, plain JS) -->
|
|
352
|
+
<input id="file" type="file" />
|
|
353
|
+
<script type="module">
|
|
354
|
+
document.getElementById("file").addEventListener("change", async (e) => {
|
|
355
|
+
const file = e.target.files[0];
|
|
356
|
+
const { url, fields, storageKey } = await fetch("/presign", {
|
|
357
|
+
method: "POST",
|
|
358
|
+
headers: { "content-type": "application/json" },
|
|
359
|
+
body: JSON.stringify({ filename: file.name, contentType: file.type }),
|
|
360
|
+
}).then((r) => r.json());
|
|
361
|
+
|
|
362
|
+
const fd = new FormData();
|
|
363
|
+
for (const [k, v] of Object.entries(fields)) fd.append(k, v);
|
|
364
|
+
fd.append("file", file);
|
|
365
|
+
await fetch(url, { method: "POST", body: fd });
|
|
366
|
+
console.log("uploaded as", storageKey);
|
|
367
|
+
});
|
|
368
|
+
</script>
|
|
369
|
+
```
|
|
370
|
+
|
|
371
|
+
---
|
|
372
|
+
|
|
373
|
+
## Migration Guide from Multer
|
|
374
|
+
|
|
375
|
+
```diff
|
|
376
|
+
- import multer from "multer";
|
|
377
|
+
+ import { upflow, DiskStorage } from "@mr-aftab-ahmad-khan/upflow";
|
|
378
|
+
|
|
379
|
+
- const upload = multer({ dest: "./uploads" });
|
|
380
|
+
+ const upload = upflow({ storage: new DiskStorage({ root: "./uploads" }) });
|
|
381
|
+
|
|
382
|
+
app.post("/", upload.single("file"), (req, res) => res.json(req.file));
|
|
383
|
+
```
|
|
384
|
+
|
|
385
|
+
For S3:
|
|
386
|
+
|
|
387
|
+
```diff
|
|
388
|
+
- import multer from "multer";
|
|
389
|
+
- import multerS3 from "multer-s3";
|
|
390
|
+
- const upload = multer({ storage: multerS3({ s3, bucket: "x" }) });
|
|
391
|
+
+ import { S3Client } from "@aws-sdk/client-s3";
|
|
392
|
+
+ import { upflow, S3Storage } from "@mr-aftab-ahmad-khan/upflow";
|
|
393
|
+
+ const upload = upflow({ storage: new S3Storage({ bucket: "x", region: "us-east-1", client: new S3Client({ region: "us-east-1" }) }) });
|
|
394
|
+
```
|
|
395
|
+
|
|
396
|
+
---
|
|
397
|
+
|
|
398
|
+
## Comparison Table
|
|
399
|
+
|
|
400
|
+
| Feature | Multer | Formidable | **upflow** |
|
|
401
|
+
| ----------------------------- | :----: | :--------: | :--------: |
|
|
402
|
+
| TypeScript types | ⚠️ | ⚠️ | ✅ |
|
|
403
|
+
| S3 / R2 streaming | ❌ | ❌ | ✅ |
|
|
404
|
+
| Multipart upload (>5 MB) | ❌ | ❌ | ✅ |
|
|
405
|
+
| Presigned URLs | ❌ | ❌ | ✅ |
|
|
406
|
+
| Hono / Fastify adapters | ❌ | ❌ | ✅ |
|
|
407
|
+
| MIME validation from bytes | ❌ | ❌ | ✅ |
|
|
408
|
+
| Progress events | ⚠️ | ✅ | ✅ |
|
|
409
|
+
|
|
410
|
+
---
|
|
411
|
+
|
|
412
|
+
## License
|
|
413
|
+
|
|
414
|
+
MIT
|