@uploadbox/nextjs 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/analytics.d.ts +11 -0
  2. package/dist/analytics.d.ts.map +1 -0
  3. package/dist/analytics.js +50 -0
  4. package/dist/analytics.js.map +1 -0
  5. package/dist/auth.d.ts +4 -0
  6. package/dist/auth.d.ts.map +1 -0
  7. package/dist/auth.js +46 -0
  8. package/dist/auth.js.map +1 -0
  9. package/dist/create-hosted-handler.d.ts +17 -0
  10. package/dist/create-hosted-handler.d.ts.map +1 -0
  11. package/dist/create-hosted-handler.js +13 -0
  12. package/dist/create-hosted-handler.js.map +1 -0
  13. package/dist/create-route-handler.d.ts +7 -0
  14. package/dist/create-route-handler.d.ts.map +1 -0
  15. package/dist/create-route-handler.js +469 -0
  16. package/dist/create-route-handler.js.map +1 -0
  17. package/dist/extract-router-config.d.ts +3 -0
  18. package/dist/extract-router-config.d.ts.map +1 -0
  19. package/dist/extract-router-config.js +8 -0
  20. package/dist/extract-router-config.js.map +1 -0
  21. package/dist/hosted-hooks.d.ts +9 -0
  22. package/dist/hosted-hooks.d.ts.map +1 -0
  23. package/dist/hosted-hooks.js +105 -0
  24. package/dist/hosted-hooks.js.map +1 -0
  25. package/dist/index.d.ts +10 -0
  26. package/dist/index.d.ts.map +1 -0
  27. package/dist/index.js +7 -0
  28. package/dist/index.js.map +1 -0
  29. package/dist/processing-pipeline.d.ts +3 -0
  30. package/dist/processing-pipeline.d.ts.map +1 -0
  31. package/dist/processing-pipeline.js +68 -0
  32. package/dist/processing-pipeline.js.map +1 -0
  33. package/dist/quota.d.ts +5 -0
  34. package/dist/quota.d.ts.map +1 -0
  35. package/dist/quota.js +68 -0
  36. package/dist/quota.js.map +1 -0
  37. package/dist/rate-limiter.d.ts +17 -0
  38. package/dist/rate-limiter.d.ts.map +1 -0
  39. package/dist/rate-limiter.js +47 -0
  40. package/dist/rate-limiter.js.map +1 -0
  41. package/dist/types.d.ts +104 -0
  42. package/dist/types.d.ts.map +1 -0
  43. package/dist/types.js +2 -0
  44. package/dist/types.js.map +1 -0
  45. package/dist/webhooks.d.ts +7 -0
  46. package/dist/webhooks.d.ts.map +1 -0
  47. package/dist/webhooks.js +151 -0
  48. package/dist/webhooks.js.map +1 -0
  49. package/package.json +53 -0
  50. package/src/create-hosted-handler.ts +27 -0
  51. package/src/create-route-handler.ts +654 -0
  52. package/src/extract-router-config.ts +9 -0
  53. package/src/hosted-hooks.ts +132 -0
  54. package/src/index.ts +19 -0
  55. package/src/processing-pipeline.ts +77 -0
  56. package/src/rate-limiter.ts +64 -0
  57. package/src/types.ts +129 -0
package/dist/analytics.d.ts ADDED
@@ -0,0 +1,11 @@
+ import type { Database } from "@uploadbox/db";
+ interface AnalyticsEvent {
+     type: "upload_initiated" | "upload_completed" | "upload_failed" | "bytes_served";
+     routeKey?: string;
+     apiKeyId?: string;
+     count?: number;
+     bytes?: number;
+ }
+ export declare function recordAnalytics(db: Database, event: AnalyticsEvent): Promise<void>;
+ export {};
+ //# sourceMappingURL=analytics.d.ts.map
package/dist/analytics.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"analytics.d.ts","sourceRoot":"","sources":["../src/analytics.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAC;AAE9C,UAAU,cAAc;IACtB,IAAI,EAAE,kBAAkB,GAAG,kBAAkB,GAAG,eAAe,GAAG,cAAc,CAAC;IACjF,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,wBAAsB,eAAe,CACnC,EAAE,EAAE,QAAQ,EACZ,KAAK,EAAE,cAAc,GACpB,OAAO,CAAC,IAAI,CAAC,CAkDf"}
package/dist/analytics.js ADDED
@@ -0,0 +1,50 @@
+ import { analyticsDaily, sql } from "@uploadbox/db";
+ export async function recordAnalytics(db, event) {
+     const today = new Date().toISOString().split("T")[0];
+     const routeKey = event.routeKey ?? null;
+     const apiKeyId = event.apiKeyId ?? null;
+     const updates = {
+         updatedAt: new Date(),
+     };
+     switch (event.type) {
+         case "upload_initiated":
+             updates.uploadsInitiated = sql `${analyticsDaily.uploadsInitiated} + ${event.count ?? 1}`;
+             break;
+         case "upload_completed":
+             updates.uploadsCompleted = sql `${analyticsDaily.uploadsCompleted} + ${event.count ?? 1}`;
+             if (event.bytes) {
+                 updates.bytesUploaded = sql `${analyticsDaily.bytesUploaded} + ${event.bytes}`;
+             }
+             break;
+         case "upload_failed":
+             updates.uploadsFailed = sql `${analyticsDaily.uploadsFailed} + ${event.count ?? 1}`;
+             break;
+         case "bytes_served":
+             if (event.bytes) {
+                 updates.bytesServed = sql `${analyticsDaily.bytesServed} + ${event.bytes}`;
+             }
+             break;
+     }
+     // Upsert: INSERT ... ON CONFLICT DO UPDATE
+     await db
+         .insert(analyticsDaily)
+         .values({
+             date: today,
+             routeKey,
+             apiKeyId,
+             uploadsInitiated: event.type === "upload_initiated" ? (event.count ?? 1) : 0,
+             uploadsCompleted: event.type === "upload_completed" ? (event.count ?? 1) : 0,
+             uploadsFailed: event.type === "upload_failed" ? (event.count ?? 1) : 0,
+             bytesUploaded: event.type === "upload_completed" ? (event.bytes ?? 0) : 0,
+             bytesServed: event.type === "bytes_served" ? (event.bytes ?? 0) : 0,
+         })
+         .onConflictDoUpdate({
+             target: [analyticsDaily.date, analyticsDaily.routeKey, analyticsDaily.apiKeyId],
+             set: updates,
+         })
+         .catch((err) => {
+             // If the unique index doesn't exist yet, fall back to simple insert
+             console.warn("[uploadbox] Analytics upsert failed, attempting insert:", err.message);
+         });
+ }
+ //# sourceMappingURL=analytics.js.map
package/dist/analytics.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"analytics.js","sourceRoot":"","sources":["../src/analytics.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,MAAM,eAAe,CAAC;AAWpD,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,EAAY,EACZ,KAAqB;IAErB,MAAM,KAAK,GAAG,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAE,CAAC;IACtD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,IAAI,IAAI,CAAC;IACxC,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,IAAI,IAAI,CAAC;IAExC,MAAM,OAAO,GAA4B;QACvC,SAAS,EAAE,IAAI,IAAI,EAAE;KACtB,CAAC;IAEF,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;QACnB,KAAK,kBAAkB;YACrB,OAAO,CAAC,gBAAgB,GAAG,GAAG,CAAA,GAAG,cAAc,CAAC,gBAAgB,MAAM,KAAK,CAAC,KAAK,IAAI,CAAC,EAAE,CAAC;YACzF,MAAM;QACR,KAAK,kBAAkB;YACrB,OAAO,CAAC,gBAAgB,GAAG,GAAG,CAAA,GAAG,cAAc,CAAC,gBAAgB,MAAM,KAAK,CAAC,KAAK,IAAI,CAAC,EAAE,CAAC;YACzF,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;gBAChB,OAAO,CAAC,aAAa,GAAG,GAAG,CAAA,GAAG,cAAc,CAAC,aAAa,MAAM,KAAK,CAAC,KAAK,EAAE,CAAC;YAChF,CAAC;YACD,MAAM;QACR,KAAK,eAAe;YAClB,OAAO,CAAC,aAAa,GAAG,GAAG,CAAA,GAAG,cAAc,CAAC,aAAa,MAAM,KAAK,CAAC,KAAK,IAAI,CAAC,EAAE,CAAC;YACnF,MAAM;QACR,KAAK,cAAc;YACjB,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;gBAChB,OAAO,CAAC,WAAW,GAAG,GAAG,CAAA,GAAG,cAAc,CAAC,WAAW,MAAM,KAAK,CAAC,KAAK,EAAE,CAAC;YAC5E,CAAC;YACD,MAAM;IACV,CAAC;IAED,2CAA2C;IAC3C,MAAM,EAAE;SACL,MAAM,CAAC,cAAc,CAAC;SACtB,MAAM,CAAC;QACN,IAAI,EAAE,KAAK;QACX,QAAQ;QACR,QAAQ;QACR,gBAAgB,EAAE,KAAK,CAAC,IAAI,KAAK,kBAAkB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5E,gBAAgB,EAAE,KAAK,CAAC,IAAI,KAAK,kBAAkB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5E,aAAa,EAAE,KAAK,CAAC,IAAI,KAAK,eAAe,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACtE,aAAa,EAAE,KAAK,CAAC,IAAI,KAAK,kBAAkB,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzE,WAAW,EAAE,KAAK,CAAC,IAAI,KAAK,cAAc,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;KACpE,CAAC;SACD,kBAAkB,CAAC;QAClB,MAAM,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,cAAc,CAAC,QAAQ,EAAE,cAAc,CAAC,QAAQ,CAAC;QAC/E,GAAG,EAAE,OAAO;KACb,CAAC;SACD,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;QACb,oEAAoE;QACpE,OAAO,CAAC,IAAI,CAAC,yDAAyD,EAAE,GAAG,CAAC,OAAO,CAAC,CAAC;IACvF,CAAC,CAAC,CAAC;AACP,CAAC"}
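
For orientation, the compiled recordAnalytics above takes a Database handle from "@uploadbox/db" plus an event object and increments the matching daily counters. A minimal calling sketch; the db parameter, the wrapper function, and the relative import path are illustrative assumptions, not part of this package:

    // Sketch only: assumes an already-constructed Database instance from "@uploadbox/db".
    import type { Database } from "@uploadbox/db";
    import { recordAnalytics } from "./analytics.js";

    export async function trackCompletedUpload(db: Database, routeKey: string, apiKeyId: string, bytes: number): Promise<void> {
        // Bumps the daily counters for this route/API key; upsert failures are logged and swallowed inside recordAnalytics.
        await recordAnalytics(db, { type: "upload_completed", routeKey, apiKeyId, count: 1, bytes });
    }
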
package/dist/auth.d.ts ADDED
@@ -0,0 +1,4 @@
+ import type { AuthContext } from "@uploadbox/core";
+ import type { Database } from "@uploadbox/db";
+ export declare function authenticateRequest(request: Request, db: Database): Promise<AuthContext>;
+ //# sourceMappingURL=auth.d.ts.map
package/dist/auth.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"auth.d.ts","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,iBAAiB,CAAC;AAGnD,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAC;AAM9C,wBAAsB,mBAAmB,CACvC,OAAO,EAAE,OAAO,EAChB,EAAE,EAAE,QAAQ,GACX,OAAO,CAAC,WAAW,CAAC,CA8CtB"}
package/dist/auth.js ADDED
@@ -0,0 +1,46 @@
+ import { createHash } from "crypto";
+ import { UploadboxError } from "@uploadbox/core";
+ import { apiKeys, eq } from "@uploadbox/db";
+ function hashKey(key) {
+     return createHash("sha256").update(key).digest("hex");
+ }
+ export async function authenticateRequest(request, db) {
+     const authHeader = request.headers.get("authorization");
+     if (!authHeader?.startsWith("Bearer ")) {
+         throw UploadboxError.unauthorized("Missing or invalid Authorization header");
+     }
+     const rawKey = authHeader.slice(7);
+     if (!rawKey) {
+         throw UploadboxError.unauthorized("Empty API key");
+     }
+     const keyHash = hashKey(rawKey);
+     const [apiKey] = await db
+         .select({
+             id: apiKeys.id,
+             name: apiKeys.name,
+             isActive: apiKeys.isActive,
+             expiresAt: apiKeys.expiresAt,
+         })
+         .from(apiKeys)
+         .where(eq(apiKeys.keyHash, keyHash))
+         .limit(1);
+     if (!apiKey) {
+         throw UploadboxError.unauthorized("Invalid API key");
+     }
+     if (!apiKey.isActive) {
+         throw UploadboxError.unauthorized("API key has been revoked");
+     }
+     if (apiKey.expiresAt && new Date(apiKey.expiresAt) < new Date()) {
+         throw UploadboxError.unauthorized("API key has expired");
+     }
+     // Fire-and-forget: update lastUsedAt
+     db.update(apiKeys)
+         .set({ lastUsedAt: new Date() })
+         .where(eq(apiKeys.id, apiKey.id))
+         .catch(() => { });
+     return {
+         apiKeyId: apiKey.id,
+         apiKeyName: apiKey.name,
+     };
+ }
+ //# sourceMappingURL=auth.js.map
package/dist/auth.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"auth.js","sourceRoot":"","sources":["../src/auth.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,EAAE,OAAO,EAAE,EAAE,EAAO,MAAM,eAAe,CAAC;AAGjD,SAAS,OAAO,CAAC,GAAW;IAC1B,OAAO,UAAU,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AACxD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,mBAAmB,CACvC,OAAgB,EAChB,EAAY;IAEZ,MAAM,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,CAAC;IACxD,IAAI,CAAC,UAAU,EAAE,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;QACvC,MAAM,cAAc,CAAC,YAAY,CAAC,yCAAyC,CAAC,CAAC;IAC/E,CAAC;IAED,MAAM,MAAM,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IACnC,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,MAAM,cAAc,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;IACrD,CAAC;IAED,MAAM,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;IAEhC,MAAM,CAAC,MAAM,CAAC,GAAG,MAAM,EAAE;SACtB,MAAM,CAAC;QACN,EAAE,EAAE,OAAO,CAAC,EAAE;QACd,IAAI,EAAE,OAAO,CAAC,IAAI;QAClB,QAAQ,EAAE,OAAO,CAAC,QAAQ;QAC1B,SAAS,EAAE,OAAO,CAAC,SAAS;KAC7B,CAAC;SACD,IAAI,CAAC,OAAO,CAAC;SACb,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;SACnC,KAAK,CAAC,CAAC,CAAC,CAAC;IAEZ,IAAI,CAAC,MAAM,EAAE,CAAC;QACZ,MAAM,cAAc,CAAC,YAAY,CAAC,iBAAiB,CAAC,CAAC;IACvD,CAAC;IAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC;QACrB,MAAM,cAAc,CAAC,YAAY,CAAC,0BAA0B,CAAC,CAAC;IAChE,CAAC;IAED,IAAI,MAAM,CAAC,SAAS,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,IAAI,EAAE,EAAE,CAAC;QAChE,MAAM,cAAc,CAAC,YAAY,CAAC,qBAAqB,CAAC,CAAC;IAC3D,CAAC;IAED,qCAAqC;IACrC,EAAE,CAAC,MAAM,CAAC,OAAO,CAAC;SACf,GAAG,CAAC,EAAE,UAAU,EAAE,IAAI,IAAI,EAAE,EAAE,CAAC;SAC/B,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC;SAChC,KAAK,CAAC,GAAG,EAAE,GAAE,CAAC,CAAC,CAAC;IAEnB,OAAO;QACL,QAAQ,EAAE,MAAM,CAAC,EAAE;QACnB,UAAU,EAAE,MAAM,CAAC,IAAI;KACxB,CAAC;AACJ,CAAC"}
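
The authentication helper above expects a Bearer token on the request and a SHA-256 hash of that key stored in the apiKeys table. A minimal sketch of a call site; the URL and the wrapper function are illustrative assumptions:

    // Sketch only: the request URL and wrapper function are placeholders.
    import type { Database } from "@uploadbox/db";
    import { authenticateRequest } from "./auth.js";

    export async function resolveAuth(db: Database, rawApiKey: string) {
        const request = new Request("https://example.com/api/uploadbox", {
            method: "POST",
            headers: { authorization: `Bearer ${rawApiKey}` },
        });
        // Throws UploadboxError.unauthorized(...) for missing, unknown, revoked, or expired keys.
        return authenticateRequest(request, db); // resolves to { apiKeyId, apiKeyName }
    }
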
package/dist/create-hosted-handler.d.ts ADDED
@@ -0,0 +1,17 @@
+ import type { FileRouter, UploadboxConfig } from "@uploadbox/core";
+ import type { ProcessingPipelineConfig } from "@uploadbox/core";
+ import type { RateLimitConfig } from "./rate-limiter.js";
+ import { type HostedModeConfig } from "./hosted-hooks.js";
+ interface CreateHostedHandlerOpts<TRouter extends FileRouter> {
+     router: TRouter;
+     config?: Partial<UploadboxConfig>;
+     platform: HostedModeConfig;
+     rateLimit?: RateLimitConfig;
+     processing?: ProcessingPipelineConfig;
+ }
+ export declare function createHostedHandler<TRouter extends FileRouter>(opts: CreateHostedHandlerOpts<TRouter>): {
+     GET: () => Promise<Response>;
+     POST: (request: Request) => Promise<Response>;
+ };
+ export {};
+ //# sourceMappingURL=create-hosted-handler.d.ts.map
package/dist/create-hosted-handler.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"create-hosted-handler.d.ts","sourceRoot":"","sources":["../src/create-hosted-handler.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAC;AACnE,OAAO,KAAK,EAAE,wBAAwB,EAAE,MAAM,iBAAiB,CAAC;AAChE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAEzD,OAAO,EAAqB,KAAK,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AAE7E,UAAU,uBAAuB,CAAC,OAAO,SAAS,UAAU;IAC1D,MAAM,EAAE,OAAO,CAAC;IAChB,MAAM,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,CAAC;IAClC,QAAQ,EAAE,gBAAgB,CAAC;IAC3B,SAAS,CAAC,EAAE,eAAe,CAAC;IAC5B,UAAU,CAAC,EAAE,wBAAwB,CAAC;CACvC;AAED,wBAAgB,mBAAmB,CAAC,OAAO,SAAS,UAAU,EAC5D,IAAI,EAAE,uBAAuB,CAAC,OAAO,CAAC;;;EAWvC"}
package/dist/create-hosted-handler.js ADDED
@@ -0,0 +1,13 @@
+ import { createRouteHandler } from "./create-route-handler.js";
+ import { createHostedHooks } from "./hosted-hooks.js";
+ export function createHostedHandler(opts) {
+     const hooks = createHostedHooks(opts.platform);
+     return createRouteHandler({
+         router: opts.router,
+         config: opts.config,
+         hooks,
+         rateLimit: opts.rateLimit,
+         processing: opts.processing,
+     });
+ }
+ //# sourceMappingURL=create-hosted-handler.js.map
package/dist/create-hosted-handler.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"create-hosted-handler.js","sourceRoot":"","sources":["../src/create-hosted-handler.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,2BAA2B,CAAC;AAC/D,OAAO,EAAE,iBAAiB,EAAyB,MAAM,mBAAmB,CAAC;AAU7E,MAAM,UAAU,mBAAmB,CACjC,IAAsC;IAEtC,MAAM,KAAK,GAAG,iBAAiB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IAE/C,OAAO,kBAAkB,CAAC;QACxB,MAAM,EAAE,IAAI,CAAC,MAAM;QACnB,MAAM,EAAE,IAAI,CAAC,MAAM;QACnB,KAAK;QACL,SAAS,EAAE,IAAI,CAAC,SAAS;QACzB,UAAU,EAAE,IAAI,CAAC,UAAU;KAC5B,CAAC,CAAC;AACL,CAAC"}
package/dist/create-route-handler.d.ts ADDED
@@ -0,0 +1,7 @@
+ import { type FileRouter } from "@uploadbox/core";
+ import type { RouteHandlerOpts } from "./types.js";
+ export declare function createRouteHandler<TRouter extends FileRouter>(opts: RouteHandlerOpts<TRouter>): {
+     GET: () => Promise<Response>;
+     POST: (request: Request) => Promise<Response>;
+ };
+ //# sourceMappingURL=create-route-handler.d.ts.map
package/dist/create-route-handler.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"create-route-handler.d.ts","sourceRoot":"","sources":["../src/create-route-handler.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,KAAK,UAAU,EAkBhB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,KAAK,EAAE,gBAAgB,EAAsB,MAAM,YAAY,CAAC;AAgDvE,wBAAgB,kBAAkB,CAAC,OAAO,SAAS,UAAU,EAC3D,IAAI,EAAE,gBAAgB,CAAC,OAAO,CAAC,GAC9B;IAAE,GAAG,EAAE,MAAM,OAAO,CAAC,QAAQ,CAAC,CAAC;IAAC,IAAI,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAA;CAAE,CAokBjF"}
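
The declaration above shows that createRouteHandler returns plain GET/POST functions, which lines up with Next.js App Router route exports. A wiring sketch, assuming the package's index re-exports createRouteHandler and that a FileRouter instance is defined elsewhere in the app; the route file path is the integrator's choice (all assumptions, not shown in this diff):

    // app/api/uploadbox/route.ts -- hypothetical route file path
    import { createRouteHandler } from "@uploadbox/nextjs"; // assumes the index re-exports this factory
    import { fileRouter } from "@/server/uploadbox-router"; // assumed FileRouter defined by the app

    // S3 settings fall back to the UPLOADBOX_* environment variables when no config override is passed.
    export const { GET, POST } = createRouteHandler({ router: fileRouter });
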
package/dist/create-route-handler.js ADDED
@@ -0,0 +1,469 @@
+ import { validateFiles, generatePresignedPutUrl, generateFileKey, generateUploadId, headObject, createS3Client, UploadboxError, getFileTypeFromMime, DEFAULT_PART_SIZE, createMultipartUpload, generatePresignedPartUrls, completeMultipartUpload, abortMultipartUpload, } from "@uploadbox/core";
+ import { extractRouterConfig } from "./extract-router-config.js";
+ import { RateLimiter } from "./rate-limiter.js";
+ import { runProcessingPipeline } from "./processing-pipeline.js";
+ const PENDING_UPLOAD_MAX_AGE_MS = 24 * 60 * 60 * 1000; // 24h
+ function getConfig(override) {
+     return {
+         region: override?.region ?? process.env.UPLOADBOX_AWS_REGION ?? "us-east-1",
+         bucket: override?.bucket ?? process.env.UPLOADBOX_S3_BUCKET ?? "",
+         accessKeyId: override?.accessKeyId ?? process.env.UPLOADBOX_AWS_ACCESS_KEY_ID ?? "",
+         secretAccessKey: override?.secretAccessKey ?? process.env.UPLOADBOX_AWS_SECRET_ACCESS_KEY ?? "",
+         cdnBaseUrl: override?.cdnBaseUrl ?? process.env.UPLOADBOX_CDN_BASE_URL,
+         endpoint: override?.endpoint ?? process.env.UPLOADBOX_S3_ENDPOINT,
+         forcePathStyle: override?.forcePathStyle ?? process.env.UPLOADBOX_S3_FORCE_PATH_STYLE === "true",
+         presignedUrlExpiry: override?.presignedUrlExpiry ?? (process.env.UPLOADBOX_PRESIGNED_URL_EXPIRY
+             ? parseInt(process.env.UPLOADBOX_PRESIGNED_URL_EXPIRY, 10)
+             : undefined),
+     };
+ }
+ function buildFileUrl(config, key) {
+     if (config.cdnBaseUrl) {
+         return `${config.cdnBaseUrl.replace(/\/$/, "")}/${key}`;
+     }
+     if (config.endpoint) {
+         const base = config.endpoint.replace(/\/$/, "");
+         if (config.forcePathStyle) {
+             return `${base}/${config.bucket}/${key}`;
+         }
+         return `${base}/${key}`;
+     }
+     return `https://${config.bucket}.s3.${config.region ?? "us-east-1"}.amazonaws.com/${key}`;
+ }
+ export function createRouteHandler(opts) {
+     let _config;
+     let _s3Client;
+     let _rateLimiter;
+     const hooks = opts.hooks ?? {};
+     // In-memory store for pending uploads (between presign and complete)
+     const pendingUploads = new Map();
+     function cleanupExpiredPendingUploads() {
+         const now = Date.now();
+         for (const [key, pending] of pendingUploads.entries()) {
+             if (now - pending.createdAtMs > PENDING_UPLOAD_MAX_AGE_MS) {
+                 pendingUploads.delete(key);
+             }
+         }
+     }
+     function getConfigLazy() {
+         if (!_config)
+             _config = getConfig(opts.config);
+         return _config;
+     }
+     function getS3() {
+         if (!_s3Client)
+             _s3Client = createS3Client(getConfigLazy());
+         return _s3Client;
+     }
+     function makeS3(config) {
+         if (opts.s3ClientFactory) {
+             return opts.s3ClientFactory(config);
+         }
+         return createS3Client(config);
+     }
+     function getRateLimiter() {
+         if (!_rateLimiter && opts.rateLimit)
+             _rateLimiter = new RateLimiter(opts.rateLimit);
+         return _rateLimiter;
+     }
+     /** Resolve per-request S3 config + client via hook, or fall back to global defaults. */
+     async function resolveRequestConfig(auth) {
+         if (hooks.onResolveConfig) {
+             const resolved = await hooks.onResolveConfig(auth);
+             if (resolved) {
+                 return {
+                     config: resolved.config,
+                     s3Client: makeS3(resolved.config),
+                     keyPrefix: resolved.keyPrefix ?? "",
+                 };
+             }
+         }
+         return { config: getConfigLazy(), s3Client: getS3(), keyPrefix: "" };
+     }
+     async function GET() {
+         cleanupExpiredPendingUploads();
+         const routerConfig = extractRouterConfig(opts.router);
+         return Response.json(routerConfig);
+     }
+     async function POST(request) {
+         try {
+             cleanupExpiredPendingUploads();
+             const body = (await request.json());
+             const { action } = body;
+             // Authentication via lifecycle hook
+             let auth;
+             if (hooks.onAuthenticate) {
+                 auth = await hooks.onAuthenticate(request);
+             }
+             // Request-level rate limiting
+             const rateLimiter = getRateLimiter();
+             if (rateLimiter) {
+                 const clientIp = request.headers.get("x-forwarded-for")?.split(",")[0]?.trim() ?? "unknown";
+                 const key = auth?.apiKeyId ?? clientIp;
+                 const result = rateLimiter.check(key, "request");
+                 if (!result.allowed) {
+                     throw UploadboxError.rateLimited(result.retryAfter);
+                 }
+             }
+             switch (action) {
+                 case "upload":
+                     return handleUpload(request, body, auth);
+                 case "complete":
+                     return handleComplete(body, auth);
+                 case "create-multipart":
+                     return handleCreateMultipart(request, body, auth);
+                 case "presign-parts":
+                     return handlePresignParts(body);
+                 case "complete-multipart":
+                     return handleCompleteMultipart(body, auth);
+                 case "abort-multipart":
+                     return handleAbortMultipart(body);
+                 default:
+                     return Response.json({ error: "INVALID_ACTION", message: `Unknown action: ${action}` }, { status: 400 });
+             }
+         }
+         catch (err) {
+             if (err instanceof UploadboxError) {
+                 return Response.json(err.toJSON(), { status: err.statusCode });
+             }
+             console.error("[uploadbox] Unexpected error:", err);
+             return Response.json({ error: "INTERNAL_ERROR", message: "An unexpected error occurred" }, { status: 500 });
+         }
+     }
+     async function handleUpload(request, body, auth) {
+         const { routeKey, files: fileInfos } = body;
+         const route = opts.router[routeKey];
+         if (!route) {
+             throw UploadboxError.routeNotFound(routeKey);
+         }
+         validateFiles(fileInfos, route._config);
+         // Upload-specific rate limiting
+         const rateLimiter = getRateLimiter();
+         if (rateLimiter) {
+             const clientIp = request.headers.get("x-forwarded-for")?.split(",")[0]?.trim() ?? "unknown";
+             const key = auth?.apiKeyId ?? clientIp;
+             const result = rateLimiter.check(key, "upload");
+             if (!result.allowed) {
+                 throw UploadboxError.rateLimited(result.retryAfter);
+             }
+         }
+         // Quota check via lifecycle hook
+         if (hooks.onQuotaCheck) {
+             const totalSize = fileInfos.reduce((sum, f) => sum + f.size, 0);
+             await hooks.onQuotaCheck({ auth, totalSize, fileCount: fileInfos.length });
+         }
+         const metadata = await route._middleware({ req: request, files: fileInfos, auth });
+         const { config, s3Client, keyPrefix } = await resolveRequestConfig(auth);
+         const results = [];
+         const startedEvents = [];
+         for (const file of fileInfos) {
+             const key = keyPrefix + generateFileKey(file.name);
+             const uploadId = generateUploadId();
+             const detectedFileType = getFileTypeFromMime(file.type);
+             const routeTypeConfig = detectedFileType ? route._config[detectedFileType] : undefined;
+             const expiresIn = routeTypeConfig?.presignedUrlExpiry ?? config.presignedUrlExpiry ?? 3600;
+             const presignedUrl = await generatePresignedPutUrl(s3Client, config.bucket, key, file.type, file.size, expiresIn);
+             const ttl = file.ttlSeconds ?? routeTypeConfig?.defaultTtlSeconds;
+             const maxTtl = routeTypeConfig?.maxTtlSeconds;
+             const effectiveTtl = ttl != null && maxTtl != null ? Math.min(ttl, maxTtl) : ttl;
+             const expiresAt = effectiveTtl != null ? new Date(Date.now() + effectiveTtl * 1000) : null;
+             const url = buildFileUrl(config, key);
+             // Store in memory for the complete step
+             pendingUploads.set(key, {
+                 key,
+                 name: file.name,
+                 size: file.size,
+                 type: file.type,
+                 url,
+                 acl: routeTypeConfig?.acl ?? "public-read",
+                 routeKey,
+                 metadata: metadata,
+                 customMetadata: file.customMetadata ?? null,
+                 auth,
+                 resolvedConfig: config,
+                 createdAtMs: Date.now(),
+             });
+             startedEvents.push({
+                 key,
+                 name: file.name,
+                 size: file.size,
+                 type: file.type,
+                 url,
+                 routeKey,
+                 auth,
+                 metadata: metadata,
+                 customMetadata: file.customMetadata ?? null,
+                 expiresAt,
+             });
+             results.push({
+                 uploadId,
+                 key,
+                 url: presignedUrl,
+                 name: file.name,
+                 size: file.size,
+                 type: file.type,
+             });
+         }
+         // Fire-and-forget: notify lifecycle hook
+         if (hooks.onUploadStarted && startedEvents.length > 0) {
+             hooks.onUploadStarted(startedEvents).catch(console.error);
+         }
+         return Response.json(results);
+     }
+     async function handleComplete(body, auth) {
+         const { routeKey, keys } = body;
+         const route = opts.router[routeKey];
+         if (!route) {
+             throw UploadboxError.routeNotFound(routeKey);
+         }
+         // Resolve config for this request (used as default for keys without stored config)
+         const requestCtx = await resolveRequestConfig(auth);
+         const results = await Promise.all(keys.map(async (key) => {
+             // Get file data from in-memory store, or fall back to lifecycle hook
+             let pending = pendingUploads.get(key);
+             if (!pending && hooks.onFileVerified) {
+                 const verified = await hooks.onFileVerified(key);
+                 if (verified) {
+                     pending = {
+                         key: verified.key,
+                         name: verified.name,
+                         size: verified.size,
+                         type: verified.type,
+                         url: verified.url,
+                         acl: verified.acl,
+                         routeKey,
+                         metadata: verified.metadata,
+                         customMetadata: verified.customMetadata ?? null,
+                         auth,
+                         createdAtMs: Date.now(),
+                     };
+                 }
+             }
+             // Use stored config from pending upload if available, otherwise request-level
+             const config = pending?.resolvedConfig ?? requestCtx.config;
+             const s3Client = pending?.resolvedConfig ? makeS3(pending.resolvedConfig) : requestCtx.s3Client;
+             const s3Head = await headObject(s3Client, config.bucket, key);
+             if (!s3Head) {
+                 // Notify failure
+                 if (hooks.onUploadFailed) {
+                     hooks.onUploadFailed({ key, routeKey, auth }).catch(console.error);
+                 }
+                 throw UploadboxError.uploadFailed(`File not found in storage: ${key}`);
+             }
+             if (!pending) {
+                 throw UploadboxError.uploadFailed(`File record not found: ${key}`);
+             }
+             // Clean up in-memory state
+             pendingUploads.delete(key);
+             const fileData = {
+                 key: pending.key,
+                 name: pending.name,
+                 size: pending.size,
+                 type: pending.type,
+                 url: pending.url,
+                 acl: pending.acl,
+                 customMetadata: pending.customMetadata ?? undefined,
+             };
+             const serverData = await route._onUploadComplete({
+                 metadata: pending.metadata,
+                 file: fileData,
+             });
+             // Fire-and-forget: notify lifecycle hook
+             if (hooks.onUploadCompleted) {
+                 hooks.onUploadCompleted({
+                     file: fileData,
+                     routeKey,
+                     auth: pending.auth,
+                     metadata: pending.metadata,
+                     serverData,
+                 }).catch(console.error);
+             }
+             // Run processing pipeline (non-blocking)
+             if (opts.processing) {
+                 runProcessingPipeline(opts.processing, fileData, pending.metadata, s3Client, config).catch((err) => {
+                     console.error("[uploadbox] Processing pipeline error:", err);
+                 });
+             }
+             return { file: fileData, serverData };
+         }));
+         return Response.json(results);
+     }
+     async function handleCreateMultipart(request, body, auth) {
+         const { routeKey, file: fileInfo } = body;
+         const route = opts.router[routeKey];
+         if (!route) {
+             throw UploadboxError.routeNotFound(routeKey);
+         }
+         validateFiles([fileInfo], route._config);
+         const metadata = await route._middleware({ req: request, files: [fileInfo], auth });
+         const { config, s3Client, keyPrefix } = await resolveRequestConfig(auth);
+         const key = keyPrefix + generateFileKey(fileInfo.name);
+         const partSize = DEFAULT_PART_SIZE;
+         const totalParts = Math.ceil(fileInfo.size / partSize);
+         // Create S3 multipart upload
+         const s3UploadId = await createMultipartUpload(s3Client, config.bucket, key, fileInfo.type);
+         // Generate presigned URLs for all parts
+         const partNumbers = Array.from({ length: totalParts }, (_, i) => i + 1);
+         const detectedFileType = getFileTypeFromMime(fileInfo.type);
+         const routeTypeConfig = detectedFileType ? route._config[detectedFileType] : undefined;
+         const expiresIn = routeTypeConfig?.presignedUrlExpiry ?? config.presignedUrlExpiry ?? 3600;
+         const parts = await generatePresignedPartUrls(s3Client, config.bucket, key, s3UploadId, partNumbers, expiresIn);
+         // Compute TTL/expiresAt
+         const ttl = fileInfo.ttlSeconds ?? routeTypeConfig?.defaultTtlSeconds;
+         const maxTtl = routeTypeConfig?.maxTtlSeconds;
+         const effectiveTtl = ttl != null && maxTtl != null ? Math.min(ttl, maxTtl) : ttl;
+         const fileExpiresAt = effectiveTtl != null ? new Date(Date.now() + effectiveTtl * 1000) : null;
+         const url = buildFileUrl(config, key);
+         // Store in memory for the complete step
+         pendingUploads.set(key, {
+             key,
+             name: fileInfo.name,
+             size: fileInfo.size,
+             type: fileInfo.type,
+             url,
+             acl: routeTypeConfig?.acl ?? "public-read",
+             routeKey,
+             metadata: metadata,
+             customMetadata: fileInfo.customMetadata ?? null,
+             auth,
+             resolvedConfig: config,
+             createdAtMs: Date.now(),
+         });
+         // Fire-and-forget: notify lifecycle hook
+         if (hooks.onMultipartStarted) {
+             hooks.onMultipartStarted({
+                 key,
+                 name: fileInfo.name,
+                 size: fileInfo.size,
+                 type: fileInfo.type,
+                 url,
+                 routeKey,
+                 s3UploadId,
+                 bucket: config.bucket,
+                 totalParts,
+                 partSize,
+                 auth,
+                 metadata: metadata,
+                 customMetadata: fileInfo.customMetadata ?? null,
+                 expiresAt: fileExpiresAt,
+                 uploadExpiresAt: new Date(Date.now() + expiresIn * 1000),
+             }).catch(console.error);
+         }
+         return Response.json({
+             fileKey: key,
+             uploadId: s3UploadId,
+             parts,
+             partSize,
+             totalParts,
+         });
+     }
+     async function handlePresignParts(body) {
+         const { fileKey, uploadId, partNumbers } = body;
+         // Use stored config from pending upload if available
+         const pending = pendingUploads.get(fileKey);
+         const config = pending?.resolvedConfig ?? getConfigLazy();
+         const s3Client = pending?.resolvedConfig ? makeS3(pending.resolvedConfig) : getS3();
+         const expiresIn = config.presignedUrlExpiry ?? 3600;
+         const parts = await generatePresignedPartUrls(s3Client, config.bucket, fileKey, uploadId, partNumbers, expiresIn);
+         return Response.json({ parts });
+     }
+     async function handleCompleteMultipart(body, auth) {
+         const { routeKey, fileKey, uploadId, parts } = body;
+         const route = opts.router[routeKey];
+         if (!route) {
+             throw UploadboxError.routeNotFound(routeKey);
+         }
+         // Get file data from in-memory store, or fall back to lifecycle hook
+         let pending = pendingUploads.get(fileKey);
+         if (!pending && hooks.onFileVerified) {
+             const verified = await hooks.onFileVerified(fileKey);
+             if (verified) {
+                 pending = {
+                     key: verified.key,
+                     name: verified.name,
+                     size: verified.size,
+                     type: verified.type,
+                     url: verified.url,
+                     acl: verified.acl,
+                     routeKey,
+                     metadata: verified.metadata,
+                     customMetadata: verified.customMetadata ?? null,
+                     auth,
+                     createdAtMs: Date.now(),
+                 };
+             }
+         }
+         // Use stored config or resolve fresh
+         const config = pending?.resolvedConfig ?? (await resolveRequestConfig(auth)).config;
+         const s3Client = makeS3(config);
+         // Complete S3 multipart upload
+         await completeMultipartUpload(s3Client, config.bucket, fileKey, uploadId, parts);
+         // Verify with headObject
+         const s3Head = await headObject(s3Client, config.bucket, fileKey);
+         if (!s3Head) {
+             throw UploadboxError.uploadFailed(`File not found after multipart complete: ${fileKey}`);
+         }
+         if (!pending) {
+             throw UploadboxError.uploadFailed(`File record not found: ${fileKey}`);
+         }
+         // Clean up in-memory state
+         pendingUploads.delete(fileKey);
+         // Fire-and-forget: notify multipart completed
+         if (hooks.onMultipartCompleted) {
+             hooks.onMultipartCompleted({
+                 key: fileKey,
+                 s3UploadId: uploadId,
+                 routeKey,
+                 auth: pending.auth,
+                 parts,
+             }).catch(console.error);
+         }
+         const fileData = {
+             key: pending.key,
+             name: pending.name,
+             size: pending.size,
+             type: pending.type,
+             url: pending.url,
+             acl: pending.acl,
+             customMetadata: pending.customMetadata ?? undefined,
+         };
+         const serverData = await route._onUploadComplete({
+             metadata: pending.metadata,
+             file: fileData,
+         });
+         // Fire-and-forget: notify lifecycle hook
+         if (hooks.onUploadCompleted) {
+             hooks.onUploadCompleted({
+                 file: fileData,
+                 routeKey,
+                 auth: pending.auth,
+                 metadata: pending.metadata,
+                 serverData,
+             }).catch(console.error);
+         }
+         // Run processing pipeline (non-blocking)
+         if (opts.processing) {
+             runProcessingPipeline(opts.processing, fileData, pending.metadata, s3Client, config).catch((err) => {
+                 console.error("[uploadbox] Processing pipeline error:", err);
+             });
+         }
+         return Response.json({ file: fileData, serverData });
+     }
+     async function handleAbortMultipart(body) {
+         const { fileKey, uploadId } = body;
+         // Use stored config from pending upload if available
+         const pending = pendingUploads.get(fileKey);
+         const config = pending?.resolvedConfig ?? getConfigLazy();
+         const s3Client = pending?.resolvedConfig ? makeS3(pending.resolvedConfig) : getS3();
+         await abortMultipartUpload(s3Client, config.bucket, fileKey, uploadId);
+         // Clean up in-memory state
+         pendingUploads.delete(fileKey);
+         // Fire-and-forget: notify lifecycle hook
+         if (hooks.onMultipartAborted) {
+             hooks.onMultipartAborted({ key: fileKey, s3UploadId: uploadId }).catch(console.error);
+         }
+         return Response.json({ success: true });
+     }
+     return { GET, POST };
+ }
+ //# sourceMappingURL=create-route-handler.js.map
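
For orientation, the POST handler above dispatches on an action field in the JSON body. The shapes below are inferred from the destructuring in each handler; only the fields the code actually reads are listed, and the optionality markers and the parts element type are assumptions:

    // Request body shapes inferred from the compiled handler above; not an official API definition.
    type UploadboxPostBody =
        | { action: "upload"; routeKey: string; files: Array<{ name: string; size: number; type: string; ttlSeconds?: number; customMetadata?: unknown }> }
        | { action: "complete"; routeKey: string; keys: string[] }
        | { action: "create-multipart"; routeKey: string; file: { name: string; size: number; type: string; ttlSeconds?: number; customMetadata?: unknown } }
        | { action: "presign-parts"; fileKey: string; uploadId: string; partNumbers: number[] }
        | { action: "complete-multipart"; routeKey: string; fileKey: string; uploadId: string; parts: unknown[] }
        | { action: "abort-multipart"; fileKey: string; uploadId: string };
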