@edge-base/server 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/admin-build/.gitkeep +0 -0
- package/admin-build/_app/env.js +1 -0
- package/admin-build/_app/immutable/assets/0.Bm6cF078.css +1 -0
- package/admin-build/_app/immutable/assets/1.BfW3pUNa.css +1 -0
- package/admin-build/_app/immutable/assets/11.CVmQOewb.css +1 -0
- package/admin-build/_app/immutable/assets/12.B1EhbRZT.css +1 -0
- package/admin-build/_app/immutable/assets/13.BvwYeuwE.css +1 -0
- package/admin-build/_app/immutable/assets/14.CdVfcO0R.css +1 -0
- package/admin-build/_app/immutable/assets/15.2yeZ66b-.css +1 -0
- package/admin-build/_app/immutable/assets/17.BVg0JEVu.css +1 -0
- package/admin-build/_app/immutable/assets/18.Rwnl3x_i.css +1 -0
- package/admin-build/_app/immutable/assets/20.DsPWA9AV.css +1 -0
- package/admin-build/_app/immutable/assets/21.Dz2RJ56c.css +1 -0
- package/admin-build/_app/immutable/assets/22.DwNLk5Ai.css +1 -0
- package/admin-build/_app/immutable/assets/23.CFpu0gOO.css +1 -0
- package/admin-build/_app/immutable/assets/24.Cy5LBeoJ.css +1 -0
- package/admin-build/_app/immutable/assets/25.pUyLVf-h.css +1 -0
- package/admin-build/_app/immutable/assets/26.DBcGrlXa.css +1 -0
- package/admin-build/_app/immutable/assets/27.BswYyAJD.css +1 -0
- package/admin-build/_app/immutable/assets/28.B4ueB1Kf.css +1 -0
- package/admin-build/_app/immutable/assets/29.B-qU6PdF.css +1 -0
- package/admin-build/_app/immutable/assets/3.Dg81Pgmd.css +1 -0
- package/admin-build/_app/immutable/assets/30.CsdWum94.css +1 -0
- package/admin-build/_app/immutable/assets/31.U6OwIp50.css +1 -0
- package/admin-build/_app/immutable/assets/4.CyawCCux.css +1 -0
- package/admin-build/_app/immutable/assets/5.C0YO2HTk.css +1 -0
- package/admin-build/_app/immutable/assets/8.Br5jd6kD.css +1 -0
- package/admin-build/_app/immutable/assets/Badge.EMYLHBxE.css +1 -0
- package/admin-build/_app/immutable/assets/Button.DpzMRTjK.css +1 -0
- package/admin-build/_app/immutable/assets/ConfirmDialog.DAnaWRRk.css +1 -0
- package/admin-build/_app/immutable/assets/EmptyState.CwKsu57Y.css +1 -0
- package/admin-build/_app/immutable/assets/Input.BDUSenmU.css +1 -0
- package/admin-build/_app/immutable/assets/Modal.Dm5B0Xie.css +1 -0
- package/admin-build/_app/immutable/assets/PageShell.CmU-Xh-b.css +1 -0
- package/admin-build/_app/immutable/assets/SchemaFieldEditor.g4NsCdno.css +1 -0
- package/admin-build/_app/immutable/assets/Select.BW4Keufm.css +1 -0
- package/admin-build/_app/immutable/assets/Skeleton.KWUulTKJ.css +1 -0
- package/admin-build/_app/immutable/assets/Tabs.CniGYb67.css +1 -0
- package/admin-build/_app/immutable/assets/TimeChart.BTCDAvmT.css +1 -0
- package/admin-build/_app/immutable/assets/Toggle.Cy_K12OM.css +1 -0
- package/admin-build/_app/immutable/assets/TopList.ClFzmPlA.css +1 -0
- package/admin-build/_app/immutable/chunks/7B47DvSx.js +1 -0
- package/admin-build/_app/immutable/chunks/7f08Id8e.js +1 -0
- package/admin-build/_app/immutable/chunks/8wJeQ7LN.js +1 -0
- package/admin-build/_app/immutable/chunks/B-h2afW5.js +1 -0
- package/admin-build/_app/immutable/chunks/B8vJP3wz.js +1 -0
- package/admin-build/_app/immutable/chunks/BR_fL5Yv.js +1 -0
- package/admin-build/_app/immutable/chunks/BY92tFS2.js +1 -0
- package/admin-build/_app/immutable/chunks/BcR-Rdj9.js +1 -0
- package/admin-build/_app/immutable/chunks/BdrwyZv8.js +1 -0
- package/admin-build/_app/immutable/chunks/Bh56EfQ_.js +1 -0
- package/admin-build/_app/immutable/chunks/BkrCkgYp.js +1 -0
- package/admin-build/_app/immutable/chunks/BmRjiP5k.js +1 -0
- package/admin-build/_app/immutable/chunks/BsokvhWC.js +1 -0
- package/admin-build/_app/immutable/chunks/C4D51vTW.js +1 -0
- package/admin-build/_app/immutable/chunks/C6puvcoR.js +2 -0
- package/admin-build/_app/immutable/chunks/CCKNu7m7.js +1 -0
- package/admin-build/_app/immutable/chunks/CWj6FrbW.js +1 -0
- package/admin-build/_app/immutable/chunks/Ce-ngf4p.js +5 -0
- package/admin-build/_app/immutable/chunks/Cs0GwzJA.js +1 -0
- package/admin-build/_app/immutable/chunks/CwROoZK0.js +1 -0
- package/admin-build/_app/immutable/chunks/CxCPv_Ut.js +1 -0
- package/admin-build/_app/immutable/chunks/CxbRue-5.js +1 -0
- package/admin-build/_app/immutable/chunks/CyqB6g-D.js +1 -0
- package/admin-build/_app/immutable/chunks/D5h5A1cc.js +2 -0
- package/admin-build/_app/immutable/chunks/DnyL7Zq-.js +1 -0
- package/admin-build/_app/immutable/chunks/DoPXzH7F.js +1 -0
- package/admin-build/_app/immutable/chunks/DrQSgw-f.js +1 -0
- package/admin-build/_app/immutable/chunks/DttM2zNO.js +1 -0
- package/admin-build/_app/immutable/chunks/DuXuUBWN.js +1 -0
- package/admin-build/_app/immutable/chunks/MdeqaOQx.js +10 -0
- package/admin-build/_app/immutable/chunks/NuUjtcO2.js +1 -0
- package/admin-build/_app/immutable/chunks/Q2nPFxS6.js +1 -0
- package/admin-build/_app/immutable/chunks/R6arueIl.js +1 -0
- package/admin-build/_app/immutable/chunks/UUazaC_N.js +1 -0
- package/admin-build/_app/immutable/chunks/cOYbrQxx.js +1 -0
- package/admin-build/_app/immutable/chunks/eFQHTGwA.js +1 -0
- package/admin-build/_app/immutable/chunks/ehbppgYb.js +1 -0
- package/admin-build/_app/immutable/chunks/glwixJlP.js +1 -0
- package/admin-build/_app/immutable/chunks/vApWTCBs.js +1 -0
- package/admin-build/_app/immutable/chunks/w89G9Xpi.js +1 -0
- package/admin-build/_app/immutable/chunks/wJsUhbfZ.js +1 -0
- package/admin-build/_app/immutable/chunks/zfauFM8P.js +1 -0
- package/admin-build/_app/immutable/entry/app.CcO-Uos3.js +2 -0
- package/admin-build/_app/immutable/entry/start.COebYq3I.js +1 -0
- package/admin-build/_app/immutable/nodes/0.CjtHKU-6.js +1 -0
- package/admin-build/_app/immutable/nodes/1.DEisjlM0.js +1 -0
- package/admin-build/_app/immutable/nodes/10.CvhdyWVB.js +1 -0
- package/admin-build/_app/immutable/nodes/11.DjHqcOvy.js +1 -0
- package/admin-build/_app/immutable/nodes/12.mQLz4Mj_.js +1 -0
- package/admin-build/_app/immutable/nodes/13.CBonZZyP.js +110 -0
- package/admin-build/_app/immutable/nodes/14.d-oiZL0j.js +3 -0
- package/admin-build/_app/immutable/nodes/15.CKPQsUYF.js +1 -0
- package/admin-build/_app/immutable/nodes/16.wPzAPQGx.js +1 -0
- package/admin-build/_app/immutable/nodes/17.DayhKyEZ.js +1 -0
- package/admin-build/_app/immutable/nodes/18.DKwS0Ir0.js +1 -0
- package/admin-build/_app/immutable/nodes/19.wPzAPQGx.js +1 -0
- package/admin-build/_app/immutable/nodes/2.BKoKrw1i.js +1 -0
- package/admin-build/_app/immutable/nodes/20.BvIkkkrW.js +1 -0
- package/admin-build/_app/immutable/nodes/21.DMaFhdHk.js +128 -0
- package/admin-build/_app/immutable/nodes/22.3xdgwuK1.js +1 -0
- package/admin-build/_app/immutable/nodes/23.8Bvgjbsl.js +112 -0
- package/admin-build/_app/immutable/nodes/24.DzSSzRhG.js +2 -0
- package/admin-build/_app/immutable/nodes/25.9KKYBnAE.js +2 -0
- package/admin-build/_app/immutable/nodes/26.Bhn9dfhY.js +1 -0
- package/admin-build/_app/immutable/nodes/27.kRLiC24G.js +1 -0
- package/admin-build/_app/immutable/nodes/28.BVIN1-7N.js +1 -0
- package/admin-build/_app/immutable/nodes/29.3yabZWj4.js +1 -0
- package/admin-build/_app/immutable/nodes/3.BFtSOkX7.js +2 -0
- package/admin-build/_app/immutable/nodes/30.CyCQlwaP.js +1 -0
- package/admin-build/_app/immutable/nodes/31.C4LDXjES.js +1 -0
- package/admin-build/_app/immutable/nodes/4.CvbiMlCa.js +1 -0
- package/admin-build/_app/immutable/nodes/5.C6BLv2eM.js +1 -0
- package/admin-build/_app/immutable/nodes/6.BcXvfl2P.js +1 -0
- package/admin-build/_app/immutable/nodes/7.CIuqhPiK.js +1 -0
- package/admin-build/_app/immutable/nodes/8.BQOR_JfO.js +1 -0
- package/admin-build/_app/immutable/nodes/9.NZqXQxPy.js +1 -0
- package/admin-build/_app/version.json +1 -0
- package/admin-build/favicon.svg +26 -0
- package/admin-build/index.html +45 -0
- package/openapi.json +19543 -0
- package/package.json +66 -0
- package/src/__tests__/admin-assets.test.ts +55 -0
- package/src/__tests__/admin-data-routes.test.ts +488 -0
- package/src/__tests__/admin-db-target.test.ts +103 -0
- package/src/__tests__/admin-routing.test.ts +31 -0
- package/src/__tests__/admin-user-management.test.ts +311 -0
- package/src/__tests__/analytics-query.test.ts +75 -0
- package/src/__tests__/auth-d1.test.ts +749 -0
- package/src/__tests__/auth-db-adapter.test.ts +73 -0
- package/src/__tests__/auth-jwt.test.ts +440 -0
- package/src/__tests__/auth-oauth.test.ts +389 -0
- package/src/__tests__/auth-password.test.ts +367 -0
- package/src/__tests__/auth-redirect.test.ts +87 -0
- package/src/__tests__/backup-restore.test.ts +711 -0
- package/src/__tests__/broadcast.test.ts +128 -0
- package/src/__tests__/cli.test.ts +178 -0
- package/src/__tests__/cloudflare-realtime.test.ts +113 -0
- package/src/__tests__/config.test.ts +469 -0
- package/src/__tests__/cors.test.ts +154 -0
- package/src/__tests__/cron.test.ts +302 -0
- package/src/__tests__/d1-handler.test.ts +402 -0
- package/src/__tests__/d1-sql.test.ts +120 -0
- package/src/__tests__/database-live-config.test.ts +42 -0
- package/src/__tests__/database-live-emitter.test.ts +56 -0
- package/src/__tests__/database-live-filters.test.ts +63 -0
- package/src/__tests__/database-live-route.test.ts +113 -0
- package/src/__tests__/db-sql.test.ts +163 -0
- package/src/__tests__/do-lifecycle.test.ts +263 -0
- package/src/__tests__/do-router.test.ts +729 -0
- package/src/__tests__/email-provider.test.ts +128 -0
- package/src/__tests__/email-templates.test.ts +528 -0
- package/src/__tests__/error-format.test.ts +250 -0
- package/src/__tests__/field-ops.test.ts +242 -0
- package/src/__tests__/functions-context.test.ts +334 -0
- package/src/__tests__/functions-d1-proxy.test.ts +229 -0
- package/src/__tests__/functions-registry-runtime-config.test.ts +17 -0
- package/src/__tests__/functions-route.test.ts +139 -0
- package/src/__tests__/internal-request.test.ts +77 -0
- package/src/__tests__/log-writer.test.ts +44 -0
- package/src/__tests__/logger.test.ts +58 -0
- package/src/__tests__/meta-admin-proxy.test.ts +48 -0
- package/src/__tests__/meta-export-coverage.test.ts +191 -0
- package/src/__tests__/meta-route-registration.test.ts +47 -0
- package/src/__tests__/namespace-dump.test.ts +28 -0
- package/src/__tests__/oauth-providers.test.ts +337 -0
- package/src/__tests__/openapi-coverage.test.ts +144 -0
- package/src/__tests__/pagination.test.ts +59 -0
- package/src/__tests__/password-policy.test.ts +191 -0
- package/src/__tests__/plugin-migrations.test.ts +379 -0
- package/src/__tests__/postgres-batch-compat.test.ts +133 -0
- package/src/__tests__/postgres-dialect.test.ts +328 -0
- package/src/__tests__/postgres-executor.test.ts +79 -0
- package/src/__tests__/postgres-field-ops-compat.test.ts +222 -0
- package/src/__tests__/postgres-schema-init.test.ts +105 -0
- package/src/__tests__/postgres-table-utils.test.ts +107 -0
- package/src/__tests__/presence.test.ts +199 -0
- package/src/__tests__/provider.test.ts +550 -0
- package/src/__tests__/public-user-profile.test.ts +339 -0
- package/src/__tests__/push-handlers.test.ts +179 -0
- package/src/__tests__/push-provider.test.ts +80 -0
- package/src/__tests__/push-token.test.ts +418 -0
- package/src/__tests__/query.test.ts +771 -0
- package/src/__tests__/rate-limit.test.ts +260 -0
- package/src/__tests__/room-access-policy.test.ts +101 -0
- package/src/__tests__/room-handler-context.test.ts +130 -0
- package/src/__tests__/room-monitoring.test.ts +138 -0
- package/src/__tests__/room-runtime-routing.test.ts +222 -0
- package/src/__tests__/room.test.ts +254 -0
- package/src/__tests__/route-parser.test.ts +490 -0
- package/src/__tests__/rules.test.ts +234 -0
- package/src/__tests__/runtime-surface-accounting.test.ts +120 -0
- package/src/__tests__/scheduled.test.ts +80 -0
- package/src/__tests__/schema.test.ts +1273 -0
- package/src/__tests__/security-hardening.test.ts +312 -0
- package/src/__tests__/server.unit.test.ts +333 -0
- package/src/__tests__/service-key-db-proxy.test.ts +650 -0
- package/src/__tests__/service-key-provider-bypass.test.ts +138 -0
- package/src/__tests__/service-key.test.ts +757 -0
- package/src/__tests__/smoke-skip-report.test.ts +72 -0
- package/src/__tests__/sms-provider.test.ts +39 -0
- package/src/__tests__/sql-route.test.ts +218 -0
- package/src/__tests__/storage-hook-context.test.ts +115 -0
- package/src/__tests__/totp.test.ts +200 -0
- package/src/__tests__/uuid.test.ts +144 -0
- package/src/__tests__/validation.test.ts +773 -0
- package/src/__tests__/websocket-pending.test.ts +163 -0
- package/src/_functions-registry.ts +51 -0
- package/src/bench-entry.ts +9 -0
- package/src/cloudflare-test.d.ts +1 -0
- package/src/durable-objects/auth-do.ts +49 -0
- package/src/durable-objects/database-do.ts +2240 -0
- package/src/durable-objects/database-live-do.ts +949 -0
- package/src/durable-objects/logs-do.ts +1200 -0
- package/src/durable-objects/room-runtime-base.ts +1604 -0
- package/src/durable-objects/rooms-do.ts +2191 -0
- package/src/generated-config.ts +6 -0
- package/src/index.ts +382 -0
- package/src/lib/admin-assets.ts +54 -0
- package/src/lib/admin-db-target.ts +301 -0
- package/src/lib/admin-routing.ts +35 -0
- package/src/lib/admin-user-management.ts +464 -0
- package/src/lib/analytics-adapter.ts +103 -0
- package/src/lib/analytics-query.ts +579 -0
- package/src/lib/auth-d1-service.ts +1193 -0
- package/src/lib/auth-d1.ts +1056 -0
- package/src/lib/auth-db-adapter.ts +289 -0
- package/src/lib/auth-redirect.ts +116 -0
- package/src/lib/cidr.ts +115 -0
- package/src/lib/client-ip.ts +51 -0
- package/src/lib/cloudflare-realtime.ts +251 -0
- package/src/lib/control-db.ts +36 -0
- package/src/lib/cron.ts +163 -0
- package/src/lib/d1-handler.ts +1425 -0
- package/src/lib/d1-schema-init.ts +255 -0
- package/src/lib/d1-sql.ts +33 -0
- package/src/lib/database-live-config.ts +24 -0
- package/src/lib/database-live-emitter.ts +111 -0
- package/src/lib/db-sql.ts +66 -0
- package/src/lib/do-retry.ts +36 -0
- package/src/lib/do-router.ts +270 -0
- package/src/lib/do-sql.ts +73 -0
- package/src/lib/email-provider.ts +379 -0
- package/src/lib/email-templates.ts +285 -0
- package/src/lib/email-translations.ts +422 -0
- package/src/lib/errors.ts +151 -0
- package/src/lib/functions.ts +2091 -0
- package/src/lib/hono.ts +56 -0
- package/src/lib/internal-request.ts +56 -0
- package/src/lib/jwt.ts +354 -0
- package/src/lib/log-writer.ts +272 -0
- package/src/lib/namespace-dump.ts +125 -0
- package/src/lib/oauth-providers.ts +1225 -0
- package/src/lib/op-parser.ts +99 -0
- package/src/lib/openapi.ts +146 -0
- package/src/lib/pagination.ts +19 -0
- package/src/lib/password-policy.ts +102 -0
- package/src/lib/password.ts +145 -0
- package/src/lib/plugin-migrations.ts +612 -0
- package/src/lib/postgres-executor.ts +203 -0
- package/src/lib/postgres-handler.ts +1102 -0
- package/src/lib/postgres-schema-init.ts +341 -0
- package/src/lib/postgres-table-utils.ts +87 -0
- package/src/lib/public-user-profile.ts +187 -0
- package/src/lib/push-provider.ts +409 -0
- package/src/lib/push-token.ts +294 -0
- package/src/lib/query-engine.ts +768 -0
- package/src/lib/room-monitoring.ts +97 -0
- package/src/lib/room-runtime.ts +14 -0
- package/src/lib/route-parser.ts +434 -0
- package/src/lib/schema.ts +538 -0
- package/src/lib/schemas.ts +152 -0
- package/src/lib/service-key.ts +419 -0
- package/src/lib/sms-provider.ts +230 -0
- package/src/lib/startup-config.ts +99 -0
- package/src/lib/totp.ts +242 -0
- package/src/lib/uuid.ts +87 -0
- package/src/lib/validation.ts +205 -0
- package/src/lib/version.ts +2 -0
- package/src/lib/websocket-pending.ts +40 -0
- package/src/middleware/auth.ts +169 -0
- package/src/middleware/captcha-verify.ts +217 -0
- package/src/middleware/cors.ts +159 -0
- package/src/middleware/error-handler.ts +54 -0
- package/src/middleware/internal-guard.ts +26 -0
- package/src/middleware/logger.ts +126 -0
- package/src/middleware/rate-limit.ts +283 -0
- package/src/middleware/rules.ts +475 -0
- package/src/routes/admin-auth.ts +447 -0
- package/src/routes/admin.ts +3501 -0
- package/src/routes/analytics-api.ts +290 -0
- package/src/routes/auth.ts +4222 -0
- package/src/routes/backup.ts +1466 -0
- package/src/routes/config.ts +53 -0
- package/src/routes/d1.ts +109 -0
- package/src/routes/database-live.ts +281 -0
- package/src/routes/functions.ts +155 -0
- package/src/routes/health.ts +32 -0
- package/src/routes/kv.ts +167 -0
- package/src/routes/oauth.ts +1055 -0
- package/src/routes/push.ts +1465 -0
- package/src/routes/room.ts +639 -0
- package/src/routes/schema-endpoint.ts +76 -0
- package/src/routes/sql.ts +176 -0
- package/src/routes/storage.ts +1674 -0
- package/src/routes/tables.ts +699 -0
- package/src/routes/users.ts +21 -0
- package/src/routes/vectorize.ts +372 -0
- package/src/types.ts +99 -0
|
@@ -0,0 +1,1674 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Storage Routes — R2 File Storage API (M7, M17)
|
|
3
|
+
*
|
|
4
|
+
* Endpoints (GET routes ordered by registration priority):
|
|
5
|
+
* POST /api/storage/{bucket}/upload — File upload
|
|
6
|
+
* GET /api/storage/{bucket}/{key}/metadata — Get metadata
|
|
7
|
+
* PATCH /api/storage/{bucket}/{key}/metadata — Update metadata
|
|
8
|
+
* HEAD /api/storage/{bucket}/{key} — Check file exists
|
|
9
|
+
* GET /api/storage/{bucket}/uploads/{uploadId}/parts — Get uploaded parts (M17 resume)
|
|
10
|
+
* GET /api/storage/{bucket}/{key} — Download file (catch-all — LAST)
|
|
11
|
+
* GET /api/storage/{bucket} — List files
|
|
12
|
+
* DELETE /api/storage/{bucket}/{key} — Delete file
|
|
13
|
+
* POST /api/storage/{bucket}/delete-batch — Batch delete files
|
|
14
|
+
* POST /api/storage/{bucket}/signed-url — Create signed download URL
|
|
15
|
+
* POST /api/storage/{bucket}/signed-urls — Batch create signed download URLs
|
|
16
|
+
* POST /api/storage/{bucket}/signed-upload-url — Create signed upload URL
|
|
17
|
+
* POST /api/storage/{bucket}/multipart/create — Start multipart upload
|
|
18
|
+
* POST /api/storage/{bucket}/multipart/upload-part — Upload a part
|
|
19
|
+
* POST /api/storage/{bucket}/multipart/complete — Complete multipart upload
|
|
20
|
+
* POST /api/storage/{bucket}/multipart/abort — Abort multipart upload
|
|
21
|
+
*
|
|
22
|
+
* ⚠️ Route order matters: specific sub-paths (metadata, uploads/parts) must be
|
|
23
|
+
* registered BEFORE the /{key} catch-all to avoid route shadowing.
|
|
24
|
+
*
|
|
25
|
+
* Security: Bucket-level rules (read, write, delete) from config.storage.buckets.
|
|
26
|
+
* Default deny when no rules are defined.
|
|
27
|
+
*/
|
|
28
|
+
|
|
29
|
+
import type { Context } from 'hono';
|
|
30
|
+
import { OpenAPIHono, createRoute, z, type HonoEnv } from '../lib/hono.js';
|
|
31
|
+
import type { Env } from '../types.js';
|
|
32
|
+
import { parseConfig } from '../lib/do-router.js';
|
|
33
|
+
import { resolveRootServiceKey, validateKey, timingSafeEqual, type ConstraintContext } from '../lib/service-key.js';
|
|
34
|
+
import { EdgeBaseError } from '@edge-base/shared';
|
|
35
|
+
import { hookRejectedError } from '../lib/errors.js';
|
|
36
|
+
import { getTrustedClientIp } from '../lib/client-ip.js';
|
|
37
|
+
import { zodDefaultHook, jsonResponseSchema, errorResponseSchema } from '../lib/schemas.js';
|
|
38
|
+
import type { StorageBucketConfig, StorageHooks, StorageHookCtx, AuthContext, R2FileMeta, WriteFileMeta, StorageTrigger } from '@edge-base/shared';
|
|
39
|
+
import {
|
|
40
|
+
getFunctionsByTrigger,
|
|
41
|
+
buildFunctionKvProxy,
|
|
42
|
+
buildFunctionD1Proxy,
|
|
43
|
+
buildFunctionVectorizeProxy,
|
|
44
|
+
buildFunctionPushProxy,
|
|
45
|
+
buildAdminAuthContext,
|
|
46
|
+
buildAdminDbProxy,
|
|
47
|
+
getWorkerUrl,
|
|
48
|
+
} from '../lib/functions.js';
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
// Root router for all /api/storage endpoints; zod validation failures are
// formatted by the shared zodDefaultHook so error shapes match the rest of the API.
const storage = new OpenAPIHono<HonoEnv>({ defaultHook: zodDefaultHook });
|
|
52
|
+
|
|
53
|
+
// ─── Plugin Storage Hook Execution (metadata only, non-blocking) ───
|
|
54
|
+
|
|
55
|
+
/**
|
|
56
|
+
* Execute plugin-registered storage hooks (fire-and-forget via waitUntil).
|
|
57
|
+
* Storage hooks receive file metadata only — NO file content (Worker 128MB memory limit).
|
|
58
|
+
* NOTE: presigned URL direct uploads bypass the server and do NOT trigger these hooks.
|
|
59
|
+
*/
|
|
60
|
+
function executeStorageHooks(
|
|
61
|
+
event: StorageTrigger['event'],
|
|
62
|
+
fileMeta: R2FileMeta & { bucket: string },
|
|
63
|
+
auth: AuthContext | null,
|
|
64
|
+
executionCtx: ExecutionContext,
|
|
65
|
+
env: Env,
|
|
66
|
+
workerUrl?: string,
|
|
67
|
+
): void {
|
|
68
|
+
const hooks = getFunctionsByTrigger('storage', { type: 'storage', event } as StorageTrigger);
|
|
69
|
+
if (hooks.length === 0) return;
|
|
70
|
+
|
|
71
|
+
const serviceKey = resolveRootServiceKey(parseConfig(env), env);
|
|
72
|
+
const adminCtx = buildStorageHookAdminContext(env, executionCtx, workerUrl, serviceKey);
|
|
73
|
+
|
|
74
|
+
for (const { name, definition } of hooks) {
|
|
75
|
+
executionCtx.waitUntil(
|
|
76
|
+
definition.handler({
|
|
77
|
+
file: fileMeta,
|
|
78
|
+
auth: auth ? { id: auth.id, email: auth.email } : null,
|
|
79
|
+
admin: adminCtx,
|
|
80
|
+
}).catch((err: unknown) => {
|
|
81
|
+
console.error(`[EdgeBase] Storage hook '${name}' (${event}) failed:`, err);
|
|
82
|
+
}),
|
|
83
|
+
);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
function normalizeStorageHookError(
|
|
88
|
+
error: unknown,
|
|
89
|
+
event: 'beforeUpload' | 'beforeDelete' | 'beforeDownload',
|
|
90
|
+
): EdgeBaseError {
|
|
91
|
+
const fallbackByEvent = {
|
|
92
|
+
beforeUpload: 'Upload rejected by beforeUpload hook.',
|
|
93
|
+
beforeDelete: 'Delete rejected by beforeDelete hook.',
|
|
94
|
+
beforeDownload: 'Download rejected by beforeDownload hook.',
|
|
95
|
+
} as const;
|
|
96
|
+
|
|
97
|
+
return hookRejectedError(error, fallbackByEvent[event], event);
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
/**
|
|
101
|
+
* Execute plugin-registered blocking storage hooks (beforeUpload, beforeDelete, beforeDownload).
|
|
102
|
+
* Blocking hooks can throw to reject the operation. 5s timeout per hook.
|
|
103
|
+
* beforeUpload hooks may return Record<string, string> to merge custom metadata.
|
|
104
|
+
*/
|
|
105
|
+
async function executeBlockingStorageHooks(
|
|
106
|
+
event: 'beforeUpload' | 'beforeDelete' | 'beforeDownload',
|
|
107
|
+
fileMeta: (R2FileMeta | WriteFileMeta) & { bucket: string },
|
|
108
|
+
auth: AuthContext | null,
|
|
109
|
+
env: Env,
|
|
110
|
+
workerUrl?: string,
|
|
111
|
+
): Promise<Record<string, string> | void> {
|
|
112
|
+
const hooks = getFunctionsByTrigger('storage', { type: 'storage', event } as unknown as StorageTrigger);
|
|
113
|
+
if (hooks.length === 0) return;
|
|
114
|
+
|
|
115
|
+
const HOOK_TIMEOUT_MS = 5000;
|
|
116
|
+
const serviceKey = resolveRootServiceKey(parseConfig(env), env);
|
|
117
|
+
const adminCtx = buildStorageHookAdminContext(env, undefined, workerUrl, serviceKey);
|
|
118
|
+
const mergedMeta: Record<string, string> = {};
|
|
119
|
+
|
|
120
|
+
for (const { name, definition } of hooks) {
|
|
121
|
+
const hookCtx = {
|
|
122
|
+
file: fileMeta,
|
|
123
|
+
auth: auth ? { id: auth.id, email: auth.email } : null,
|
|
124
|
+
admin: adminCtx,
|
|
125
|
+
};
|
|
126
|
+
|
|
127
|
+
let result: unknown;
|
|
128
|
+
try {
|
|
129
|
+
result = await Promise.race([
|
|
130
|
+
definition.handler(hookCtx),
|
|
131
|
+
new Promise((_, reject) =>
|
|
132
|
+
setTimeout(() => reject(new Error(`Storage hook '${name}' (${event}) timed out (5s)`)), HOOK_TIMEOUT_MS),
|
|
133
|
+
),
|
|
134
|
+
]);
|
|
135
|
+
} catch (error) {
|
|
136
|
+
throw normalizeStorageHookError(error, event);
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
if (result && typeof result === 'object' && event === 'beforeUpload') {
|
|
140
|
+
Object.assign(mergedMeta, result as Record<string, string>);
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
return Object.keys(mergedMeta).length > 0 ? mergedMeta : undefined;
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
/** Build admin context for plugin storage hooks (DB, auth, kv, d1, etc.). */
|
|
148
|
+
function buildStorageHookAdminContext(
|
|
149
|
+
env: Env,
|
|
150
|
+
executionCtx?: ExecutionContext,
|
|
151
|
+
workerUrl?: string,
|
|
152
|
+
serviceKey?: string,
|
|
153
|
+
) {
|
|
154
|
+
const config = parseConfig(env);
|
|
155
|
+
const adminDb = buildAdminDbProxy({
|
|
156
|
+
databaseNamespace: env.DATABASE,
|
|
157
|
+
config,
|
|
158
|
+
workerUrl,
|
|
159
|
+
serviceKey,
|
|
160
|
+
env,
|
|
161
|
+
executionCtx,
|
|
162
|
+
});
|
|
163
|
+
|
|
164
|
+
return {
|
|
165
|
+
db: adminDb,
|
|
166
|
+
table: (name: string) => adminDb('shared').table(name),
|
|
167
|
+
auth: buildAdminAuthContext({ d1Database: env.AUTH_DB, serviceKey, workerUrl }),
|
|
168
|
+
async sql(namespace: string, id: string | undefined, query: string, params?: unknown[]) {
|
|
169
|
+
if (workerUrl && serviceKey) {
|
|
170
|
+
const res = await fetch(`${workerUrl}/api/sql`, {
|
|
171
|
+
method: 'POST',
|
|
172
|
+
headers: { 'Content-Type': 'application/json', 'X-EdgeBase-Service-Key': serviceKey },
|
|
173
|
+
body: JSON.stringify({ namespace, id, sql: query, params: params ?? [] }),
|
|
174
|
+
});
|
|
175
|
+
if (!res.ok) throw new Error(`admin.sql() failed: ${res.status}`);
|
|
176
|
+
return res.json();
|
|
177
|
+
}
|
|
178
|
+
throw new Error('admin.sql() requires workerUrl in storage hook context.');
|
|
179
|
+
},
|
|
180
|
+
async broadcast(channel: string, event: string, payload?: Record<string, unknown>) {
|
|
181
|
+
if (workerUrl && serviceKey) {
|
|
182
|
+
await fetch(`${workerUrl}/api/db/broadcast`, {
|
|
183
|
+
method: 'POST',
|
|
184
|
+
headers: { 'Content-Type': 'application/json', 'X-EdgeBase-Service-Key': serviceKey },
|
|
185
|
+
body: JSON.stringify({ channel, event, payload: payload ?? {} }),
|
|
186
|
+
});
|
|
187
|
+
return;
|
|
188
|
+
}
|
|
189
|
+
throw new Error('admin.broadcast() requires workerUrl in storage hook context.');
|
|
190
|
+
},
|
|
191
|
+
functions: {
|
|
192
|
+
async call(name: string, data?: unknown) {
|
|
193
|
+
if (workerUrl && serviceKey) {
|
|
194
|
+
const safeName = name.split('/').map(encodeURIComponent).join('/');
|
|
195
|
+
const res = await fetch(`${workerUrl}/api/functions/${safeName}`, {
|
|
196
|
+
method: 'POST',
|
|
197
|
+
headers: { 'Content-Type': 'application/json', 'X-EdgeBase-Service-Key': serviceKey },
|
|
198
|
+
body: JSON.stringify(data ?? {}),
|
|
199
|
+
});
|
|
200
|
+
if (!res.ok) throw new Error(`admin.functions.call('${name}') failed: ${res.status}`);
|
|
201
|
+
return res.json();
|
|
202
|
+
}
|
|
203
|
+
throw new Error('admin.functions.call() requires workerUrl in storage hook context.');
|
|
204
|
+
},
|
|
205
|
+
},
|
|
206
|
+
kv: (namespace: string) => buildFunctionKvProxy(namespace, config, env, workerUrl, serviceKey),
|
|
207
|
+
d1: (database: string) => buildFunctionD1Proxy(database, config, env, workerUrl, serviceKey),
|
|
208
|
+
vector: (index: string) => buildFunctionVectorizeProxy(index, config, env, workerUrl, serviceKey),
|
|
209
|
+
push: buildFunctionPushProxy(workerUrl, serviceKey),
|
|
210
|
+
};
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
// ─── Helpers ───
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
/** Normalize a raw storage rule value to a callable. */
|
|
217
|
+
function normalizeStorageRule(
|
|
218
|
+
rule: ((auth: AuthContext | null, file: R2FileMeta | WriteFileMeta) => boolean) | boolean | string | undefined,
|
|
219
|
+
): ((auth: AuthContext | null, resource: R2FileMeta | WriteFileMeta | null) => boolean) | null {
|
|
220
|
+
if (rule === undefined || rule === null) return null;
|
|
221
|
+
if (typeof rule === 'boolean') return () => rule;
|
|
222
|
+
if (typeof rule === 'function') {
|
|
223
|
+
const fn = rule;
|
|
224
|
+
return (auth, resource) => fn(auth, (resource ?? {}) as R2FileMeta | WriteFileMeta);
|
|
225
|
+
}
|
|
226
|
+
if (typeof rule === 'string') {
|
|
227
|
+
return (auth, resource) => evalStorageStringRule(rule, auth, resource);
|
|
228
|
+
}
|
|
229
|
+
return null;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
/** Simple string rule evaluator for storage rules from JSON config. */
|
|
233
|
+
function evalStorageStringRule(
|
|
234
|
+
expr: string,
|
|
235
|
+
auth: AuthContext | null,
|
|
236
|
+
resource: R2FileMeta | WriteFileMeta | null,
|
|
237
|
+
): boolean {
|
|
238
|
+
const e = expr.trim().replace(/\s+/g, ' ');
|
|
239
|
+
if (e === 'true') return true;
|
|
240
|
+
if (e === 'false') return false;
|
|
241
|
+
if (e === 'auth != null' || e === 'auth !== null') return auth !== null;
|
|
242
|
+
if (e === 'auth == null' || e === 'auth === null') return auth === null;
|
|
243
|
+
// auth.id == resource.X
|
|
244
|
+
const authIdEqResource = /^auth\.id ===? resource\.(\w+)$/.exec(e);
|
|
245
|
+
if (authIdEqResource) {
|
|
246
|
+
const field = authIdEqResource[1];
|
|
247
|
+
return auth !== null && resource !== null && resource !== undefined
|
|
248
|
+
&& auth.id === (resource as unknown as Record<string, unknown>)[field];
|
|
249
|
+
}
|
|
250
|
+
// Default: deny (fail-closed for unknown/unsupported expressions)
|
|
251
|
+
console.warn(`[Storage] Unrecognized string rule expression: "${expr}" — denied (fail-closed).`);
|
|
252
|
+
return false;
|
|
253
|
+
}
|
|
254
|
+
|
|
255
|
+
/** Evaluate a storage access rule (function, boolean, or string — §3/§5). */
|
|
256
|
+
function checkStorageRule(
|
|
257
|
+
rule: ((auth: AuthContext | null, file: R2FileMeta | WriteFileMeta) => boolean) | boolean | string | undefined,
|
|
258
|
+
auth: AuthContext | null,
|
|
259
|
+
resource: R2FileMeta | WriteFileMeta | null,
|
|
260
|
+
action: string,
|
|
261
|
+
bucketName: string,
|
|
262
|
+
release?: boolean,
|
|
263
|
+
): void {
|
|
264
|
+
const ruleFn = normalizeStorageRule(rule);
|
|
265
|
+
// Default deny — bypassed when release is false
|
|
266
|
+
if (ruleFn === null) {
|
|
267
|
+
if (!release) return; // release: false → allow without rules
|
|
268
|
+
throw new EdgeBaseError(403, `Access denied. No '${action}' rule defined for bucket '${bucketName}'.`, undefined, 'access-denied');
|
|
269
|
+
}
|
|
270
|
+
try {
|
|
271
|
+
const result = ruleFn(auth, resource);
|
|
272
|
+
if (!result) throw new EdgeBaseError(403, 'Access denied by storage access rules.', undefined, 'access-denied');
|
|
273
|
+
} catch (e) {
|
|
274
|
+
if (e instanceof EdgeBaseError) throw e;
|
|
275
|
+
throw new EdgeBaseError(403, 'Access denied by storage access rules.', undefined, 'access-denied');
|
|
276
|
+
}
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
/** Get bucket config, throw if bucket not configured. Returns release flag for rule evaluation. */
|
|
280
|
+
function getBucketConfig(env: Env, bucketName: string): { bucketConfig: StorageBucketConfig; release: boolean } {
|
|
281
|
+
const config = parseConfig(env);
|
|
282
|
+
const bucketConfig = config.storage?.buckets?.[bucketName];
|
|
283
|
+
if (!bucketConfig) {
|
|
284
|
+
throw new EdgeBaseError(404, `Storage bucket '${bucketName}' is not configured.`, undefined, 'not-found');
|
|
285
|
+
}
|
|
286
|
+
return { bucketConfig, release: config.release ?? false };
|
|
287
|
+
}
|
|
288
|
+
|
|
289
|
+
/** Check Service Key bypass for storage requests. */
|
|
290
|
+
function checkServiceKey(env: Env, header: string | undefined, scope: string, req?: { header: (name: string) => string | undefined }): boolean {
|
|
291
|
+
const config = parseConfig(env);
|
|
292
|
+
const constraintCtx: ConstraintContext = {
|
|
293
|
+
env: env.ENVIRONMENT,
|
|
294
|
+
};
|
|
295
|
+
if (req) {
|
|
296
|
+
constraintCtx.ip = getTrustedClientIp(env, req);
|
|
297
|
+
}
|
|
298
|
+
const { result } = validateKey(header, scope, config, env, undefined, constraintCtx);
|
|
299
|
+
if (result === 'valid') return true;
|
|
300
|
+
if (result === 'invalid') {
|
|
301
|
+
throw new EdgeBaseError(401, 'Unauthorized. Invalid Service Key.', undefined, 'unauthenticated');
|
|
302
|
+
}
|
|
303
|
+
return false; // 'missing' → continue to normal rules
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
// Claims carried by an HMAC signed-URL token (see createSignedToken/verifySignedToken).
type SignedTokenClaims = {
  // Unix epoch milliseconds after which the token is rejected.
  expiresAt: number;
  // Optional upload size cap in bytes; null when the token carries no cap.
  maxBytes: number | null;
};
|
|
310
|
+
|
|
311
|
+
function parseByteSize(value?: string): number | null {
|
|
312
|
+
if (!value) return null;
|
|
313
|
+
const trimmed = value.trim();
|
|
314
|
+
const match = trimmed.match(/^(\d+)(B|KB|MB|GB)$/i);
|
|
315
|
+
if (!match) {
|
|
316
|
+
throw new EdgeBaseError(400, 'Invalid maxFileSize. Use a byte size like 128B, 1KB, 5MB, or 1GB.', undefined, 'validation-failed');
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
const amount = parseInt(match[1], 10);
|
|
320
|
+
const unit = match[2].toUpperCase();
|
|
321
|
+
const multiplier = unit === 'B'
|
|
322
|
+
? 1
|
|
323
|
+
: unit === 'KB'
|
|
324
|
+
? 1024
|
|
325
|
+
: unit === 'MB'
|
|
326
|
+
? 1024 * 1024
|
|
327
|
+
: 1024 * 1024 * 1024;
|
|
328
|
+
|
|
329
|
+
return amount * multiplier;
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
/** Create HMAC-based signed URL token. */
|
|
333
|
+
export async function createSignedToken(
|
|
334
|
+
key: string,
|
|
335
|
+
bucket: string,
|
|
336
|
+
expiresAt: number,
|
|
337
|
+
secret: string,
|
|
338
|
+
maxBytes?: number | null,
|
|
339
|
+
): Promise<string> {
|
|
340
|
+
const encoder = new TextEncoder();
|
|
341
|
+
const normalizedMaxBytes = typeof maxBytes === 'number' && Number.isFinite(maxBytes)
|
|
342
|
+
? Math.max(0, Math.trunc(maxBytes))
|
|
343
|
+
: null;
|
|
344
|
+
const data = `${bucket}:${key}:${expiresAt}:${normalizedMaxBytes ?? ''}`;
|
|
345
|
+
const cryptoKey = await crypto.subtle.importKey(
|
|
346
|
+
'raw', encoder.encode(secret), { name: 'HMAC', hash: 'SHA-256' }, false, ['sign'],
|
|
347
|
+
);
|
|
348
|
+
const signature = await crypto.subtle.sign('HMAC', cryptoKey, encoder.encode(data));
|
|
349
|
+
const sigHex = Array.from(new Uint8Array(signature)).map(b => b.toString(16).padStart(2, '0')).join('');
|
|
350
|
+
return normalizedMaxBytes === null
|
|
351
|
+
? `${expiresAt}.${sigHex}`
|
|
352
|
+
: `${expiresAt}.${normalizedMaxBytes}.${sigHex}`;
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
/** Verify HMAC-based signed URL token. */
|
|
356
|
+
async function verifySignedToken(
|
|
357
|
+
token: string,
|
|
358
|
+
key: string,
|
|
359
|
+
bucket: string,
|
|
360
|
+
secret: string,
|
|
361
|
+
): Promise<{ valid: boolean; claims: SignedTokenClaims }> {
|
|
362
|
+
const parts = token.split('.');
|
|
363
|
+
if (parts.length !== 2 && parts.length !== 3) {
|
|
364
|
+
return { valid: false, claims: { expiresAt: 0, maxBytes: null } };
|
|
365
|
+
}
|
|
366
|
+
const expiresAt = parseInt(parts[0]!, 10);
|
|
367
|
+
const maxBytes = parts.length === 3 ? parseInt(parts[1]!, 10) : null;
|
|
368
|
+
const signature = parts[parts.length - 1]!;
|
|
369
|
+
|
|
370
|
+
if (isNaN(expiresAt) || Date.now() > expiresAt) {
|
|
371
|
+
return { valid: false, claims: { expiresAt, maxBytes: Number.isFinite(maxBytes ?? NaN) ? maxBytes : null } };
|
|
372
|
+
}
|
|
373
|
+
if (parts.length === 3 && !Number.isFinite(maxBytes)) {
|
|
374
|
+
return { valid: false, claims: { expiresAt, maxBytes: null } };
|
|
375
|
+
}
|
|
376
|
+
|
|
377
|
+
const encoder = new TextEncoder();
|
|
378
|
+
const data = `${bucket}:${key}:${expiresAt}:${parts.length === 3 ? maxBytes : ''}`;
|
|
379
|
+
const cryptoKey = await crypto.subtle.importKey(
|
|
380
|
+
'raw', encoder.encode(secret), { name: 'HMAC', hash: 'SHA-256' }, false, ['sign'],
|
|
381
|
+
);
|
|
382
|
+
const expected = await crypto.subtle.sign('HMAC', cryptoKey, encoder.encode(data));
|
|
383
|
+
const expectedHex = Array.from(new Uint8Array(expected)).map(b => b.toString(16).padStart(2, '0')).join('');
|
|
384
|
+
return {
|
|
385
|
+
valid: timingSafeEqual(signature, expectedHex),
|
|
386
|
+
claims: {
|
|
387
|
+
expiresAt,
|
|
388
|
+
maxBytes: parts.length === 3 ? maxBytes : null,
|
|
389
|
+
},
|
|
390
|
+
};
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
/** Parse duration string (e.g. '1h', '30m') to milliseconds. Max 7 days. */
|
|
394
|
+
export function parseDuration(str: string): number {
|
|
395
|
+
const match = str.match(/^(\d+)(s|m|h|d)$/);
|
|
396
|
+
if (!match) return 3600 * 1000; // default 1h
|
|
397
|
+
const value = parseInt(match[1], 10);
|
|
398
|
+
const unit = match[2];
|
|
399
|
+
const multipliers: Record<string, number> = { s: 1000, m: 60 * 1000, h: 3600 * 1000, d: 86400 * 1000 };
|
|
400
|
+
const ms = value * (multipliers[unit] || 3600 * 1000);
|
|
401
|
+
const MAX_MS = 7 * 86400 * 1000; // 7 days max for signed URLs
|
|
402
|
+
return Math.min(ms, MAX_MS);
|
|
403
|
+
}
|
|
404
|
+
|
|
405
|
+
/** Build R2 object key with bucket prefix. */
|
|
406
|
+
function r2Key(bucket: string, key: string): string {
|
|
407
|
+
return `${bucket}/${key}`;
|
|
408
|
+
}
|
|
409
|
+
|
|
410
|
+
/**
|
|
411
|
+
* Validate storage key for security issues.
|
|
412
|
+
* Rejects path traversal, null bytes, and overly long keys.
|
|
413
|
+
*/
|
|
414
|
+
function validateStorageKey(key: string): void {
|
|
415
|
+
if (!key || !key.trim()) {
|
|
416
|
+
throw new EdgeBaseError(400, 'Storage key must not be empty.', undefined, 'validation-failed');
|
|
417
|
+
}
|
|
418
|
+
if (key.length > 1024) {
|
|
419
|
+
throw new EdgeBaseError(400, 'Storage key must not exceed 1024 characters.', undefined, 'validation-failed');
|
|
420
|
+
}
|
|
421
|
+
if (key.includes('\0')) {
|
|
422
|
+
throw new EdgeBaseError(400, 'Storage key must not contain null bytes.', undefined, 'validation-failed');
|
|
423
|
+
}
|
|
424
|
+
// Check for path traversal: ".." as a standalone segment
|
|
425
|
+
if (/(^|\/)\.\.(\/|$)/.test(key)) {
|
|
426
|
+
throw new EdgeBaseError(400, 'Storage key must not contain path traversal sequences (..).', undefined, 'validation-failed');
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
/** Build KV key for multipart part tracking (M17). */
|
|
431
|
+
export function partTrackingKey(bucket: string, key: string, uploadId: string): string {
|
|
432
|
+
return `upload:${bucket}:${key}:${uploadId}:parts`;
|
|
433
|
+
}
|
|
434
|
+
|
|
435
|
+
/** Part tracking TTL — 7 days, synced with R2 auto-abort (M17). */
// KV part-tracking entries expire in step with R2's multipart auto-abort window.
export const PART_TRACKING_TTL = 7 * 24 * 60 * 60; // 604800 seconds
|
|
437
|
+
|
|
438
|
+
/** Build file metadata from R2 object. */
|
|
439
|
+
function buildMetadata(obj: R2Object): R2FileMeta {
|
|
440
|
+
return {
|
|
441
|
+
key: obj.key.split('/').slice(1).join('/'), // remove bucket prefix
|
|
442
|
+
size: obj.size,
|
|
443
|
+
contentType: obj.httpMetadata?.contentType || 'application/octet-stream',
|
|
444
|
+
etag: obj.etag,
|
|
445
|
+
uploadedAt: obj.uploaded?.toISOString(),
|
|
446
|
+
uploadedBy: obj.customMetadata?.uploadedBy || null,
|
|
447
|
+
customMetadata: obj.customMetadata || {},
|
|
448
|
+
} as R2FileMeta;
|
|
449
|
+
}
|
|
450
|
+
|
|
451
|
+
// Prefix marking synthetic cursors that encode an absolute offset rather than
// a native R2 continuation token (used by the offset-pagination path below).
const STORAGE_OFFSET_CURSOR_PREFIX = 'offset:';
|
|
452
|
+
|
|
453
|
+
function parseStorageListInteger(raw: string | undefined, name: string, fallback: number): number {
|
|
454
|
+
if (raw === undefined || raw === null || raw === '') return fallback;
|
|
455
|
+
const parsed = parseInt(raw, 10);
|
|
456
|
+
if (!Number.isFinite(parsed) || parsed < 0) {
|
|
457
|
+
throw new EdgeBaseError(400, `Invalid ${name}: must be a non-negative integer.`, undefined, 'validation-failed');
|
|
458
|
+
}
|
|
459
|
+
return parsed;
|
|
460
|
+
}
|
|
461
|
+
|
|
462
|
+
/**
 * List objects under a prefix with limit/cursor/offset pagination.
 *
 * Fast path: no offset and no synthetic cursor → a single native R2 list call,
 * returning R2's own continuation cursor. Offset path: skip `offset` objects by
 * scanning pages, return up to `limit` objects, and hand back a synthetic
 * `offset:<n>` cursor encoding the absolute position of the next page.
 */
async function listStorageObjects(
  storage: R2Bucket,
  options: { prefix: string; limit: number; cursor?: string; offset?: number },
): Promise<{ objects: R2Object[]; truncated: boolean; cursor: string | null }> {
  // Clamp to 1..1000 (R2's per-call maximum — TODO confirm against binding docs).
  const limit = Math.min(Math.max(options.limit, 1), 1000);
  const rawCursor = options.cursor;
  const usesOffsetCursor = !!rawCursor && rawCursor.startsWith(STORAGE_OFFSET_CURSOR_PREFIX);

  // Fast path: native R2 pagination — nothing to skip.
  if (!usesOffsetCursor && (options.offset ?? 0) === 0) {
    const listed = await storage.list({
      prefix: options.prefix,
      cursor: rawCursor,
      limit,
    });
    return {
      objects: listed.objects,
      truncated: listed.truncated,
      cursor: listed.truncated ? listed.cursor : null,
    };
  }

  // Offset path: resolve the absolute starting position from either the
  // synthetic cursor or the explicit offset option.
  const baseOffset = usesOffsetCursor
    ? parseStorageListInteger(rawCursor!.slice(STORAGE_OFFSET_CURSOR_PREFIX.length), 'storage cursor offset', 0)
    : (options.offset ?? 0);
  let remainingOffset = baseOffset;
  const collected: R2Object[] = [];
  let cursor: string | undefined;
  // Fetch one extra object so we can tell whether another page exists.
  const targetCount = limit + 1;

  while (collected.length < targetCount) {
    // Request enough to cover both what is still to skip and what is still needed.
    const pageLimit = Math.min(1000, Math.max(1, remainingOffset + (targetCount - collected.length)));
    const listed = await storage.list({
      prefix: options.prefix,
      cursor,
      limit: pageLimit,
    });

    if (remainingOffset >= listed.objects.length) {
      // Entire page falls inside the skip window.
      remainingOffset -= listed.objects.length;
    } else {
      collected.push(...listed.objects.slice(remainingOffset));
      remainingOffset = 0;
    }

    if (!listed.truncated) {
      // Bucket exhausted — the sentinel extra object (if collected) signals more pages.
      const hasMore = collected.length > limit;
      return {
        objects: collected.slice(0, limit),
        truncated: hasMore,
        cursor: hasMore ? `${STORAGE_OFFSET_CURSOR_PREFIX}${baseOffset + limit}` : null,
      };
    }

    cursor = listed.cursor;
  }

  // Collected limit+1 objects while R2 was still truncated — more pages definitely exist.
  return {
    objects: collected.slice(0, limit),
    truncated: true,
    cursor: `${STORAGE_OFFSET_CURSOR_PREFIX}${baseOffset + limit}`,
  };
}
|
|
524
|
+
|
|
525
|
+
/**
|
|
526
|
+
* Local R2 emulation can transiently miss freshly uploaded objects on `head()`
|
|
527
|
+
* even though `get()` succeeds immediately. Fall back to `get()` so metadata,
|
|
528
|
+
* exists, and delete stay consistent with download semantics.
|
|
529
|
+
*/
|
|
530
|
+
async function getStoredObject(
|
|
531
|
+
storage: R2Bucket,
|
|
532
|
+
fullKey: string,
|
|
533
|
+
): Promise<R2Object | R2ObjectBody | null> {
|
|
534
|
+
const headed = await storage.head(fullKey);
|
|
535
|
+
if (headed) {
|
|
536
|
+
return headed;
|
|
537
|
+
}
|
|
538
|
+
|
|
539
|
+
return storage.get(fullKey);
|
|
540
|
+
}
|
|
541
|
+
|
|
542
|
+
function decodeStorageKey(rawKey: string): string {
|
|
543
|
+
return rawKey
|
|
544
|
+
.split('/')
|
|
545
|
+
.map((segment) => decodeURIComponent(segment))
|
|
546
|
+
.join('/');
|
|
547
|
+
}
|
|
548
|
+
|
|
549
|
+
function getCatchAllTail(c: Context<HonoEnv>, bucketName: string): string | null {
|
|
550
|
+
const marker = `/api/storage/${bucketName}/`;
|
|
551
|
+
if (!c.req.path.startsWith(marker)) {
|
|
552
|
+
return null;
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
return c.req.path.slice(marker.length);
|
|
556
|
+
}
|
|
557
|
+
|
|
558
|
+
function resolveStorageKey(
|
|
559
|
+
c: Context<HonoEnv>,
|
|
560
|
+
bucketName: string,
|
|
561
|
+
options?: { suffix?: string },
|
|
562
|
+
): string {
|
|
563
|
+
const directKey = c.req.param('key');
|
|
564
|
+
if (directKey) {
|
|
565
|
+
return directKey;
|
|
566
|
+
}
|
|
567
|
+
|
|
568
|
+
const tail = getCatchAllTail(c, bucketName);
|
|
569
|
+
if (!tail) {
|
|
570
|
+
return '';
|
|
571
|
+
}
|
|
572
|
+
|
|
573
|
+
const trimmedTail = options?.suffix && tail.endsWith(options.suffix)
|
|
574
|
+
? tail.slice(0, -options.suffix.length)
|
|
575
|
+
: tail;
|
|
576
|
+
|
|
577
|
+
return decodeStorageKey(trimmedTail);
|
|
578
|
+
}
|
|
579
|
+
|
|
580
|
+
// ─── Storage Hook Helpers ───
|
|
581
|
+
|
|
582
|
+
/** Get storage hooks for a bucket from config. */
|
|
583
|
+
function getStorageHooks(env: Env, bucketName: string): StorageHooks | undefined {
|
|
584
|
+
const config = parseConfig(env);
|
|
585
|
+
return config.storage?.buckets?.[bucketName]?.handlers?.hooks;
|
|
586
|
+
}
|
|
587
|
+
|
|
588
|
+
/** Build StorageHookCtx for Worker context. */
|
|
589
|
+
function buildStorageHookCtx(
|
|
590
|
+
env: Env,
|
|
591
|
+
executionCtx: ExecutionContext,
|
|
592
|
+
workerUrl?: string,
|
|
593
|
+
): StorageHookCtx {
|
|
594
|
+
const serviceKey = resolveRootServiceKey(parseConfig(env), env);
|
|
595
|
+
const push = buildFunctionPushProxy(workerUrl, serviceKey);
|
|
596
|
+
|
|
597
|
+
return {
|
|
598
|
+
waitUntil: (p: Promise<unknown>) => executionCtx.waitUntil(p),
|
|
599
|
+
push: {
|
|
600
|
+
async send(userId: string, payload: { title?: string; body: string }): Promise<void> {
|
|
601
|
+
if (!workerUrl || !serviceKey) return; // No self-call context available — skip push silently
|
|
602
|
+
await push.send(userId, payload).catch((error) => {
|
|
603
|
+
console.warn('[EdgeBase] storage hook push.send failed:', error);
|
|
604
|
+
});
|
|
605
|
+
},
|
|
606
|
+
},
|
|
607
|
+
};
|
|
608
|
+
}
|
|
609
|
+
|
|
610
|
+
// ─── Upload ───
|
|
611
|
+
|
|
612
|
+
// OpenAPI route descriptor: POST /{bucket}/upload (multipart/form-data).
const uploadFile = createRoute({
  operationId: 'uploadFile',
  method: 'post',
  path: '/{bucket}/upload',
  tags: ['client'],
  summary: 'Upload file',
  request: {
    params: z.object({ bucket: z.string() }),
    // Form fields (file/key/customMetadata) are validated in the handler, not by the schema.
    body: { content: { 'multipart/form-data': { schema: z.object({}).passthrough() } }, required: true },
  },
  responses: {
    201: { description: 'File uploaded', content: { 'application/json': { schema: jsonResponseSchema } } },
    400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});
|
|
628
|
+
|
|
629
|
+
storage.openapi(uploadFile, async (c) => {
|
|
630
|
+
const bucketName = c.req.param('bucket')!;
|
|
631
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
632
|
+
|
|
633
|
+
// Security: signed upload token OR write rule
|
|
634
|
+
const token = c.req.query('token');
|
|
635
|
+
const tokenKey = c.req.query('key');
|
|
636
|
+
let skipRules = false;
|
|
637
|
+
|
|
638
|
+
if (token && tokenKey) {
|
|
639
|
+
const secret = c.env.JWT_USER_SECRET;
|
|
640
|
+
if (secret) {
|
|
641
|
+
const verified = await verifySignedToken(token, tokenKey, bucketName, secret);
|
|
642
|
+
if (verified.valid) {
|
|
643
|
+
skipRules = true;
|
|
644
|
+
}
|
|
645
|
+
}
|
|
646
|
+
// secret absent → ignore token, fall through to rule evaluation (asymmetric fail-closed,)
|
|
647
|
+
}
|
|
648
|
+
|
|
649
|
+
// Parse multipart form data first — needed to get actual file size/type for write rule (§19)
|
|
650
|
+
let formData: FormData;
|
|
651
|
+
try {
|
|
652
|
+
formData = await c.req.formData();
|
|
653
|
+
} catch {
|
|
654
|
+
throw new EdgeBaseError(400, 'Expected multipart/form-data request body.', undefined, 'validation-failed');
|
|
655
|
+
}
|
|
656
|
+
const file = formData.get('file') as File | null;
|
|
657
|
+
const key = formData.get('key') as string | null;
|
|
658
|
+
const customMetadataStr = formData.get('customMetadata') as string | null;
|
|
659
|
+
|
|
660
|
+
if (!file || !key) {
|
|
661
|
+
throw new EdgeBaseError(400, 'Missing required fields: file and key.', undefined, 'validation-failed');
|
|
662
|
+
}
|
|
663
|
+
validateStorageKey(key);
|
|
664
|
+
if (skipRules && tokenKey !== key) {
|
|
665
|
+
throw new EdgeBaseError(400, 'Signed upload key mismatch between query and form body.', undefined, 'validation-failed');
|
|
666
|
+
}
|
|
667
|
+
|
|
668
|
+
let signedClaims: SignedTokenClaims | null = null;
|
|
669
|
+
if (skipRules && token && tokenKey) {
|
|
670
|
+
const secret = c.env.JWT_USER_SECRET;
|
|
671
|
+
if (secret) {
|
|
672
|
+
const verified = await verifySignedToken(token, tokenKey, bucketName, secret);
|
|
673
|
+
if (verified.valid) {
|
|
674
|
+
signedClaims = verified.claims;
|
|
675
|
+
}
|
|
676
|
+
}
|
|
677
|
+
}
|
|
678
|
+
if (signedClaims?.maxBytes != null && file.size > signedClaims.maxBytes) {
|
|
679
|
+
throw new EdgeBaseError(413, `Signed upload exceeds maxFileSize of ${signedClaims.maxBytes} bytes.`, undefined, 'payload-too-large');
|
|
680
|
+
}
|
|
681
|
+
|
|
682
|
+
if (!skipRules) {
|
|
683
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
|
|
684
|
+
if (!serviceKeyBypass) {
|
|
685
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
686
|
+
// §19: WriteFileMeta uses actual file metadata from form data
|
|
687
|
+
const writeFileMeta: WriteFileMeta = {
|
|
688
|
+
size: file.size,
|
|
689
|
+
contentType: file.type || 'application/octet-stream',
|
|
690
|
+
key: key,
|
|
691
|
+
};
|
|
692
|
+
checkStorageRule(bucketConfig.access?.write, auth, writeFileMeta, 'write', bucketName, release);
|
|
693
|
+
}
|
|
694
|
+
}
|
|
695
|
+
|
|
696
|
+
// §5/§19: maxFileSize/allowedMimeTypes removed — write rule handles validation.
|
|
697
|
+
// Parse custom metadata
|
|
698
|
+
let customMetadata: Record<string, string> = {};
|
|
699
|
+
if (customMetadataStr) {
|
|
700
|
+
try { customMetadata = JSON.parse(customMetadataStr); } catch { /* ignore */ }
|
|
701
|
+
}
|
|
702
|
+
|
|
703
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
704
|
+
if (auth?.id) {
|
|
705
|
+
customMetadata.uploadedBy = auth.id as string;
|
|
706
|
+
}
|
|
707
|
+
|
|
708
|
+
// Plugin blocking storage hooks (beforeUpload)
|
|
709
|
+
const pluginMeta = await executeBlockingStorageHooks('beforeUpload', { key, bucket: bucketName, size: file.size, contentType: file.type || 'application/octet-stream' } as WriteFileMeta & { bucket: string }, auth, c.env, getWorkerUrl(c.req.url, c.env));
|
|
710
|
+
if (pluginMeta) Object.assign(customMetadata, pluginMeta);
|
|
711
|
+
|
|
712
|
+
// beforeUpload hook — blocking, can inject custom metadata or reject
|
|
713
|
+
const hooks = getStorageHooks(c.env, bucketName);
|
|
714
|
+
if (hooks?.beforeUpload) {
|
|
715
|
+
const writeFileMeta: WriteFileMeta = { size: file.size, contentType: file.type || 'application/octet-stream', key };
|
|
716
|
+
const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
|
|
717
|
+
let extraMeta: Record<string, string> | void;
|
|
718
|
+
try {
|
|
719
|
+
extraMeta = await hooks.beforeUpload(auth, writeFileMeta, hookCtx);
|
|
720
|
+
} catch (error) {
|
|
721
|
+
throw normalizeStorageHookError(error, 'beforeUpload');
|
|
722
|
+
}
|
|
723
|
+
if (extraMeta && typeof extraMeta === 'object') {
|
|
724
|
+
Object.assign(customMetadata, extraMeta);
|
|
725
|
+
}
|
|
726
|
+
}
|
|
727
|
+
|
|
728
|
+
// Upload to R2 — use arrayBuffer() instead of stream() for wrangler dev compatibility.
|
|
729
|
+
// ReadableStream uploads can return null in wrangler local R2 emulation.
|
|
730
|
+
const fullKey = r2Key(bucketName, key);
|
|
731
|
+
const buf = await file.arrayBuffer();
|
|
732
|
+
const obj = await c.env.STORAGE.put(fullKey, buf, {
|
|
733
|
+
httpMetadata: {
|
|
734
|
+
contentType: file.type || 'application/octet-stream',
|
|
735
|
+
},
|
|
736
|
+
customMetadata,
|
|
737
|
+
});
|
|
738
|
+
|
|
739
|
+
if (!obj) {
|
|
740
|
+
throw new EdgeBaseError(500, `Failed to upload file '${key}' to bucket '${bucketName}'. R2 put() returned null — check that the STORAGE R2 binding is correctly configured in wrangler.toml and the bucket exists.`, undefined, 'internal-error');
|
|
741
|
+
}
|
|
742
|
+
|
|
743
|
+
// afterUpload hook — fire-and-forget (config-level)
|
|
744
|
+
if (hooks?.afterUpload) {
|
|
745
|
+
const meta = buildMetadata(obj);
|
|
746
|
+
const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
|
|
747
|
+
c.executionCtx.waitUntil(
|
|
748
|
+
Promise.resolve(hooks.afterUpload(auth, meta, hookCtx)).catch((err) => {
|
|
749
|
+
console.error('[EdgeBase] afterUpload hook error:', err);
|
|
750
|
+
}),
|
|
751
|
+
);
|
|
752
|
+
}
|
|
753
|
+
|
|
754
|
+
// afterUpload — plugin-registered storage hooks (metadata only, non-blocking)
|
|
755
|
+
executeStorageHooks('afterUpload', { ...buildMetadata(obj), bucket: bucketName }, auth, c.executionCtx, c.env, getWorkerUrl(c.req.url, c.env));
|
|
756
|
+
|
|
757
|
+
return c.json(buildMetadata(obj), 201);
|
|
758
|
+
});
|
|
759
|
+
|
|
760
|
+
// ─── Metadata ───
|
|
761
|
+
|
|
762
|
+
// OpenAPI route descriptor: GET /{bucket}/{key}/metadata.
const getFileMetadata = createRoute({
  operationId: 'getFileMetadata',
  method: 'get',
  path: '/{bucket}/{key}/metadata',
  tags: ['client'],
  summary: 'Get file metadata',
  request: {
    params: z.object({ bucket: z.string(), key: z.string() }),
  },
  responses: {
    200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
    400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
    404: { description: 'Not found', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});
|
|
778
|
+
|
|
779
|
+
const handleGetFileMetadata = async (c: Context<HonoEnv>) => {
|
|
780
|
+
const bucketName = c.req.param('bucket')!;
|
|
781
|
+
const key = resolveStorageKey(c, bucketName, { suffix: '/metadata' });
|
|
782
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
783
|
+
|
|
784
|
+
const fullKey = r2Key(bucketName, key);
|
|
785
|
+
const obj = await getStoredObject(c.env.STORAGE, fullKey);
|
|
786
|
+
if (!obj) {
|
|
787
|
+
throw new EdgeBaseError(404, 'File not found.', undefined, 'not-found');
|
|
788
|
+
}
|
|
789
|
+
|
|
790
|
+
// Security: check read rule with resource context
|
|
791
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:read`, c.req);
|
|
792
|
+
if (!serviceKeyBypass) {
|
|
793
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
794
|
+
const resource = buildMetadata(obj);
|
|
795
|
+
checkStorageRule(bucketConfig.access?.read, auth, resource, 'read', bucketName, release);
|
|
796
|
+
}
|
|
797
|
+
|
|
798
|
+
return c.json(buildMetadata(obj));
|
|
799
|
+
};
|
|
800
|
+
storage.openapi(getFileMetadata, handleGetFileMetadata);
|
|
801
|
+
|
|
802
|
+
// OpenAPI route descriptor: PATCH /{bucket}/{key}/metadata.
const updateFileMetadata = createRoute({
  operationId: 'updateFileMetadata',
  method: 'patch',
  path: '/{bucket}/{key}/metadata',
  tags: ['client'],
  summary: 'Update file metadata',
  request: {
    params: z.object({ bucket: z.string(), key: z.string() }),
    // Both fields optional: customMetadata is shallow-merged, contentType replaced.
    body: { content: { 'application/json': { schema: z.object({ customMetadata: z.record(z.string(), z.string()).optional(), contentType: z.string().optional() }) } }, required: true },
  },
  responses: {
    200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
    400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
    404: { description: 'Not found', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});
|
|
819
|
+
|
|
820
|
+
/**
 * PATCH /{bucket}/{key}/metadata handler.
 * Shallow-merges customMetadata and/or replaces contentType, enforced by the
 * bucket's `write` rule (metadata update counts as a write) unless a valid
 * Service Key bypasses it.
 */
const handleUpdateFileMetadata = async (c: Context<HonoEnv>) => {
  const bucketName = c.req.param('bucket')!;
  const key = resolveStorageKey(c, bucketName, { suffix: '/metadata' });
  const { bucketConfig, release } = getBucketConfig(c.env, bucketName);

  // `get()` (not head) — the body stream is needed below for the re-put.
  const fullKey = r2Key(bucketName, key);
  const existing = await c.env.STORAGE.get(fullKey);
  if (!existing) {
    throw new EdgeBaseError(404, 'File not found.', undefined, 'not-found');
  }

  // Security: check write rule (metadata update = write)
  const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
  if (!serviceKeyBypass) {
    const auth = c.get('auth') as AuthContext | null;
    const resource = buildMetadata(existing);
    checkStorageRule(bucketConfig.access?.write, auth, resource, 'write', bucketName, release);
  }

  const body = await c.req.json<{ customMetadata?: Record<string, string>; contentType?: string }>();
  // Shallow merge: incoming keys win, untouched existing keys are preserved.
  const newCustomMetadata = { ...existing.customMetadata, ...body.customMetadata };
  const newContentType = body.contentType || existing.httpMetadata?.contentType || 'application/octet-stream';

  // R2 doesn't support metadata-only update — re-put with same body
  // (the body stream from get() above is consumed exactly once here).
  const obj = await c.env.STORAGE.put(fullKey, existing.body, {
    httpMetadata: { contentType: newContentType },
    customMetadata: newCustomMetadata,
  });

  if (!obj) {
    throw new EdgeBaseError(500, 'Failed to update metadata.', undefined, 'internal-error');
  }

  // onMetadataUpdate — plugin-registered storage hooks (metadata only, non-blocking)
  executeStorageHooks('onMetadataUpdate', { ...buildMetadata(obj), bucket: bucketName }, c.get('auth') as AuthContext | null, c.executionCtx, c.env, getWorkerUrl(c.req.url, c.env));

  return c.json(buildMetadata(obj));
};
storage.openapi(updateFileMetadata, handleUpdateFileMetadata);
|
|
859
|
+
|
|
860
|
+
// ─── Exists (HEAD) ───
|
|
861
|
+
|
|
862
|
+
// OpenAPI route descriptor: HEAD /{bucket}/{key} (existence probe, empty body).
const checkFileExists = createRoute({
  operationId: 'checkFileExists',
  method: 'head',
  path: '/{bucket}/{key}',
  tags: ['client'],
  summary: 'Check if file exists',
  request: {
    params: z.object({ bucket: z.string(), key: z.string() }),
  },
  responses: {
    200: { description: 'File exists' },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
    404: { description: 'Not found', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});
|
|
877
|
+
|
|
878
|
+
const handleCheckFileExists = async (c: Context<HonoEnv>) => {
|
|
879
|
+
const bucketName = c.req.param('bucket')!;
|
|
880
|
+
const key = resolveStorageKey(c, bucketName);
|
|
881
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
882
|
+
|
|
883
|
+
const fullKey = r2Key(bucketName, key);
|
|
884
|
+
const obj = await getStoredObject(c.env.STORAGE, fullKey);
|
|
885
|
+
if (!obj) {
|
|
886
|
+
throw new EdgeBaseError(404, 'File not found.', undefined, 'not-found');
|
|
887
|
+
}
|
|
888
|
+
|
|
889
|
+
// Security: check read rule
|
|
890
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:read`, c.req);
|
|
891
|
+
if (!serviceKeyBypass) {
|
|
892
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
893
|
+
const resource = buildMetadata(obj);
|
|
894
|
+
checkStorageRule(bucketConfig.access?.read, auth, resource, 'read', bucketName, release);
|
|
895
|
+
}
|
|
896
|
+
|
|
897
|
+
return c.body(null, 200);
|
|
898
|
+
};
|
|
899
|
+
storage.openapi(checkFileExists, handleCheckFileExists);
|
|
900
|
+
|
|
901
|
+
// ─── Multipart Upload Resume (M17) ───
|
|
902
|
+
// Must come before the /:key{.+} catch-all to avoid route shadowing
|
|
903
|
+
|
|
904
|
+
// OpenAPI route descriptor: GET /{bucket}/uploads/{uploadId}/parts (multipart resume, M17).
const getUploadParts = createRoute({
  operationId: 'getUploadParts',
  method: 'get',
  path: '/{bucket}/uploads/{uploadId}/parts',
  tags: ['client'],
  summary: 'Get uploaded parts',
  request: {
    params: z.object({ bucket: z.string(), uploadId: z.string() }),
    // The object key of the in-flight upload is passed as a query param.
    query: z.object({ key: z.string() }),
  },
  responses: {
    200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
    400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});
|
|
920
|
+
|
|
921
|
+
storage.openapi(getUploadParts, async (c) => {
|
|
922
|
+
const bucketName = c.req.param('bucket')!;
|
|
923
|
+
const uploadId = c.req.param('uploadId')!;
|
|
924
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
925
|
+
const key = c.req.query('key');
|
|
926
|
+
|
|
927
|
+
if (!key) {
|
|
928
|
+
throw new EdgeBaseError(400, 'Missing required query param: key.', undefined, 'validation-failed');
|
|
929
|
+
}
|
|
930
|
+
|
|
931
|
+
// Security: check write rule (resume upload = write operation)
|
|
932
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
|
|
933
|
+
if (!serviceKeyBypass) {
|
|
934
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
935
|
+
checkStorageRule(bucketConfig.access?.write, auth, null, 'write', bucketName, release);
|
|
936
|
+
}
|
|
937
|
+
|
|
938
|
+
const kvKey = partTrackingKey(bucketName, key, uploadId);
|
|
939
|
+
const parts = await c.env.KV.get(kvKey, 'json') as Array<{ partNumber: number; etag: string }> | null;
|
|
940
|
+
|
|
941
|
+
return c.json({
|
|
942
|
+
uploadId,
|
|
943
|
+
key,
|
|
944
|
+
parts: parts || [],
|
|
945
|
+
});
|
|
946
|
+
});
|
|
947
|
+
|
|
948
|
+
// OpenAPI route descriptor: GET /{bucket}/{key} (binary download, streamed body).
const downloadFile = createRoute({
  operationId: 'downloadFile',
  method: 'get',
  path: '/{bucket}/{key}',
  tags: ['client'],
  summary: 'Download file',
  request: {
    params: z.object({ bucket: z.string(), key: z.string() }),
  },
  responses: {
    200: { description: 'File content' },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
    404: { description: 'Not found', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});
|
|
963
|
+
|
|
964
|
+
const handleDownloadFile = async (c: Context<HonoEnv>) => {
|
|
965
|
+
const bucketName = c.req.param('bucket')!;
|
|
966
|
+
const key = resolveStorageKey(c, bucketName);
|
|
967
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
968
|
+
|
|
969
|
+
// Check for signed URL token
|
|
970
|
+
const token = c.req.query('token');
|
|
971
|
+
let skipRules = false;
|
|
972
|
+
|
|
973
|
+
if (token) {
|
|
974
|
+
// Asymmetric fail-closed (#99): secret 미설정 시 토큰 무시 → 보안 규칙으로 fallback
|
|
975
|
+
const secret = c.env.JWT_USER_SECRET;
|
|
976
|
+
if (secret) {
|
|
977
|
+
const verified = await verifySignedToken(token, key, bucketName, secret);
|
|
978
|
+
if (verified.valid) {
|
|
979
|
+
skipRules = true;
|
|
980
|
+
}
|
|
981
|
+
}
|
|
982
|
+
}
|
|
983
|
+
|
|
984
|
+
const fullKey = r2Key(bucketName, key);
|
|
985
|
+
const obj = await c.env.STORAGE.get(fullKey);
|
|
986
|
+
if (!obj) {
|
|
987
|
+
throw new EdgeBaseError(404, 'File not found.', undefined, 'not-found');
|
|
988
|
+
}
|
|
989
|
+
|
|
990
|
+
// Security: check read rule
|
|
991
|
+
if (!skipRules) {
|
|
992
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:read`, c.req);
|
|
993
|
+
if (!serviceKeyBypass) {
|
|
994
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
995
|
+
const resource = buildMetadata(obj);
|
|
996
|
+
checkStorageRule(bucketConfig.access?.read, auth, resource, 'read', bucketName, release);
|
|
997
|
+
}
|
|
998
|
+
}
|
|
999
|
+
|
|
1000
|
+
// Plugin blocking storage hooks (beforeDownload)
|
|
1001
|
+
{
|
|
1002
|
+
const dlAuth = c.get('auth') as AuthContext | null;
|
|
1003
|
+
const dlMeta = buildMetadata(obj);
|
|
1004
|
+
await executeBlockingStorageHooks('beforeDownload', { ...dlMeta, bucket: bucketName }, dlAuth, c.env, getWorkerUrl(c.req.url, c.env));
|
|
1005
|
+
}
|
|
1006
|
+
|
|
1007
|
+
// beforeDownload hook — blocking, throw to reject
|
|
1008
|
+
const hooks = getStorageHooks(c.env, bucketName);
|
|
1009
|
+
if (hooks?.beforeDownload) {
|
|
1010
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1011
|
+
const meta = buildMetadata(obj);
|
|
1012
|
+
const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
|
|
1013
|
+
try {
|
|
1014
|
+
await hooks.beforeDownload(auth, meta, hookCtx);
|
|
1015
|
+
} catch (error) {
|
|
1016
|
+
throw normalizeStorageHookError(error, 'beforeDownload');
|
|
1017
|
+
}
|
|
1018
|
+
}
|
|
1019
|
+
|
|
1020
|
+
// Stream response
|
|
1021
|
+
const headers = new Headers();
|
|
1022
|
+
headers.set('Content-Type', obj.httpMetadata?.contentType || 'application/octet-stream');
|
|
1023
|
+
headers.set('Content-Length', String(obj.size));
|
|
1024
|
+
headers.set('ETag', obj.etag);
|
|
1025
|
+
if (obj.uploaded) {
|
|
1026
|
+
headers.set('Last-Modified', obj.uploaded.toUTCString());
|
|
1027
|
+
}
|
|
1028
|
+
|
|
1029
|
+
return new Response(obj.body, { headers });
|
|
1030
|
+
};
|
|
1031
|
+
storage.openapi(downloadFile, handleDownloadFile);
|
|
1032
|
+
|
|
1033
|
+
// ─── List ───
|
|
1034
|
+
|
|
1035
|
+
const listFiles = createRoute({
|
|
1036
|
+
operationId: 'listFiles',
|
|
1037
|
+
method: 'get',
|
|
1038
|
+
path: '/{bucket}',
|
|
1039
|
+
tags: ['client'],
|
|
1040
|
+
summary: 'List files in bucket',
|
|
1041
|
+
request: {
|
|
1042
|
+
params: z.object({ bucket: z.string() }),
|
|
1043
|
+
},
|
|
1044
|
+
responses: {
|
|
1045
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1046
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1047
|
+
},
|
|
1048
|
+
});
|
|
1049
|
+
|
|
1050
|
+
storage.openapi(listFiles, async (c) => {
|
|
1051
|
+
const bucketName = c.req.param('bucket')!;
|
|
1052
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1053
|
+
|
|
1054
|
+
// Security: check read rule (list = read)
|
|
1055
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:read`, c.req);
|
|
1056
|
+
if (!serviceKeyBypass) {
|
|
1057
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1058
|
+
checkStorageRule(bucketConfig.access?.read, auth, null, 'read', bucketName, release);
|
|
1059
|
+
}
|
|
1060
|
+
|
|
1061
|
+
const prefix = c.req.query('prefix') || '';
|
|
1062
|
+
const cursor = c.req.query('cursor') || undefined;
|
|
1063
|
+
const limit = parseStorageListInteger(c.req.query('limit'), 'storage limit', 100);
|
|
1064
|
+
const offset = parseStorageListInteger(c.req.query('offset'), 'storage offset', 0);
|
|
1065
|
+
|
|
1066
|
+
const fullPrefix = r2Key(bucketName, prefix);
|
|
1067
|
+
const listed = await listStorageObjects(c.env.STORAGE, {
|
|
1068
|
+
prefix: fullPrefix,
|
|
1069
|
+
cursor,
|
|
1070
|
+
limit,
|
|
1071
|
+
offset,
|
|
1072
|
+
});
|
|
1073
|
+
|
|
1074
|
+
const files = listed.objects.map(obj => buildMetadata(obj));
|
|
1075
|
+
|
|
1076
|
+
return c.json({
|
|
1077
|
+
files,
|
|
1078
|
+
cursor: listed.truncated ? listed.cursor : null,
|
|
1079
|
+
truncated: listed.truncated,
|
|
1080
|
+
});
|
|
1081
|
+
});
|
|
1082
|
+
|
|
1083
|
+
// ─── Delete ───

// DELETE /{bucket}/{key} — route definition for single-segment keys.
const deleteFile = createRoute({
  operationId: 'deleteFile',
  method: 'delete',
  path: '/{bucket}/{key}',
  tags: ['client'],
  summary: 'Delete file',
  request: {
    params: z.object({ bucket: z.string(), key: z.string() }),
  },
  responses: {
    200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
    404: { description: 'Not found', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});

/**
 * Deletes a single object.
 *
 * Order of operations: fetch the object (404 if absent) → delete rule check
 * (service key may bypass) → blocking beforeDelete hooks (plugin-registered
 * first, then config-level) → R2 delete → afterDelete hooks, which are
 * fire-and-forget via waitUntil / non-blocking.
 */
const handleDeleteFile = async (c: Context<HonoEnv>) => {
  const bucketName = c.req.param('bucket')!;
  const key = resolveStorageKey(c, bucketName);
  const { bucketConfig, release } = getBucketConfig(c.env, bucketName);

  const fullKey = r2Key(bucketName, key);

  // Get existing file for resource context
  const existing = await getStoredObject(c.env.STORAGE, fullKey);
  if (!existing) {
    throw new EdgeBaseError(404, 'File not found.', undefined, 'not-found');
  }

  // Security: check delete rule
  const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:delete`, c.req);
  if (!serviceKeyBypass) {
    const auth = c.get('auth') as AuthContext | null;
    // The existing object's metadata is the rule's resource context.
    const resource = buildMetadata(existing);
    checkStorageRule(bucketConfig.access?.delete, auth, resource, 'delete', bucketName, release);
  }

  // Plugin blocking storage hooks (beforeDelete)
  const auth = c.get('auth') as AuthContext | null;
  const fileMeta = buildMetadata(existing);
  await executeBlockingStorageHooks('beforeDelete', { ...fileMeta, bucket: bucketName }, auth, c.env, getWorkerUrl(c.req.url, c.env));

  // beforeDelete hook — blocking, throw to reject (config-level hook)
  const hooks = getStorageHooks(c.env, bucketName);
  if (hooks?.beforeDelete) {
    const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
    try {
      await hooks.beforeDelete(auth, fileMeta, hookCtx);
    } catch (error) {
      // Normalize arbitrary hook failures into an EdgeBase-shaped error.
      throw normalizeStorageHookError(error, 'beforeDelete');
    }
  }

  await c.env.STORAGE.delete(fullKey);

  // afterDelete hook — fire-and-forget (config-level); errors are logged,
  // never surfaced to the client.
  if (hooks?.afterDelete) {
    const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
    c.executionCtx.waitUntil(
      Promise.resolve(hooks.afterDelete(auth, fileMeta, hookCtx)).catch((err) => {
        console.error('[EdgeBase] afterDelete hook error:', err);
      }),
    );
  }

  // afterDelete — plugin-registered storage hooks (metadata only, non-blocking)
  executeStorageHooks('afterDelete', { ...fileMeta, bucket: bucketName }, auth, c.executionCtx, c.env, getWorkerUrl(c.req.url, c.env));

  return c.json({ ok: true });
};
storage.openapi(deleteFile, handleDeleteFile);
|
|
1156
|
+
|
|
1157
|
+
// ─── Batch Delete ───

// POST /{bucket}/delete-batch — delete up to 100 keys in one request.
const deleteBatch = createRoute({
  operationId: 'deleteBatch',
  method: 'post',
  path: '/{bucket}/delete-batch',
  tags: ['client'],
  summary: 'Batch delete files',
  request: {
    params: z.object({ bucket: z.string() }),
    body: { content: { 'application/json': { schema: z.object({ keys: z.array(z.string()) }) } }, required: true },
  },
  responses: {
    200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
    400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});

// Each key is processed independently: a failure for one key (missing file,
// rule denial, hook rejection) is recorded in `failed` and does not abort
// the remaining keys. The response reports both `deleted` and `failed`.
storage.openapi(deleteBatch, async (c) => {
  const bucketName = c.req.param('bucket')!;
  const { bucketConfig, release } = getBucketConfig(c.env, bucketName);

  const body = await c.req.json<{ keys: string[] }>();
  if (!body.keys || !Array.isArray(body.keys) || body.keys.length === 0) {
    throw new EdgeBaseError(400, 'Missing required field: keys (non-empty array).', undefined, 'validation-failed');
  }
  if (body.keys.length > 100) {
    throw new EdgeBaseError(400, 'Maximum 100 keys per batch delete request.', undefined, 'validation-failed');
  }

  // The service key is checked once for the whole batch; per-key rule
  // checks below only run when no valid service key was presented.
  const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:delete`, c.req);
  const auth = c.get('auth') as AuthContext | null;
  const hooks = getStorageHooks(c.env, bucketName);

  const deleted: string[] = [];
  const failed: Array<{ key: string; error: string }> = [];

  for (const key of body.keys) {
    try {
      validateStorageKey(key);
      const fullKey = r2Key(bucketName, key);
      const existing = await getStoredObject(c.env.STORAGE, fullKey);
      if (!existing) {
        failed.push({ key, error: 'File not found.' });
        continue;
      }

      // Per-key delete rule check with the file's metadata as resource context.
      if (!serviceKeyBypass) {
        const resource = buildMetadata(existing);
        checkStorageRule(bucketConfig.access?.delete, auth, resource, 'delete', bucketName, release);
      }

      // Plugin blocking storage hooks (beforeDelete — batch)
      const fileMeta = buildMetadata(existing);
      await executeBlockingStorageHooks('beforeDelete', { ...fileMeta, bucket: bucketName }, auth, c.env, getWorkerUrl(c.req.url, c.env));

      // beforeDelete hook — blocking, throw to reject (config-level hook)
      if (hooks?.beforeDelete) {
        const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
        try {
          await hooks.beforeDelete(auth, fileMeta, hookCtx);
        } catch (error) {
          throw normalizeStorageHookError(error, 'beforeDelete');
        }
      }

      await c.env.STORAGE.delete(fullKey);
      deleted.push(key);

      // afterDelete hook — fire-and-forget (config-level)
      if (hooks?.afterDelete) {
        const hookCtx = buildStorageHookCtx(c.env, c.executionCtx, getWorkerUrl(c.req.url, c.env));
        c.executionCtx.waitUntil(
          Promise.resolve(hooks.afterDelete(auth, fileMeta, hookCtx)).catch((err) => {
            console.error('[EdgeBase] afterDelete hook error (batch):', err);
          }),
        );
      }

      // afterDelete — plugin-registered storage hooks (per-file, non-blocking)
      executeStorageHooks('afterDelete', { ...fileMeta, bucket: bucketName }, auth, c.executionCtx, c.env, getWorkerUrl(c.req.url, c.env));
    } catch (e) {
      // Only EdgeBaseError messages are surfaced to the client; other
      // errors are masked to avoid leaking internal details.
      const msg = e instanceof EdgeBaseError ? e.message : 'Unknown error.';
      failed.push({ key, error: msg });
    }
  }

  return c.json({ deleted, failed });
});
|
|
1247
|
+
|
|
1248
|
+
// ─── Signed URL (for private downloads) ───
|
|
1249
|
+
|
|
1250
|
+
const createSignedDownloadUrl = createRoute({
|
|
1251
|
+
operationId: 'createSignedDownloadUrl',
|
|
1252
|
+
method: 'post',
|
|
1253
|
+
path: '/{bucket}/signed-url',
|
|
1254
|
+
tags: ['client'],
|
|
1255
|
+
summary: 'Create signed download URL',
|
|
1256
|
+
request: {
|
|
1257
|
+
params: z.object({ bucket: z.string() }),
|
|
1258
|
+
body: { content: { 'application/json': { schema: z.object({ key: z.string(), expiresIn: z.string().optional() }) } }, required: true },
|
|
1259
|
+
},
|
|
1260
|
+
responses: {
|
|
1261
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1262
|
+
400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1263
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1264
|
+
404: { description: 'Not found', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1265
|
+
},
|
|
1266
|
+
});
|
|
1267
|
+
|
|
1268
|
+
storage.openapi(createSignedDownloadUrl, async (c) => {
|
|
1269
|
+
const bucketName = c.req.param('bucket')!;
|
|
1270
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1271
|
+
|
|
1272
|
+
// Security: check read rule (signed URL creation = read access)
|
|
1273
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:read`, c.req);
|
|
1274
|
+
if (!serviceKeyBypass) {
|
|
1275
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1276
|
+
checkStorageRule(bucketConfig.access?.read, auth, null, 'read', bucketName, release);
|
|
1277
|
+
}
|
|
1278
|
+
|
|
1279
|
+
const body = await c.req.json<{ key: string; expiresIn?: string }>();
|
|
1280
|
+
if (!body.key) {
|
|
1281
|
+
throw new EdgeBaseError(400, 'Missing required field: key.', undefined, 'validation-failed');
|
|
1282
|
+
}
|
|
1283
|
+
validateStorageKey(body.key);
|
|
1284
|
+
|
|
1285
|
+
// Check file exists
|
|
1286
|
+
const fullKey = r2Key(bucketName, body.key);
|
|
1287
|
+
const obj = await c.env.STORAGE.head(fullKey);
|
|
1288
|
+
if (!obj) {
|
|
1289
|
+
throw new EdgeBaseError(404, 'File not found.', undefined, 'not-found');
|
|
1290
|
+
}
|
|
1291
|
+
|
|
1292
|
+
const expiresInMs = parseDuration(body.expiresIn || '1h');
|
|
1293
|
+
const expiresAt = Date.now() + expiresInMs;
|
|
1294
|
+
|
|
1295
|
+
// Fail-closed: refuse to create signed URL without secret
|
|
1296
|
+
const secret = c.env.JWT_USER_SECRET;
|
|
1297
|
+
if (!secret) {
|
|
1298
|
+
throw new EdgeBaseError(500, 'Signed URLs require JWT_USER_SECRET to be configured.', undefined, 'internal-error');
|
|
1299
|
+
}
|
|
1300
|
+
const token = await createSignedToken(body.key, bucketName, expiresAt, secret);
|
|
1301
|
+
|
|
1302
|
+
// Build signed URL
|
|
1303
|
+
const url = new URL(c.req.url);
|
|
1304
|
+
const signedUrl = `${url.protocol}//${url.host}/api/storage/${encodeURIComponent(bucketName)}/${encodeURIComponent(body.key)}?token=${token}`;
|
|
1305
|
+
|
|
1306
|
+
return c.json({ url: signedUrl, expiresAt: new Date(expiresAt).toISOString() });
|
|
1307
|
+
});
|
|
1308
|
+
|
|
1309
|
+
// ─── Batch Signed URLs ───
|
|
1310
|
+
|
|
1311
|
+
const createSignedDownloadUrls = createRoute({
|
|
1312
|
+
operationId: 'createSignedDownloadUrls',
|
|
1313
|
+
method: 'post',
|
|
1314
|
+
path: '/{bucket}/signed-urls',
|
|
1315
|
+
tags: ['client'],
|
|
1316
|
+
summary: 'Batch create signed download URLs',
|
|
1317
|
+
request: {
|
|
1318
|
+
params: z.object({ bucket: z.string() }),
|
|
1319
|
+
body: { content: { 'application/json': { schema: z.object({ keys: z.array(z.string()), expiresIn: z.string().optional() }) } }, required: true },
|
|
1320
|
+
},
|
|
1321
|
+
responses: {
|
|
1322
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1323
|
+
400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1324
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1325
|
+
},
|
|
1326
|
+
});
|
|
1327
|
+
|
|
1328
|
+
storage.openapi(createSignedDownloadUrls, async (c) => {
|
|
1329
|
+
const bucketName = c.req.param('bucket')!;
|
|
1330
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1331
|
+
|
|
1332
|
+
// Security: check read rule
|
|
1333
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:read`, c.req);
|
|
1334
|
+
if (!serviceKeyBypass) {
|
|
1335
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1336
|
+
checkStorageRule(bucketConfig.access?.read, auth, null, 'read', bucketName, release);
|
|
1337
|
+
}
|
|
1338
|
+
|
|
1339
|
+
const body = await c.req.json<{ keys: string[]; expiresIn?: string }>();
|
|
1340
|
+
if (!body.keys || !Array.isArray(body.keys) || body.keys.length === 0) {
|
|
1341
|
+
throw new EdgeBaseError(400, 'Missing required field: keys (non-empty array).', undefined, 'validation-failed');
|
|
1342
|
+
}
|
|
1343
|
+
if (body.keys.length > 100) {
|
|
1344
|
+
throw new EdgeBaseError(400, 'Maximum 100 keys per batch signed URL request.', undefined, 'validation-failed');
|
|
1345
|
+
}
|
|
1346
|
+
|
|
1347
|
+
const secret = c.env.JWT_USER_SECRET;
|
|
1348
|
+
if (!secret) {
|
|
1349
|
+
throw new EdgeBaseError(500, 'Signed URLs require JWT_USER_SECRET to be configured.', undefined, 'internal-error');
|
|
1350
|
+
}
|
|
1351
|
+
|
|
1352
|
+
const expiresInMs = parseDuration(body.expiresIn || '1h');
|
|
1353
|
+
const expiresAt = Date.now() + expiresInMs;
|
|
1354
|
+
const url = new URL(c.req.url);
|
|
1355
|
+
|
|
1356
|
+
const urls: Array<{ key: string; url: string; expiresAt: string }> = [];
|
|
1357
|
+
|
|
1358
|
+
for (const key of body.keys) {
|
|
1359
|
+
validateStorageKey(key);
|
|
1360
|
+
const fullKey = r2Key(bucketName, key);
|
|
1361
|
+
const obj = await c.env.STORAGE.head(fullKey);
|
|
1362
|
+
if (!obj) continue; // skip non-existent files
|
|
1363
|
+
|
|
1364
|
+
const token = await createSignedToken(key, bucketName, expiresAt, secret);
|
|
1365
|
+
urls.push({
|
|
1366
|
+
key,
|
|
1367
|
+
url: `${url.protocol}//${url.host}/api/storage/${encodeURIComponent(bucketName)}/${encodeURIComponent(key)}?token=${token}`,
|
|
1368
|
+
expiresAt: new Date(expiresAt).toISOString(),
|
|
1369
|
+
});
|
|
1370
|
+
}
|
|
1371
|
+
|
|
1372
|
+
return c.json({ urls });
|
|
1373
|
+
});
|
|
1374
|
+
|
|
1375
|
+
// ─── Signed Upload URL ───
|
|
1376
|
+
|
|
1377
|
+
const createSignedUploadUrl = createRoute({
|
|
1378
|
+
operationId: 'createSignedUploadUrl',
|
|
1379
|
+
method: 'post',
|
|
1380
|
+
path: '/{bucket}/signed-upload-url',
|
|
1381
|
+
tags: ['client'],
|
|
1382
|
+
summary: 'Create signed upload URL',
|
|
1383
|
+
request: {
|
|
1384
|
+
params: z.object({ bucket: z.string() }),
|
|
1385
|
+
body: { content: { 'application/json': { schema: z.object({ key: z.string(), expiresIn: z.string().optional(), maxFileSize: z.string().optional() }) } }, required: true },
|
|
1386
|
+
},
|
|
1387
|
+
responses: {
|
|
1388
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1389
|
+
400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1390
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1391
|
+
},
|
|
1392
|
+
});
|
|
1393
|
+
|
|
1394
|
+
storage.openapi(createSignedUploadUrl, async (c) => {
|
|
1395
|
+
const bucketName = c.req.param('bucket')!;
|
|
1396
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1397
|
+
|
|
1398
|
+
// Security: check write rule at URL generation time
|
|
1399
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
|
|
1400
|
+
if (!serviceKeyBypass) {
|
|
1401
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1402
|
+
checkStorageRule(bucketConfig.access?.write, auth, null, 'write', bucketName, release);
|
|
1403
|
+
}
|
|
1404
|
+
|
|
1405
|
+
const body = await c.req.json<{ key: string; expiresIn?: string; maxFileSize?: string }>();
|
|
1406
|
+
if (!body.key) {
|
|
1407
|
+
throw new EdgeBaseError(400, 'Missing required field: key.', undefined, 'validation-failed');
|
|
1408
|
+
}
|
|
1409
|
+
validateStorageKey(body.key);
|
|
1410
|
+
const maxBytes = parseByteSize(body.maxFileSize);
|
|
1411
|
+
|
|
1412
|
+
const expiresInMs = parseDuration(body.expiresIn || '30m');
|
|
1413
|
+
const expiresAt = Date.now() + expiresInMs;
|
|
1414
|
+
|
|
1415
|
+
// Fail-closed: refuse to create signed URL without secret
|
|
1416
|
+
const secret = c.env.JWT_USER_SECRET;
|
|
1417
|
+
if (!secret) {
|
|
1418
|
+
throw new EdgeBaseError(500, 'Signed URLs require JWT_USER_SECRET to be configured.', undefined, 'internal-error');
|
|
1419
|
+
}
|
|
1420
|
+
const token = await createSignedToken(body.key, bucketName, expiresAt, secret, maxBytes);
|
|
1421
|
+
|
|
1422
|
+
// Build signed upload URL (uploads go through our Worker endpoint with the token)
|
|
1423
|
+
const url = new URL(c.req.url);
|
|
1424
|
+
const signedUrl = `${url.protocol}//${url.host}/api/storage/${bucketName}/upload?token=${token}&key=${encodeURIComponent(body.key)}`;
|
|
1425
|
+
|
|
1426
|
+
// Add uploadedBy from auth context
|
|
1427
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1428
|
+
|
|
1429
|
+
return c.json({
|
|
1430
|
+
url: signedUrl,
|
|
1431
|
+
expiresAt: new Date(expiresAt).toISOString(),
|
|
1432
|
+
maxFileSize: body.maxFileSize ?? null,
|
|
1433
|
+
uploadedBy: auth?.id || null,
|
|
1434
|
+
});
|
|
1435
|
+
});
|
|
1436
|
+
|
|
1437
|
+
// ─── Multipart Upload (7.3) ───
|
|
1438
|
+
|
|
1439
|
+
const createMultipartUpload = createRoute({
|
|
1440
|
+
operationId: 'createMultipartUpload',
|
|
1441
|
+
method: 'post',
|
|
1442
|
+
path: '/{bucket}/multipart/create',
|
|
1443
|
+
tags: ['client'],
|
|
1444
|
+
summary: 'Start multipart upload',
|
|
1445
|
+
request: {
|
|
1446
|
+
params: z.object({ bucket: z.string() }),
|
|
1447
|
+
body: { content: { 'application/json': { schema: z.object({ key: z.string(), contentType: z.string().optional(), customMetadata: z.record(z.string(), z.string()).optional() }) } }, required: true },
|
|
1448
|
+
},
|
|
1449
|
+
responses: {
|
|
1450
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1451
|
+
400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1452
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1453
|
+
},
|
|
1454
|
+
});
|
|
1455
|
+
|
|
1456
|
+
storage.openapi(createMultipartUpload, async (c) => {
|
|
1457
|
+
const bucketName = c.req.param('bucket')!;
|
|
1458
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1459
|
+
|
|
1460
|
+
// Security: check write rule
|
|
1461
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
|
|
1462
|
+
if (!serviceKeyBypass) {
|
|
1463
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1464
|
+
checkStorageRule(bucketConfig.access?.write, auth, null, 'write', bucketName, release);
|
|
1465
|
+
}
|
|
1466
|
+
|
|
1467
|
+
const body = await c.req.json<{ key: string; contentType?: string; customMetadata?: Record<string, string> }>();
|
|
1468
|
+
if (!body.key) {
|
|
1469
|
+
throw new EdgeBaseError(400, 'Missing required field: key.', undefined, 'validation-failed');
|
|
1470
|
+
}
|
|
1471
|
+
validateStorageKey(body.key);
|
|
1472
|
+
|
|
1473
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1474
|
+
const customMetadata = body.customMetadata || {};
|
|
1475
|
+
if (auth?.id) {
|
|
1476
|
+
customMetadata.uploadedBy = auth.id as string;
|
|
1477
|
+
}
|
|
1478
|
+
|
|
1479
|
+
const fullKey = r2Key(bucketName, body.key);
|
|
1480
|
+
const multipartUpload = await c.env.STORAGE.createMultipartUpload(fullKey, {
|
|
1481
|
+
httpMetadata: { contentType: body.contentType || 'application/octet-stream' },
|
|
1482
|
+
customMetadata,
|
|
1483
|
+
});
|
|
1484
|
+
|
|
1485
|
+
return c.json({
|
|
1486
|
+
uploadId: multipartUpload.uploadId,
|
|
1487
|
+
key: body.key,
|
|
1488
|
+
});
|
|
1489
|
+
});
|
|
1490
|
+
|
|
1491
|
+
// POST /{bucket}/multipart/upload-part — the part bytes are the raw request
// body; uploadId, partNumber, and key arrive as query params.
const uploadPart = createRoute({
  operationId: 'uploadPart',
  method: 'post',
  path: '/{bucket}/multipart/upload-part',
  tags: ['client'],
  summary: 'Upload a part',
  request: {
    params: z.object({ bucket: z.string() }),
    body: { content: { 'application/json': { schema: z.record(z.string(), z.unknown()) } }, required: true },
  },
  responses: {
    200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
    400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
    403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
  },
});

// Streams one part into an in-progress multipart upload, then records the
// part's etag in KV so interrupted uploads can be resumed (M17).
storage.openapi(uploadPart, async (c) => {
  const bucketName = c.req.param('bucket')!;
  const { bucketConfig, release } = getBucketConfig(c.env, bucketName);

  // Security: check write rule
  const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
  if (!serviceKeyBypass) {
    const auth = c.get('auth') as AuthContext | null;
    checkStorageRule(bucketConfig.access?.write, auth, null, 'write', bucketName, release);
  }

  const uploadId = c.req.query('uploadId');
  // A missing or non-numeric partNumber parses to 0/NaN and fails the
  // falsy check below; valid part numbers start at 1.
  const partNumber = parseInt(c.req.query('partNumber') || '0', 10);
  const key = c.req.query('key');

  if (!uploadId || !partNumber || !key) {
    throw new EdgeBaseError(400, 'Missing required query params: uploadId, partNumber, key.', undefined, 'validation-failed');
  }

  const fullKey = r2Key(bucketName, key);
  const multipartUpload = c.env.STORAGE.resumeMultipartUpload(fullKey, uploadId);

  // Stream the raw request body straight into R2 as this part.
  const part = await multipartUpload.uploadPart(partNumber, c.req.raw.body!);

  // M17: Save part info to KV for resume tracking
  // NOTE(review): this read-modify-write is not atomic; concurrent part
  // uploads for the same uploadId could drop entries — confirm acceptable.
  const kvKey = partTrackingKey(bucketName, key, uploadId);
  const existing = await c.env.KV.get(kvKey, 'json') as Array<{ partNumber: number; etag: string }> | null;
  const parts = existing || [];
  // Replace if same partNumber exists (re-upload), otherwise append
  const idx = parts.findIndex(p => p.partNumber === part.partNumber);
  if (idx >= 0) {
    parts[idx] = { partNumber: part.partNumber, etag: part.etag };
  } else {
    parts.push({ partNumber: part.partNumber, etag: part.etag });
  }
  // Tracking data expires automatically so abandoned uploads don't leak KV keys.
  await c.env.KV.put(kvKey, JSON.stringify(parts), { expirationTtl: PART_TRACKING_TTL });

  return c.json({
    partNumber: part.partNumber,
    etag: part.etag,
  });
});
|
|
1550
|
+
|
|
1551
|
+
const completeMultipartUpload = createRoute({
|
|
1552
|
+
operationId: 'completeMultipartUpload',
|
|
1553
|
+
method: 'post',
|
|
1554
|
+
path: '/{bucket}/multipart/complete',
|
|
1555
|
+
tags: ['client'],
|
|
1556
|
+
summary: 'Complete multipart upload',
|
|
1557
|
+
request: {
|
|
1558
|
+
params: z.object({ bucket: z.string() }),
|
|
1559
|
+
body: { content: { 'application/json': { schema: z.object({ uploadId: z.string(), key: z.string(), parts: z.array(z.object({ partNumber: z.number(), etag: z.string() })) }) } }, required: true },
|
|
1560
|
+
},
|
|
1561
|
+
responses: {
|
|
1562
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1563
|
+
400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1564
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1565
|
+
},
|
|
1566
|
+
});
|
|
1567
|
+
|
|
1568
|
+
storage.openapi(completeMultipartUpload, async (c) => {
|
|
1569
|
+
const bucketName = c.req.param('bucket')!;
|
|
1570
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1571
|
+
|
|
1572
|
+
// Security: check write rule
|
|
1573
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
|
|
1574
|
+
if (!serviceKeyBypass) {
|
|
1575
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1576
|
+
checkStorageRule(bucketConfig.access?.write, auth, null, 'write', bucketName, release);
|
|
1577
|
+
}
|
|
1578
|
+
|
|
1579
|
+
const body = await c.req.json<{
|
|
1580
|
+
uploadId: string;
|
|
1581
|
+
key: string;
|
|
1582
|
+
parts: Array<{ partNumber: number; etag: string }>;
|
|
1583
|
+
}>();
|
|
1584
|
+
|
|
1585
|
+
if (!body.uploadId || !body.key || !body.parts?.length) {
|
|
1586
|
+
throw new EdgeBaseError(400, 'Missing required fields: uploadId, key, parts.', undefined, 'validation-failed');
|
|
1587
|
+
}
|
|
1588
|
+
|
|
1589
|
+
const fullKey = r2Key(bucketName, body.key);
|
|
1590
|
+
const multipartUpload = c.env.STORAGE.resumeMultipartUpload(fullKey, body.uploadId);
|
|
1591
|
+
|
|
1592
|
+
const obj = await multipartUpload.complete(body.parts);
|
|
1593
|
+
|
|
1594
|
+
// M17: Clean up KV part tracking data after successful completion
|
|
1595
|
+
const kvKey = partTrackingKey(bucketName, body.key, body.uploadId);
|
|
1596
|
+
await c.env.KV.delete(kvKey).catch(() => { /* best effort */ });
|
|
1597
|
+
|
|
1598
|
+
// afterUpload — plugin-registered storage hooks (multipart complete, metadata only, non-blocking)
|
|
1599
|
+
executeStorageHooks('afterUpload', { ...buildMetadata(obj), bucket: bucketName }, c.get('auth') as AuthContext | null, c.executionCtx, c.env, getWorkerUrl(c.req.url, c.env));
|
|
1600
|
+
|
|
1601
|
+
return c.json(buildMetadata(obj));
|
|
1602
|
+
});
|
|
1603
|
+
|
|
1604
|
+
const abortMultipartUpload = createRoute({
|
|
1605
|
+
operationId: 'abortMultipartUpload',
|
|
1606
|
+
method: 'post',
|
|
1607
|
+
path: '/{bucket}/multipart/abort',
|
|
1608
|
+
tags: ['client'],
|
|
1609
|
+
summary: 'Abort multipart upload',
|
|
1610
|
+
request: {
|
|
1611
|
+
params: z.object({ bucket: z.string() }),
|
|
1612
|
+
body: { content: { 'application/json': { schema: z.object({ uploadId: z.string(), key: z.string() }) } }, required: true },
|
|
1613
|
+
},
|
|
1614
|
+
responses: {
|
|
1615
|
+
200: { description: 'Success', content: { 'application/json': { schema: jsonResponseSchema } } },
|
|
1616
|
+
400: { description: 'Bad request', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1617
|
+
403: { description: 'Forbidden', content: { 'application/json': { schema: errorResponseSchema } } },
|
|
1618
|
+
},
|
|
1619
|
+
});
|
|
1620
|
+
|
|
1621
|
+
storage.openapi(abortMultipartUpload, async (c) => {
|
|
1622
|
+
const bucketName = c.req.param('bucket')!;
|
|
1623
|
+
const { bucketConfig, release } = getBucketConfig(c.env, bucketName);
|
|
1624
|
+
|
|
1625
|
+
// Security: check write rule
|
|
1626
|
+
const serviceKeyBypass = checkServiceKey(c.env, c.req.header('X-EdgeBase-Service-Key'), `storage:bucket:${bucketName}:write`, c.req);
|
|
1627
|
+
if (!serviceKeyBypass) {
|
|
1628
|
+
const auth = c.get('auth') as AuthContext | null;
|
|
1629
|
+
checkStorageRule(bucketConfig.access?.write, auth, null, 'write', bucketName, release);
|
|
1630
|
+
}
|
|
1631
|
+
|
|
1632
|
+
const body = await c.req.json<{ uploadId: string; key: string }>();
|
|
1633
|
+
if (!body.uploadId || !body.key) {
|
|
1634
|
+
throw new EdgeBaseError(400, 'Missing required fields: uploadId, key.', undefined, 'validation-failed');
|
|
1635
|
+
}
|
|
1636
|
+
|
|
1637
|
+
const fullKey = r2Key(bucketName, body.key);
|
|
1638
|
+
const multipartUpload = c.env.STORAGE.resumeMultipartUpload(fullKey, body.uploadId);
|
|
1639
|
+
await multipartUpload.abort();
|
|
1640
|
+
|
|
1641
|
+
// M17: Clean up KV part tracking data after abort
|
|
1642
|
+
const kvKey = partTrackingKey(bucketName, body.key, body.uploadId);
|
|
1643
|
+
await c.env.KV.delete(kvKey).catch(() => { /* best effort */ });
|
|
1644
|
+
|
|
1645
|
+
return c.json({ ok: true });
|
|
1646
|
+
});
|
|
1647
|
+
|
|
1648
|
+
// ─── Subdirectory Key Catch-all Routes ───
|
|
1649
|
+
// OpenAPI /{bucket}/{key} params only match a single path segment.
|
|
1650
|
+
// These raw Hono routes use :key{.+} to handle multi-segment keys
|
|
1651
|
+
// (e.g., "folder/file.txt"). Registered AFTER OpenAPI routes so
|
|
1652
|
+
// single-segment keys still hit the OpenAPI-registered handlers first.
|
|
1653
|
+
// Metadata catch-alls cannot rely on `:key{.+}/metadata` in Hono because the
|
|
1654
|
+
// regexp consumes the suffix into `key`. Use wildcard tail parsing instead.
|
|
1655
|
+
storage.on('GET', '/:bucket/*', async (c) => {
|
|
1656
|
+
const bucketName = c.req.param('bucket')!;
|
|
1657
|
+
const tail = getCatchAllTail(c, bucketName);
|
|
1658
|
+
if (tail?.endsWith('/metadata')) {
|
|
1659
|
+
return handleGetFileMetadata(c);
|
|
1660
|
+
}
|
|
1661
|
+
return handleDownloadFile(c);
|
|
1662
|
+
});
|
|
1663
|
+
storage.on('PATCH', '/:bucket/*', async (c) => {
|
|
1664
|
+
const bucketName = c.req.param('bucket')!;
|
|
1665
|
+
const tail = getCatchAllTail(c, bucketName);
|
|
1666
|
+
if (tail?.endsWith('/metadata')) {
|
|
1667
|
+
return handleUpdateFileMetadata(c);
|
|
1668
|
+
}
|
|
1669
|
+
throw new EdgeBaseError(404, 'Not found.', undefined, 'not-found');
|
|
1670
|
+
});
|
|
1671
|
+
// HEAD/DELETE catch-alls for multi-segment keys: no metadata sub-route
// applies, so the shared handlers are registered directly.
storage.on('HEAD', '/:bucket/*', handleCheckFileExists);
storage.on('DELETE', '/:bucket/*', handleDeleteFile);

// Public entry point for this module, mounted by the app router.
export { storage as storageRoute };
|