@edge-base/server 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (309)
  1. package/admin-build/.gitkeep +0 -0
  2. package/admin-build/_app/env.js +1 -0
  3. package/admin-build/_app/immutable/assets/0.Bm6cF078.css +1 -0
  4. package/admin-build/_app/immutable/assets/1.BfW3pUNa.css +1 -0
  5. package/admin-build/_app/immutable/assets/11.CVmQOewb.css +1 -0
  6. package/admin-build/_app/immutable/assets/12.B1EhbRZT.css +1 -0
  7. package/admin-build/_app/immutable/assets/13.BvwYeuwE.css +1 -0
  8. package/admin-build/_app/immutable/assets/14.CdVfcO0R.css +1 -0
  9. package/admin-build/_app/immutable/assets/15.2yeZ66b-.css +1 -0
  10. package/admin-build/_app/immutable/assets/17.BVg0JEVu.css +1 -0
  11. package/admin-build/_app/immutable/assets/18.Rwnl3x_i.css +1 -0
  12. package/admin-build/_app/immutable/assets/20.DsPWA9AV.css +1 -0
  13. package/admin-build/_app/immutable/assets/21.Dz2RJ56c.css +1 -0
  14. package/admin-build/_app/immutable/assets/22.DwNLk5Ai.css +1 -0
  15. package/admin-build/_app/immutable/assets/23.CFpu0gOO.css +1 -0
  16. package/admin-build/_app/immutable/assets/24.Cy5LBeoJ.css +1 -0
  17. package/admin-build/_app/immutable/assets/25.pUyLVf-h.css +1 -0
  18. package/admin-build/_app/immutable/assets/26.DBcGrlXa.css +1 -0
  19. package/admin-build/_app/immutable/assets/27.BswYyAJD.css +1 -0
  20. package/admin-build/_app/immutable/assets/28.B4ueB1Kf.css +1 -0
  21. package/admin-build/_app/immutable/assets/29.B-qU6PdF.css +1 -0
  22. package/admin-build/_app/immutable/assets/3.Dg81Pgmd.css +1 -0
  23. package/admin-build/_app/immutable/assets/30.CsdWum94.css +1 -0
  24. package/admin-build/_app/immutable/assets/31.U6OwIp50.css +1 -0
  25. package/admin-build/_app/immutable/assets/4.CyawCCux.css +1 -0
  26. package/admin-build/_app/immutable/assets/5.C0YO2HTk.css +1 -0
  27. package/admin-build/_app/immutable/assets/8.Br5jd6kD.css +1 -0
  28. package/admin-build/_app/immutable/assets/Badge.EMYLHBxE.css +1 -0
  29. package/admin-build/_app/immutable/assets/Button.DpzMRTjK.css +1 -0
  30. package/admin-build/_app/immutable/assets/ConfirmDialog.DAnaWRRk.css +1 -0
  31. package/admin-build/_app/immutable/assets/EmptyState.CwKsu57Y.css +1 -0
  32. package/admin-build/_app/immutable/assets/Input.BDUSenmU.css +1 -0
  33. package/admin-build/_app/immutable/assets/Modal.Dm5B0Xie.css +1 -0
  34. package/admin-build/_app/immutable/assets/PageShell.CmU-Xh-b.css +1 -0
  35. package/admin-build/_app/immutable/assets/SchemaFieldEditor.g4NsCdno.css +1 -0
  36. package/admin-build/_app/immutable/assets/Select.BW4Keufm.css +1 -0
  37. package/admin-build/_app/immutable/assets/Skeleton.KWUulTKJ.css +1 -0
  38. package/admin-build/_app/immutable/assets/Tabs.CniGYb67.css +1 -0
  39. package/admin-build/_app/immutable/assets/TimeChart.BTCDAvmT.css +1 -0
  40. package/admin-build/_app/immutable/assets/Toggle.Cy_K12OM.css +1 -0
  41. package/admin-build/_app/immutable/assets/TopList.ClFzmPlA.css +1 -0
  42. package/admin-build/_app/immutable/chunks/7B47DvSx.js +1 -0
  43. package/admin-build/_app/immutable/chunks/7f08Id8e.js +1 -0
  44. package/admin-build/_app/immutable/chunks/8wJeQ7LN.js +1 -0
  45. package/admin-build/_app/immutable/chunks/B-h2afW5.js +1 -0
  46. package/admin-build/_app/immutable/chunks/B8vJP3wz.js +1 -0
  47. package/admin-build/_app/immutable/chunks/BR_fL5Yv.js +1 -0
  48. package/admin-build/_app/immutable/chunks/BY92tFS2.js +1 -0
  49. package/admin-build/_app/immutable/chunks/BcR-Rdj9.js +1 -0
  50. package/admin-build/_app/immutable/chunks/BdrwyZv8.js +1 -0
  51. package/admin-build/_app/immutable/chunks/Bh56EfQ_.js +1 -0
  52. package/admin-build/_app/immutable/chunks/BkrCkgYp.js +1 -0
  53. package/admin-build/_app/immutable/chunks/BmRjiP5k.js +1 -0
  54. package/admin-build/_app/immutable/chunks/BsokvhWC.js +1 -0
  55. package/admin-build/_app/immutable/chunks/C4D51vTW.js +1 -0
  56. package/admin-build/_app/immutable/chunks/C6puvcoR.js +2 -0
  57. package/admin-build/_app/immutable/chunks/CCKNu7m7.js +1 -0
  58. package/admin-build/_app/immutable/chunks/CWj6FrbW.js +1 -0
  59. package/admin-build/_app/immutable/chunks/Ce-ngf4p.js +5 -0
  60. package/admin-build/_app/immutable/chunks/Cs0GwzJA.js +1 -0
  61. package/admin-build/_app/immutable/chunks/CwROoZK0.js +1 -0
  62. package/admin-build/_app/immutable/chunks/CxCPv_Ut.js +1 -0
  63. package/admin-build/_app/immutable/chunks/CxbRue-5.js +1 -0
  64. package/admin-build/_app/immutable/chunks/CyqB6g-D.js +1 -0
  65. package/admin-build/_app/immutable/chunks/D5h5A1cc.js +2 -0
  66. package/admin-build/_app/immutable/chunks/DnyL7Zq-.js +1 -0
  67. package/admin-build/_app/immutable/chunks/DoPXzH7F.js +1 -0
  68. package/admin-build/_app/immutable/chunks/DrQSgw-f.js +1 -0
  69. package/admin-build/_app/immutable/chunks/DttM2zNO.js +1 -0
  70. package/admin-build/_app/immutable/chunks/DuXuUBWN.js +1 -0
  71. package/admin-build/_app/immutable/chunks/MdeqaOQx.js +10 -0
  72. package/admin-build/_app/immutable/chunks/NuUjtcO2.js +1 -0
  73. package/admin-build/_app/immutable/chunks/Q2nPFxS6.js +1 -0
  74. package/admin-build/_app/immutable/chunks/R6arueIl.js +1 -0
  75. package/admin-build/_app/immutable/chunks/UUazaC_N.js +1 -0
  76. package/admin-build/_app/immutable/chunks/cOYbrQxx.js +1 -0
  77. package/admin-build/_app/immutable/chunks/eFQHTGwA.js +1 -0
  78. package/admin-build/_app/immutable/chunks/ehbppgYb.js +1 -0
  79. package/admin-build/_app/immutable/chunks/glwixJlP.js +1 -0
  80. package/admin-build/_app/immutable/chunks/vApWTCBs.js +1 -0
  81. package/admin-build/_app/immutable/chunks/w89G9Xpi.js +1 -0
  82. package/admin-build/_app/immutable/chunks/wJsUhbfZ.js +1 -0
  83. package/admin-build/_app/immutable/chunks/zfauFM8P.js +1 -0
  84. package/admin-build/_app/immutable/entry/app.CcO-Uos3.js +2 -0
  85. package/admin-build/_app/immutable/entry/start.COebYq3I.js +1 -0
  86. package/admin-build/_app/immutable/nodes/0.CjtHKU-6.js +1 -0
  87. package/admin-build/_app/immutable/nodes/1.DEisjlM0.js +1 -0
  88. package/admin-build/_app/immutable/nodes/10.CvhdyWVB.js +1 -0
  89. package/admin-build/_app/immutable/nodes/11.DjHqcOvy.js +1 -0
  90. package/admin-build/_app/immutable/nodes/12.mQLz4Mj_.js +1 -0
  91. package/admin-build/_app/immutable/nodes/13.CBonZZyP.js +110 -0
  92. package/admin-build/_app/immutable/nodes/14.d-oiZL0j.js +3 -0
  93. package/admin-build/_app/immutable/nodes/15.CKPQsUYF.js +1 -0
  94. package/admin-build/_app/immutable/nodes/16.wPzAPQGx.js +1 -0
  95. package/admin-build/_app/immutable/nodes/17.DayhKyEZ.js +1 -0
  96. package/admin-build/_app/immutable/nodes/18.DKwS0Ir0.js +1 -0
  97. package/admin-build/_app/immutable/nodes/19.wPzAPQGx.js +1 -0
  98. package/admin-build/_app/immutable/nodes/2.BKoKrw1i.js +1 -0
  99. package/admin-build/_app/immutable/nodes/20.BvIkkkrW.js +1 -0
  100. package/admin-build/_app/immutable/nodes/21.DMaFhdHk.js +128 -0
  101. package/admin-build/_app/immutable/nodes/22.3xdgwuK1.js +1 -0
  102. package/admin-build/_app/immutable/nodes/23.8Bvgjbsl.js +112 -0
  103. package/admin-build/_app/immutable/nodes/24.DzSSzRhG.js +2 -0
  104. package/admin-build/_app/immutable/nodes/25.9KKYBnAE.js +2 -0
  105. package/admin-build/_app/immutable/nodes/26.Bhn9dfhY.js +1 -0
  106. package/admin-build/_app/immutable/nodes/27.kRLiC24G.js +1 -0
  107. package/admin-build/_app/immutable/nodes/28.BVIN1-7N.js +1 -0
  108. package/admin-build/_app/immutable/nodes/29.3yabZWj4.js +1 -0
  109. package/admin-build/_app/immutable/nodes/3.BFtSOkX7.js +2 -0
  110. package/admin-build/_app/immutable/nodes/30.CyCQlwaP.js +1 -0
  111. package/admin-build/_app/immutable/nodes/31.C4LDXjES.js +1 -0
  112. package/admin-build/_app/immutable/nodes/4.CvbiMlCa.js +1 -0
  113. package/admin-build/_app/immutable/nodes/5.C6BLv2eM.js +1 -0
  114. package/admin-build/_app/immutable/nodes/6.BcXvfl2P.js +1 -0
  115. package/admin-build/_app/immutable/nodes/7.CIuqhPiK.js +1 -0
  116. package/admin-build/_app/immutable/nodes/8.BQOR_JfO.js +1 -0
  117. package/admin-build/_app/immutable/nodes/9.NZqXQxPy.js +1 -0
  118. package/admin-build/_app/version.json +1 -0
  119. package/admin-build/favicon.svg +26 -0
  120. package/admin-build/index.html +45 -0
  121. package/openapi.json +19543 -0
  122. package/package.json +66 -0
  123. package/src/__tests__/admin-assets.test.ts +55 -0
  124. package/src/__tests__/admin-data-routes.test.ts +488 -0
  125. package/src/__tests__/admin-db-target.test.ts +103 -0
  126. package/src/__tests__/admin-routing.test.ts +31 -0
  127. package/src/__tests__/admin-user-management.test.ts +311 -0
  128. package/src/__tests__/analytics-query.test.ts +75 -0
  129. package/src/__tests__/auth-d1.test.ts +749 -0
  130. package/src/__tests__/auth-db-adapter.test.ts +73 -0
  131. package/src/__tests__/auth-jwt.test.ts +440 -0
  132. package/src/__tests__/auth-oauth.test.ts +389 -0
  133. package/src/__tests__/auth-password.test.ts +367 -0
  134. package/src/__tests__/auth-redirect.test.ts +87 -0
  135. package/src/__tests__/backup-restore.test.ts +711 -0
  136. package/src/__tests__/broadcast.test.ts +128 -0
  137. package/src/__tests__/cli.test.ts +178 -0
  138. package/src/__tests__/cloudflare-realtime.test.ts +113 -0
  139. package/src/__tests__/config.test.ts +469 -0
  140. package/src/__tests__/cors.test.ts +154 -0
  141. package/src/__tests__/cron.test.ts +302 -0
  142. package/src/__tests__/d1-handler.test.ts +402 -0
  143. package/src/__tests__/d1-sql.test.ts +120 -0
  144. package/src/__tests__/database-live-config.test.ts +42 -0
  145. package/src/__tests__/database-live-emitter.test.ts +56 -0
  146. package/src/__tests__/database-live-filters.test.ts +63 -0
  147. package/src/__tests__/database-live-route.test.ts +113 -0
  148. package/src/__tests__/db-sql.test.ts +163 -0
  149. package/src/__tests__/do-lifecycle.test.ts +263 -0
  150. package/src/__tests__/do-router.test.ts +729 -0
  151. package/src/__tests__/email-provider.test.ts +128 -0
  152. package/src/__tests__/email-templates.test.ts +528 -0
  153. package/src/__tests__/error-format.test.ts +250 -0
  154. package/src/__tests__/field-ops.test.ts +242 -0
  155. package/src/__tests__/functions-context.test.ts +334 -0
  156. package/src/__tests__/functions-d1-proxy.test.ts +229 -0
  157. package/src/__tests__/functions-registry-runtime-config.test.ts +17 -0
  158. package/src/__tests__/functions-route.test.ts +139 -0
  159. package/src/__tests__/internal-request.test.ts +77 -0
  160. package/src/__tests__/log-writer.test.ts +44 -0
  161. package/src/__tests__/logger.test.ts +58 -0
  162. package/src/__tests__/meta-admin-proxy.test.ts +48 -0
  163. package/src/__tests__/meta-export-coverage.test.ts +191 -0
  164. package/src/__tests__/meta-route-registration.test.ts +47 -0
  165. package/src/__tests__/namespace-dump.test.ts +28 -0
  166. package/src/__tests__/oauth-providers.test.ts +337 -0
  167. package/src/__tests__/openapi-coverage.test.ts +144 -0
  168. package/src/__tests__/pagination.test.ts +59 -0
  169. package/src/__tests__/password-policy.test.ts +191 -0
  170. package/src/__tests__/plugin-migrations.test.ts +379 -0
  171. package/src/__tests__/postgres-batch-compat.test.ts +133 -0
  172. package/src/__tests__/postgres-dialect.test.ts +328 -0
  173. package/src/__tests__/postgres-executor.test.ts +79 -0
  174. package/src/__tests__/postgres-field-ops-compat.test.ts +222 -0
  175. package/src/__tests__/postgres-schema-init.test.ts +105 -0
  176. package/src/__tests__/postgres-table-utils.test.ts +107 -0
  177. package/src/__tests__/presence.test.ts +199 -0
  178. package/src/__tests__/provider.test.ts +550 -0
  179. package/src/__tests__/public-user-profile.test.ts +339 -0
  180. package/src/__tests__/push-handlers.test.ts +179 -0
  181. package/src/__tests__/push-provider.test.ts +80 -0
  182. package/src/__tests__/push-token.test.ts +418 -0
  183. package/src/__tests__/query.test.ts +771 -0
  184. package/src/__tests__/rate-limit.test.ts +260 -0
  185. package/src/__tests__/room-access-policy.test.ts +101 -0
  186. package/src/__tests__/room-handler-context.test.ts +130 -0
  187. package/src/__tests__/room-monitoring.test.ts +138 -0
  188. package/src/__tests__/room-runtime-routing.test.ts +222 -0
  189. package/src/__tests__/room.test.ts +254 -0
  190. package/src/__tests__/route-parser.test.ts +490 -0
  191. package/src/__tests__/rules.test.ts +234 -0
  192. package/src/__tests__/runtime-surface-accounting.test.ts +120 -0
  193. package/src/__tests__/scheduled.test.ts +80 -0
  194. package/src/__tests__/schema.test.ts +1273 -0
  195. package/src/__tests__/security-hardening.test.ts +312 -0
  196. package/src/__tests__/server.unit.test.ts +333 -0
  197. package/src/__tests__/service-key-db-proxy.test.ts +650 -0
  198. package/src/__tests__/service-key-provider-bypass.test.ts +138 -0
  199. package/src/__tests__/service-key.test.ts +757 -0
  200. package/src/__tests__/smoke-skip-report.test.ts +72 -0
  201. package/src/__tests__/sms-provider.test.ts +39 -0
  202. package/src/__tests__/sql-route.test.ts +218 -0
  203. package/src/__tests__/storage-hook-context.test.ts +115 -0
  204. package/src/__tests__/totp.test.ts +200 -0
  205. package/src/__tests__/uuid.test.ts +144 -0
  206. package/src/__tests__/validation.test.ts +773 -0
  207. package/src/__tests__/websocket-pending.test.ts +163 -0
  208. package/src/_functions-registry.ts +51 -0
  209. package/src/bench-entry.ts +9 -0
  210. package/src/cloudflare-test.d.ts +1 -0
  211. package/src/durable-objects/auth-do.ts +49 -0
  212. package/src/durable-objects/database-do.ts +2240 -0
  213. package/src/durable-objects/database-live-do.ts +949 -0
  214. package/src/durable-objects/logs-do.ts +1200 -0
  215. package/src/durable-objects/room-runtime-base.ts +1604 -0
  216. package/src/durable-objects/rooms-do.ts +2191 -0
  217. package/src/generated-config.ts +6 -0
  218. package/src/index.ts +382 -0
  219. package/src/lib/admin-assets.ts +54 -0
  220. package/src/lib/admin-db-target.ts +301 -0
  221. package/src/lib/admin-routing.ts +35 -0
  222. package/src/lib/admin-user-management.ts +464 -0
  223. package/src/lib/analytics-adapter.ts +103 -0
  224. package/src/lib/analytics-query.ts +579 -0
  225. package/src/lib/auth-d1-service.ts +1193 -0
  226. package/src/lib/auth-d1.ts +1056 -0
  227. package/src/lib/auth-db-adapter.ts +289 -0
  228. package/src/lib/auth-redirect.ts +116 -0
  229. package/src/lib/cidr.ts +115 -0
  230. package/src/lib/client-ip.ts +51 -0
  231. package/src/lib/cloudflare-realtime.ts +251 -0
  232. package/src/lib/control-db.ts +36 -0
  233. package/src/lib/cron.ts +163 -0
  234. package/src/lib/d1-handler.ts +1425 -0
  235. package/src/lib/d1-schema-init.ts +255 -0
  236. package/src/lib/d1-sql.ts +33 -0
  237. package/src/lib/database-live-config.ts +24 -0
  238. package/src/lib/database-live-emitter.ts +111 -0
  239. package/src/lib/db-sql.ts +66 -0
  240. package/src/lib/do-retry.ts +36 -0
  241. package/src/lib/do-router.ts +270 -0
  242. package/src/lib/do-sql.ts +73 -0
  243. package/src/lib/email-provider.ts +379 -0
  244. package/src/lib/email-templates.ts +285 -0
  245. package/src/lib/email-translations.ts +422 -0
  246. package/src/lib/errors.ts +151 -0
  247. package/src/lib/functions.ts +2091 -0
  248. package/src/lib/hono.ts +56 -0
  249. package/src/lib/internal-request.ts +56 -0
  250. package/src/lib/jwt.ts +354 -0
  251. package/src/lib/log-writer.ts +272 -0
  252. package/src/lib/namespace-dump.ts +125 -0
  253. package/src/lib/oauth-providers.ts +1225 -0
  254. package/src/lib/op-parser.ts +99 -0
  255. package/src/lib/openapi.ts +146 -0
  256. package/src/lib/pagination.ts +19 -0
  257. package/src/lib/password-policy.ts +102 -0
  258. package/src/lib/password.ts +145 -0
  259. package/src/lib/plugin-migrations.ts +612 -0
  260. package/src/lib/postgres-executor.ts +203 -0
  261. package/src/lib/postgres-handler.ts +1102 -0
  262. package/src/lib/postgres-schema-init.ts +341 -0
  263. package/src/lib/postgres-table-utils.ts +87 -0
  264. package/src/lib/public-user-profile.ts +187 -0
  265. package/src/lib/push-provider.ts +409 -0
  266. package/src/lib/push-token.ts +294 -0
  267. package/src/lib/query-engine.ts +768 -0
  268. package/src/lib/room-monitoring.ts +97 -0
  269. package/src/lib/room-runtime.ts +14 -0
  270. package/src/lib/route-parser.ts +434 -0
  271. package/src/lib/schema.ts +538 -0
  272. package/src/lib/schemas.ts +152 -0
  273. package/src/lib/service-key.ts +419 -0
  274. package/src/lib/sms-provider.ts +230 -0
  275. package/src/lib/startup-config.ts +99 -0
  276. package/src/lib/totp.ts +242 -0
  277. package/src/lib/uuid.ts +87 -0
  278. package/src/lib/validation.ts +205 -0
  279. package/src/lib/version.ts +2 -0
  280. package/src/lib/websocket-pending.ts +40 -0
  281. package/src/middleware/auth.ts +169 -0
  282. package/src/middleware/captcha-verify.ts +217 -0
  283. package/src/middleware/cors.ts +159 -0
  284. package/src/middleware/error-handler.ts +54 -0
  285. package/src/middleware/internal-guard.ts +26 -0
  286. package/src/middleware/logger.ts +126 -0
  287. package/src/middleware/rate-limit.ts +283 -0
  288. package/src/middleware/rules.ts +475 -0
  289. package/src/routes/admin-auth.ts +447 -0
  290. package/src/routes/admin.ts +3501 -0
  291. package/src/routes/analytics-api.ts +290 -0
  292. package/src/routes/auth.ts +4222 -0
  293. package/src/routes/backup.ts +1466 -0
  294. package/src/routes/config.ts +53 -0
  295. package/src/routes/d1.ts +109 -0
  296. package/src/routes/database-live.ts +281 -0
  297. package/src/routes/functions.ts +155 -0
  298. package/src/routes/health.ts +32 -0
  299. package/src/routes/kv.ts +167 -0
  300. package/src/routes/oauth.ts +1055 -0
  301. package/src/routes/push.ts +1465 -0
  302. package/src/routes/room.ts +639 -0
  303. package/src/routes/schema-endpoint.ts +76 -0
  304. package/src/routes/sql.ts +176 -0
  305. package/src/routes/storage.ts +1674 -0
  306. package/src/routes/tables.ts +699 -0
  307. package/src/routes/users.ts +21 -0
  308. package/src/routes/vectorize.ts +372 -0
  309. package/src/types.ts +99 -0
@@ -0,0 +1,2240 @@
1
+ /**
2
+ * DatabaseDO — Durable Object for table data storage.
3
+ *
4
+ * Single class, multiple instances:
5
+ * {namespace} — static DB (e.g. 'shared')
6
+ * {namespace}:{id} — dynamic DB (e.g. 'workspace:ws-456')
7
+ *
8
+ * NOTE: db:_system eliminated — _users_public → AUTH_DB D1,
9
+ * _schedules → Cron Triggers, plugin _meta → CONTROL_DB D1.
10
+ *
11
+ * Responsibilities:
12
+ * - Lazy Schema Init: create/update tables on first request
13
+ * - Lazy Migration: run user-defined migrations
14
+ * - CRUD operations via internal Hono sub-app
15
+ * - Backup dump/restore via /internal/backup/*
16
+ */
17
+ import { DurableObject } from 'cloudflare:workers';
18
+ import { Hono } from 'hono';
19
+ import type {
20
+ EdgeBaseConfig,
21
+ TableConfig,
22
+ TableRules,
23
+ MigrationConfig,
24
+ HookCtx,
25
+ AuthContext,
26
+ } from '@edge-base/shared';
27
+ import { EdgeBaseError, getTableAccess, getTableHooks } from '@edge-base/shared';
28
+ import {
29
+ META_TABLE_DDL,
30
+ generateTableDDL,
31
+ generateAddColumnDDL,
32
+ generateFTS5DDL,
33
+ generateFTS5Triggers,
34
+ generateIndexDDL,
35
+ buildEffectiveSchema,
36
+ computeSchemaHashSync,
37
+ } from '../lib/schema.js';
38
+
39
+ import { generateId } from '../lib/uuid.js';
40
+ import { parseUpdateBody } from '../lib/op-parser.js';
41
+ import {
42
+ buildListQuery,
43
+ buildGetQuery,
44
+ buildCountQuery,
45
+ buildSearchQuery,
46
+ buildSubstringSearchQuery,
47
+ parseQueryParams,
48
+ type FilterTuple,
49
+ } from '../lib/query-engine.js';
50
+ import { summarizeValidationErrors, validateInsert, validateUpdate } from '../lib/validation.js';
51
+ import { hookRejectedError, validationError, notFoundError } from '../lib/errors.js';
52
+ import {
53
+ executeDbTriggers,
54
+ getRegisteredFunctions,
55
+ buildFunctionContext,
56
+ } from '../lib/functions.js';
57
+ import { parseDbDoName, parseConfig as getGlobalConfig } from '../lib/do-router.js';
58
+ import { parseDuration } from '../lib/jwt.js';
59
+ import { createPushProvider } from '../lib/push-provider.js';
60
+ import { getDevicesForUser } from '../lib/push-token.js';
61
+ import { ensureAuthSchema } from '../lib/auth-d1.js';
62
+ import { resolveAuthDb, type AuthDb } from '../lib/auth-db-adapter.js';
63
+ import { buildDbLiveChannel, DATABASE_LIVE_HUB_DO_NAME } from '../lib/database-live-emitter.js';
64
+ import { resolveRootServiceKey } from '../lib/service-key.js';
65
+ import { resolveDbLiveBatchThreshold } from '../lib/database-live-config.js';
66
+ import type { Env } from '../types.js';
67
+
68
+ // ─── Types ───
69
+
70
+ interface DOEnv {
71
+ DATABASE_LIVE: DurableObjectNamespace;
72
+ DATABASE: DurableObjectNamespace;
73
+ AUTH: DurableObjectNamespace;
74
+ AUTH_DB?: D1Database;
75
+ KV?: KVNamespace;
76
+ SERVICE_KEY?: string;
77
+ }
78
+
79
+ // ─── DatabaseDO Class ───
80
+
81
+ export class DatabaseDO extends DurableObject<DOEnv> {
82
+ private app: Hono;
83
+ private config: EdgeBaseConfig;
84
+ private initialized = false;
85
+ private doName = '';
86
+
87
+ constructor(ctx: DurableObjectState, env: DOEnv) {
88
+ super(ctx, env);
89
+ this.config = this.parseConfig(env);
90
+ this.app = this.buildApp();
91
+ }
92
+
93
+ private getServiceKey(): string | undefined {
94
+ return resolveRootServiceKey(this.config, this.env as unknown as Env);
95
+ }
96
+
97
+ async fetch(request: Request): Promise<Response> {
98
+ // Determine DO name from header or URL
99
+ const doNameHeader = request.headers.get('X-DO-Name');
100
+
101
+ if (doNameHeader) this.doName = doNameHeader;
102
+
103
+ // Lazy initialization on first request
104
+ if (!this.initialized) {
105
+ // §36: Newly created DO must be authorized before initialization.
106
+ // If X-DO-Create-Authorized header is absent, signal Worker to evaluate canCreate.
107
+ // Shared/static DOs (doName === 'shared' or system) skip this gate.
108
+ const isStaticDO = !this.doName || this.doName === 'shared' || this.doName.startsWith('_');
109
+ if (!isStaticDO && !request.headers.get('X-DO-Create-Authorized')) {
110
+ // Check if _meta table already exists (i.e., DO was previously initialized)
111
+ let alreadyExists = false;
112
+ try {
113
+ this.ctx.storage.sql.exec('SELECT 1 FROM _meta LIMIT 1');
114
+ alreadyExists = true;
115
+ } catch {
116
+ // Table doesn't exist yet — this is a genuinely new DO
117
+ }
118
+
119
+ if (!alreadyExists) {
120
+ // Signal Worker: this DO needs canCreate evaluation before init
121
+ const parsed = this.doName ? parseDbDoName(this.doName) : null;
122
+ return Response.json(
123
+ { needsCreate: true, namespace: parsed?.namespace ?? 'shared', id: parsed?.id },
124
+ { status: 201 },
125
+ );
126
+ }
127
+ }
128
+
129
+ this.initializeSchema();
130
+ this.initialized = true;
131
+ // Persist doName for backup DO enumeration
132
+ if (this.doName) {
133
+ this.setMeta('doName', this.doName);
134
+ }
135
+ }
136
+
137
+ return this.app.fetch(request);
138
+ }
139
+
140
+ // ─── Auth Context parsing (for hooks) ───
141
+
142
+ /**
143
+ * Parse auth context from X-Auth-Context header forwarded by Worker (#133 §6).
144
+ */
145
+ private parseAuthContext(request: Request): AuthContext | null {
146
+ const raw = request.headers.get('X-Auth-Context');
147
+ if (!raw) return null;
148
+ try {
149
+ return JSON.parse(raw) as AuthContext;
150
+ } catch {
151
+ return null;
152
+ }
153
+ }
154
+
155
+ /**
156
+ * Check if this request was made with a valid Service Key.
157
+ * The 'X-Is-Service-Key: true' header is injected by tables.ts ONLY after
158
+ * the Worker validates the SK — it is not forwarded from external requests.
159
+ * SK requests bypass all row-level rules.
160
+ */
161
+ private isServiceKeyRequest(request: Request): boolean {
162
+ return (
163
+ request.headers.get('X-Is-Service-Key') === 'true'
164
+ || (
165
+ request.headers.get('X-EdgeBase-Internal') === 'true'
166
+ && new URL(request.url).host === 'do'
167
+ )
168
+ );
169
+ }
170
+
171
+ /**
172
+ * Build HookCtx passed to table hooks (#133 §6).
173
+ * db.get/list/exists use local SQL; databaseLive.broadcast uses emitDbLiveEvent.
174
+ */
175
+ private buildHookCtx(_table: string): HookCtx {
176
+ return {
177
+ db: {
178
+ get: (tbl: string, id: string) => {
179
+ const rows = [...this.sql(`SELECT * FROM "${tbl}" WHERE "id" = ? LIMIT 1`, id)];
180
+ return Promise.resolve((rows[0] as Record<string, unknown>) ?? null);
181
+ },
182
+ list: (tbl: string, filter?: Record<string, unknown>) => {
183
+ const escId = (n: string) => `"${n.replace(/"/g, '""')}"`;
184
+ if (filter && Object.keys(filter).length > 0) {
185
+ const keys = Object.keys(filter);
186
+ const cond = keys.map((k) => `${escId(k)} = ?`).join(' AND ');
187
+ const vals = keys.map((k) => filter[k]);
188
+ const rows = [...this.sql(`SELECT * FROM ${escId(tbl)} WHERE ${cond}`, ...vals)];
189
+ return Promise.resolve(rows as Record<string, unknown>[]);
190
+ }
191
+ const rows = [...this.sql(`SELECT * FROM ${escId(tbl)}`)];
192
+ return Promise.resolve(rows as Record<string, unknown>[]);
193
+ },
194
+ exists: (tbl: string, filter: Record<string, unknown>) => {
195
+ const escId = (n: string) => `"${n.replace(/"/g, '""')}"`;
196
+ const keys = Object.keys(filter);
197
+ if (keys.length === 0) return Promise.resolve(false);
198
+ const cond = keys.map((k) => `${escId(k)} = ?`).join(' AND ');
199
+ const vals = keys.map((k) => filter[k]);
200
+ const rows = [...this.sql(`SELECT 1 FROM ${escId(tbl)} WHERE ${cond} LIMIT 1`, ...vals)];
201
+ return Promise.resolve(rows.length > 0);
202
+ },
203
+ },
204
+ databaseLive: {
205
+ broadcast: (channel: string, event: string, data: unknown) => {
206
+ return this.sendBroadcastToDatabaseLiveDO(
207
+ channel,
208
+ { channel, event, payload: data ?? {} },
209
+ );
210
+ },
211
+ },
212
+ push: {
213
+ // Push from hooks — direct FCM via push-provider + KV device tokens
214
+ send: async (userId: string, payload: { title?: string; body: string }) => {
215
+ // Fire-and-forget — hooks are non-critical side effects
216
+ try {
217
+ if (!this.env.KV) return;
218
+ const provider = createPushProvider(this.config.push, this.env as unknown as Env);
219
+ if (!provider) return;
220
+ let tokenStore: KVNamespace | { kv: KVNamespace; authDb?: AuthDb | null } = this.env.KV;
221
+ try {
222
+ const authDb = resolveAuthDb(this.env as unknown as Record<string, unknown>);
223
+ await ensureAuthSchema(authDb);
224
+ tokenStore = { kv: this.env.KV, authDb };
225
+ } catch {
226
+ tokenStore = this.env.KV;
227
+ }
228
+ const devices = await getDevicesForUser(tokenStore, userId);
229
+ if (devices.length === 0) return;
230
+ await Promise.allSettled(
231
+ devices.map((device) =>
232
+ provider.send({ token: device.token, platform: device.platform, payload }),
233
+ ),
234
+ );
235
+ } catch {
236
+ /* best-effort */
237
+ }
238
+ },
239
+ },
240
+ waitUntil: (p: Promise<unknown>) => this.ctx.waitUntil(p),
241
+ };
242
+ }
243
+
244
+ // ─── Record Enrich ───
245
+
246
+ /**
247
+ * Run onEnrich hook for a single record.
248
+ * Returns the original record plus enriched fields (original if no hook or hook returns void).
249
+ */
250
+ private async enrichRecord(
251
+ tableName: string,
252
+ record: Record<string, unknown>,
253
+ auth: AuthContext | null,
254
+ ): Promise<Record<string, unknown>> {
255
+ const tableConfig = this.getTableConfig(tableName);
256
+ const onEnrich = getTableHooks(tableConfig ?? undefined)?.onEnrich;
257
+ if (!onEnrich) return record;
258
+ try {
259
+ const hookCtx = this.buildHookCtx(tableName);
260
+ const result = await onEnrich(auth, record, hookCtx);
261
+ if (result && typeof result === 'object') return { ...record, ...result };
262
+ return record;
263
+ } catch (err) {
264
+ console.error(`[EdgeBase] onEnrich hook error for table "${tableName}":`, err);
265
+ return record; // return original record on hook failure
266
+ }
267
+ }
268
+
269
+ /**
270
+ * Run onEnrich hook for multiple records in parallel.
271
+ */
272
+ private async enrichRecords(
273
+ tableName: string,
274
+ records: Record<string, unknown>[],
275
+ auth: AuthContext | null,
276
+ ): Promise<Record<string, unknown>[]> {
277
+ const tableConfig = this.getTableConfig(tableName);
278
+ const onEnrich = getTableHooks(tableConfig ?? undefined)?.onEnrich;
279
+ if (!onEnrich || records.length === 0) return records;
280
+ return Promise.all(records.map((r) => this.enrichRecord(tableName, r, auth)));
281
+ }
282
+
283
+ // ─── Schema Initialization ───
284
+
285
+ private initializeSchema(): void {
286
+ // Enable FK enforcement (#133 §35) — SQLite FKs are off by default
287
+ this.ctx.storage.sql.exec('PRAGMA foreign_keys = ON');
288
+
289
+ // 1. Always create _meta table
290
+ this.execMulti(META_TABLE_DDL);
291
+
292
+ // NOTE: System DO (db:_system) tables removed — _users_public → AUTH_DB D1,
293
+ // _schedules → Cron Triggers.
294
+
295
+ // 3. User tables — Lazy Schema Init
296
+ const tables = this.getMyTables();
297
+
298
+ for (const [name, tableConfig] of Object.entries(tables)) {
299
+ this.initTable(name, tableConfig as TableConfig);
300
+ }
301
+ }
302
+
303
+ private initTable(name: string, config: TableConfig): void {
304
+ const hashKey = `schemaHash:${name}`;
305
+ const currentHash = this.getMeta(hashKey);
306
+ const newHash = computeSchemaHashSync(config);
307
+
308
+ if (!currentHash) {
309
+ // First time — create table with all DDL
310
+ const ddlStatements = generateTableDDL(name, config);
311
+ for (const ddl of ddlStatements) {
312
+ this.execMulti(ddl);
313
+ }
314
+
315
+ // Set initial migration version if migrations exist
316
+ const maxVersion = config.migrations?.length
317
+ ? Math.max(...config.migrations.map((m: MigrationConfig) => m.version))
318
+ : 1;
319
+ this.setMeta(`migration_version:${name}`, String(maxVersion));
320
+ this.setMeta(hashKey, newHash);
321
+ } else if (currentHash !== newHash) {
322
+ // Schema changed — detect new columns (non-destructive only)
323
+ this.handleSchemaUpdate(name, config);
324
+ this.setMeta(hashKey, newHash);
325
+
326
+ // Run pending migrations
327
+ this.runMigrations(name, config);
328
+ } else {
329
+ // No schema change — still check migrations
330
+ this.runMigrations(name, config);
331
+ }
332
+
333
+ // Always ensure FTS5 + indexes exist (idempotent IF NOT EXISTS DDL).
334
+ // Covers case where initial creation failed silently but hash was saved.
335
+ this.ensureFTS5AndIndexes(name, config);
336
+ }
337
+
338
+ private handleSchemaUpdate(name: string, config: TableConfig): void {
339
+ // Add new columns (non-destructive)
340
+ const existingCols = new Set<string>();
341
+ for (const row of this.sql(`PRAGMA table_info("${name}")`)) {
342
+ existingCols.add(row.name as string);
343
+ }
344
+
345
+ const effectiveSchema = buildEffectiveSchema(config.schema);
346
+ for (const [colName, field] of Object.entries(effectiveSchema)) {
347
+ if (!existingCols.has(colName)) {
348
+ const ddl = generateAddColumnDDL(name, colName, field);
349
+ this.execMulti(ddl);
350
+ }
351
+ }
352
+ }
353
+
354
+ /**
355
+ * Ensure FTS5 virtual tables, triggers, and indexes exist.
356
+ * All DDL uses IF NOT EXISTS / IF NOT EXISTS — safe to run idempotently.
357
+ * This is called on EVERY initTable path, so even if initial creation
358
+ * silently failed (e.g., trigram tokenizer unavailable), subsequent DO
359
+ * wake-ups will retry and self-heal.
360
+ */
361
+ private ensureFTS5AndIndexes(name: string, config: TableConfig): void {
362
+ if (config.fts?.length) {
363
+ try {
364
+ this.execMulti(generateFTS5DDL(name, config.fts));
365
+ for (const triggerDDL of generateFTS5Triggers(name, config.fts)) {
366
+ this.execMulti(triggerDDL);
367
+ }
368
+ } catch {
369
+ // FTS5 may not be supported in this SQLite build — log and continue
370
+ }
371
+ }
372
+
373
+ if (config.indexes?.length) {
374
+ for (const indexDDL of generateIndexDDL(name, config.indexes)) {
375
+ this.execMulti(indexDDL);
376
+ }
377
+ }
378
+ }
379
+
380
+ /**
381
+ * Schemaless CRUD support: dynamically add TEXT columns
382
+ * for user-provided fields that don't yet exist in the table.
383
+ * Only called when colConfig.schema is undefined.
384
+ */
385
+ private ensureSchemalessColumns(tableName: string, fields: string[]): void {
386
+ const existingCols = new Set<string>();
387
+ for (const row of this.sql(`PRAGMA table_info("${tableName}")`)) {
388
+ existingCols.add(row.name as string);
389
+ }
390
+ for (const field of fields) {
391
+ if (!existingCols.has(field)) {
392
+ this.sql(`ALTER TABLE "${tableName}" ADD COLUMN "${field}" TEXT`);
393
+ }
394
+ }
395
+ }
396
+
397
+ // ─── Lazy Migration Engine ───
398
+
399
+ private runMigrations(name: string, config: TableConfig): void {
400
+ if (!config.migrations?.length) return;
401
+
402
+ const versionKey = `migration_version:${name}`;
403
+ const currentVersion = parseInt(this.getMeta(versionKey) || '1', 10);
404
+
405
+ const pending = config.migrations
406
+ .filter((m: MigrationConfig) => m.version > currentVersion)
407
+ .sort((a: MigrationConfig, b: MigrationConfig) => a.version - b.version);
408
+
409
+ for (const migration of pending) {
410
+ try {
411
+ this.execMulti(migration.up);
412
+ this.setMeta(versionKey, String(migration.version));
413
+ } catch (err) {
414
+ // Migration failed — stop here, return 503 on subsequent requests
415
+ console.error(`Migration v${migration.version} failed for ${name}:`, err);
416
+ throw new Error(`Migration v${migration.version} failed: ${(err as Error).message}`);
417
+ }
418
+ }
419
+ }
420
+
421
+ // ─── Hono Sub-App (Internal Routes) ───
422
+
423
+ private buildApp(): Hono {
424
+ const app = new Hono();
425
+
426
+ // Error handler — registered at end of buildApp() (see bottom of this method).
427
+ // NOTE: only ONE onError per Hono app (duplicates are silently ignored).
428
+
429
+ // Health check
430
+ app.get('/health', (c) => c.json({ status: 'ok', do: this.doName }));
431
+
432
+ // ─── Table CRUD ───
433
+
434
+ // LIST: GET /tables/:name
435
+ app.get('/tables/:name', async (c) => {
436
+ const name = c.req.param('name');
437
+ this.ensureTableExists(name);
438
+
439
+ const queryParams = Object.fromEntries(new URL(c.req.url).searchParams);
440
+ const options = parseQueryParams(queryParams);
441
+ const { sql, params, countSql, countParams } = buildListQuery(name, options);
442
+
443
+ const tableConfig = this.getTableConfig(name);
444
+ const rows = [...this.sql(sql, ...params)] as Record<string, unknown>[];
445
+ const normalizedRows = this.normalizeRows(rows, tableConfig);
446
+
447
+ // §7 All-or-Nothing row-level read rule (BUG-005) — SK bypasses
448
+ const listRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
449
+ if (listRules?.read && !this.isServiceKeyRequest(c.req.raw)) {
450
+ const listAuth = this.parseAuthContext(c.req.raw);
451
+ for (const row of normalizedRows) {
452
+ const canRead = await this.evalRowRule(listRules.read, listAuth, row);
453
+ if (!canRead) {
454
+ throw new EdgeBaseError(
455
+ 403,
456
+ `Access denied: 'read' rule blocked row "${row.id}" in table "${name}".`,
457
+ );
458
+ }
459
+ }
460
+ }
461
+
462
+ // onEnrich hook — transform/augment records before response
463
+ const authContext = this.parseAuthContext(c.req.raw);
464
+ const enrichedRows = await this.enrichRecords(name, normalizedRows, authContext);
465
+
466
+ // Build response
467
+ const response: Record<string, unknown> = { items: enrichedRows };
468
+
469
+ // Offset pagination: include total, page, perPage
470
+ if (countSql && countParams) {
471
+ const countResult = [...this.sql(countSql, ...countParams)];
472
+ const total = (countResult[0]?.total as number) ?? 0;
473
+ const perPage = options.pagination?.perPage ?? options.pagination?.limit ?? 20;
474
+ response.total = total;
475
+ response.page = options.pagination?.page ?? 1;
476
+ response.perPage = perPage;
477
+ }
478
+
479
+ // Cursor pagination: always include cursor and hasMore when items exist
480
+ // so clients can start cursor-based pagination from any page (including the first)
481
+ const limit = options.pagination?.limit ?? options.pagination?.perPage ?? 20;
482
+ const hasMore = normalizedRows.length === limit;
483
+ response.hasMore = hasMore;
484
+ if (normalizedRows.length > 0) {
485
+ response.cursor = normalizedRows[normalizedRows.length - 1].id;
486
+ }
487
+
488
+ return c.json(response);
489
+ });
490
+
491
+ // COUNT: GET /tables/:name/count
492
+ // NOTE: must be registered BEFORE /:name/:id to avoid "count" matching as :id
493
+ app.get('/tables/:name/count', async (c) => {
494
+ const name = c.req.param('name');
495
+ this.ensureTableExists(name);
496
+
497
+ const queryParams = Object.fromEntries(new URL(c.req.url).searchParams);
498
+ const options = parseQueryParams(queryParams);
499
+
500
+ const { sql, params } = buildCountQuery(name, options.filters, options.orFilters);
501
+ const rows = [...this.sql(sql, ...params)];
502
+ const total = (rows[0]?.total as number) ?? 0;
503
+ return c.json({ total });
504
+ });
505
+
506
+ // SEARCH: GET /tables/:name/search
507
+ // NOTE: must be registered BEFORE /:name/:id to avoid "search" matching as :id
508
+ app.get('/tables/:name/search', async (c) => {
509
+ const name = c.req.param('name');
510
+ this.ensureTableExists(name);
511
+
512
+ const queryParams = Object.fromEntries(new URL(c.req.url).searchParams);
513
+ const options = parseQueryParams(queryParams);
514
+ const q = options.search || '';
515
+ if (!q) {
516
+ return c.json({ items: [] });
517
+ }
518
+
519
+ const limit = options.pagination?.limit ?? options.pagination?.perPage ?? 20;
520
+ const offset = options.pagination?.offset ?? ((options.pagination?.page ?? 1) - 1) * limit;
521
+
522
+ const tableConfig = this.getTableConfig(name);
523
+ const ftsFields = tableConfig?.fts;
524
+
525
+ const highlightPre = c.req.query('highlightPre') || '<mark>';
526
+ const highlightPost = c.req.query('highlightPost') || '</mark>';
527
+
528
+ const searchQuery = buildSearchQuery(name, q, {
529
+ pagination: options.pagination,
530
+ filters: options.filters,
531
+ orFilters: options.orFilters,
532
+ sort: options.sort,
533
+ ftsFields,
534
+ highlightPre,
535
+ highlightPost,
536
+ });
537
+
538
+ try {
539
+ let rows = [...this.sql(searchQuery.sql, ...searchQuery.params)] as Record<string, unknown>[];
540
+ let total = Number(
541
+ searchQuery.countSql
542
+ ? [...this.sql(searchQuery.countSql, ...(searchQuery.countParams ?? []))][0]?.total ?? rows.length
543
+ : rows.length,
544
+ );
545
+ if (rows.length === 0) {
546
+ const fallback = buildSubstringSearchQuery(name, q, {
547
+ pagination: options.pagination,
548
+ filters: options.filters,
549
+ orFilters: options.orFilters,
550
+ sort: options.sort,
551
+ fields: ftsFields,
552
+ });
553
+ rows = [...this.sql(fallback.sql, ...fallback.params)] as Record<string, unknown>[];
554
+ total = Number(
555
+ fallback.countSql
556
+ ? [...this.sql(fallback.countSql, ...(fallback.countParams ?? []))][0]?.total ?? rows.length
557
+ : rows.length,
558
+ );
559
+ }
560
+ const tableConfig = this.getTableConfig(name);
561
+ const normalizedSearch = this.normalizeRows(rows, tableConfig);
562
+
563
+ // §7 All-or-Nothing read rule for search results (BUG-005) — SK bypasses
564
+ const searchRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
565
+ if (searchRules?.read && !this.isServiceKeyRequest(c.req.raw)) {
566
+ const searchAuth = this.parseAuthContext(c.req.raw);
567
+ for (const row of normalizedSearch) {
568
+ const canRead = await this.evalRowRule(searchRules.read, searchAuth, row);
569
+ if (!canRead) {
570
+ throw new EdgeBaseError(
571
+ 403,
572
+ `Access denied: 'read' rule blocked row "${row.id}" in table "${name}".`,
573
+ );
574
+ }
575
+ }
576
+ }
577
+
578
+ // onEnrich hook — transform/augment records before response
579
+ const searchEnrichAuth = this.parseAuthContext(c.req.raw);
580
+ const enrichedSearch = await this.enrichRecords(name, normalizedSearch, searchEnrichAuth);
581
+
582
+ return c.json({ items: enrichedSearch, total, hasMore: total > offset + enrichedSearch.length, cursor: null, page: null, perPage: limit });
583
+ } catch (err) {
584
+ if (err instanceof EdgeBaseError) throw err;
585
+ return c.json({ items: [], error: 'FTS5 not configured for this table.' }, 400);
586
+ }
587
+ });
588
+
589
+ // GET: GET /tables/:name/:id
590
+ app.get('/tables/:name/:id', async (c) => {
591
+ const name = c.req.param('name');
592
+ const id = c.req.param('id');
593
+ this.ensureTableExists(name);
594
+
595
+ const fieldsParam = c.req.query('fields');
596
+ const fields = fieldsParam ? fieldsParam.split(',').map((f) => f.trim()) : undefined;
597
+ const { sql, params } = buildGetQuery(name, id, fields);
598
+ const rows = [...this.sql(sql, ...params)];
599
+
600
+ if (rows.length === 0) {
601
+ throw notFoundError(`Record ${id} not found.`);
602
+ }
603
+
604
+ const tableConfig = this.getTableConfig(name);
605
+ const normalizedGet = this.normalizeRow(rows[0] as Record<string, unknown>, tableConfig);
606
+
607
+ // §7 row-level read rule (BUG-005) — SK bypasses
608
+ const getRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
609
+ if (getRules?.read && !this.isServiceKeyRequest(c.req.raw)) {
610
+ const getAuth = this.parseAuthContext(c.req.raw);
611
+ const canRead = await this.evalRowRule(getRules.read, getAuth, normalizedGet);
612
+ if (!canRead)
613
+ throw new EdgeBaseError(
614
+ 403,
615
+ `Access denied: 'read' rule blocked record "${id}" in table "${name}".`,
616
+ );
617
+ }
618
+
619
+ // onEnrich hook — transform/augment record before response
620
+ const getAuth = this.parseAuthContext(c.req.raw);
621
+ const enrichedGet = await this.enrichRecord(name, normalizedGet, getAuth);
622
+
623
+ return c.json(enrichedGet);
624
+ });
625
+
626
+ // CREATE: POST /tables/:name
627
+ app.post('/tables/:name', async (c) => {
628
+ const name = c.req.param('name');
629
+ this.ensureTableExists(name);
630
+ const tableConfig = this.getTableConfig(name);
631
+ if (!tableConfig) {
632
+ throw validationError(`Table '${name}' is not defined in the schema configuration.`);
633
+ }
634
+
635
+ const body = await c.req.json<Record<string, unknown>>();
636
+
637
+ // Check for upsert mode
638
+ const upsertMode = c.req.query('upsert') === 'true';
639
+ const conflictTarget = c.req.query('conflictTarget') || 'id';
640
+
641
+ // Validate conflictTarget if upsert mode
642
+ if (upsertMode && conflictTarget !== 'id') {
643
+ const effectiveForConflict = buildEffectiveSchema(tableConfig.schema);
644
+ const targetField = effectiveForConflict[conflictTarget];
645
+ if (!targetField) {
646
+ throw validationError(`Field '${conflictTarget}' does not exist in schema.`);
647
+ }
648
+ if (!targetField.unique) {
649
+ throw validationError(
650
+ `Field '${conflictTarget}' is not unique. conflictTarget must be a unique field.`,
651
+ );
652
+ }
653
+ }
654
+
655
+ // Validate
656
+ const result = validateInsert(body, tableConfig.schema);
657
+ if (!result.valid) {
658
+ throw validationError(
659
+ summarizeValidationErrors(result.errors),
660
+ Object.fromEntries(
661
+ Object.entries(result.errors).map(([k, v]) => [k, { code: 'invalid', message: v }]),
662
+ ),
663
+ );
664
+ }
665
+
666
+ const now = new Date().toISOString();
667
+ const id = (body.id as string) || generateId();
668
+ const effective = buildEffectiveSchema(tableConfig.schema);
669
+
670
+ // Build INSERT data with auto fields
671
+ const record: Record<string, unknown> = { ...body, id };
672
+ if ('createdAt' in effective) record.createdAt = now;
673
+ if ('updatedAt' in effective) record.updatedAt = now;
674
+
675
+ // Apply default values
676
+ for (const [fname, field] of Object.entries(effective)) {
677
+ if (record[fname] === undefined && field.default !== undefined) {
678
+ record[fname] = field.default;
679
+ }
680
+ }
681
+
682
+ // Run beforeInsert hook if defined (#133 §6)
683
+ const auth = this.parseAuthContext(c.req.raw);
684
+ const tableHooks = getTableHooks(tableConfig ?? undefined);
685
+ if (tableHooks?.beforeInsert) {
686
+ const hookCtx = this.buildHookCtx(name);
687
+ try {
688
+ const transformed = await tableHooks.beforeInsert(auth, record, hookCtx);
689
+ if (transformed && typeof transformed === 'object') {
690
+ Object.assign(record, transformed);
691
+ }
692
+ } catch (err) {
693
+ throw hookRejectedError(err, 'Insert rejected by beforeInsert hook.');
694
+ }
695
+ }
696
+
697
+ // Schemaless: include all record keys; schema-defined: filter through effective
698
+ let columns: string[];
699
+ if (!tableConfig.schema) {
700
+ columns = Object.keys(record);
701
+ this.ensureSchemalessColumns(
702
+ name,
703
+ columns.filter((k) => !(k in effective)),
704
+ );
705
+ } else {
706
+ columns = Object.keys(record).filter((k) => k in effective);
707
+ }
708
+ const values = columns.map((k) => {
709
+ const v = record[k];
710
+ // Serialize json-type fields to string for SQLite TEXT storage (BUG-006)
711
+ if (
712
+ effective[k]?.type === 'json' &&
713
+ v !== null &&
714
+ v !== undefined &&
715
+ typeof v === 'object'
716
+ ) {
717
+ return JSON.stringify(v);
718
+ }
719
+ if (effective[k]?.type === 'boolean' && v !== null && v !== undefined) {
720
+ return v === true || v === 'true' || v === 1 || v === '1' ? 1 : 0;
721
+ }
722
+ return v;
723
+ });
724
+ const placeholders = columns.map(() => '?').join(', ');
725
+ const colStr = columns.map((c) => `"${c}"`).join(', ');
726
+
727
+ // Track whether this is an update (drives the database-live event type and the response status/action)
728
+ let isUpdate = false;
729
+ // Store before row for upsert path — used by afterUpdate hook and triggers (BUG-013)
730
+ let upsertBeforeRow: Record<string, unknown> | null = null;
731
+
732
+ if (upsertMode) {
733
+ // Check if record exists and capture before row (BUG-013: SELECT * instead of SELECT 1)
734
+ if (conflictTarget === 'id') {
735
+ const existing = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ? LIMIT 1`, id)];
736
+ isUpdate = existing.length > 0;
737
+ if (isUpdate) upsertBeforeRow = existing[0] as Record<string, unknown>;
738
+ } else {
739
+ const targetValue = record[conflictTarget];
740
+ if (targetValue !== undefined) {
741
+ const existing = [
742
+ ...this.sql(
743
+ `SELECT * FROM "${name}" WHERE "${conflictTarget}" = ? LIMIT 1`,
744
+ targetValue,
745
+ ),
746
+ ];
747
+ isUpdate = existing.length > 0;
748
+ if (isUpdate) upsertBeforeRow = existing[0] as Record<string, unknown>;
749
+ }
750
+ }
751
+
752
+ // UPSERT: ON CONFLICT DO UPDATE
753
+ const updateCols = columns.filter(
754
+ (k) => k !== 'id' && k !== 'createdAt' && k !== conflictTarget,
755
+ );
756
+ const updateSet = updateCols.map((k) => `"${k}" = excluded."${k}"`).join(', ');
757
+ const sql = updateSet
758
+ ? `INSERT INTO "${name}" (${colStr}) VALUES (${placeholders}) ON CONFLICT("${conflictTarget}") DO UPDATE SET ${updateSet}`
759
+ : `INSERT INTO "${name}" (${colStr}) VALUES (${placeholders}) ON CONFLICT("${conflictTarget}") DO NOTHING`;
760
+ this.sql(sql, ...values);
761
+ } else {
762
+ const sql = `INSERT INTO "${name}" (${colStr}) VALUES (${placeholders})`;
763
+ this.sql(sql, ...values);
764
+ }
765
+
766
+ // Return the created/updated record
767
+ const fetchField = upsertMode && conflictTarget !== 'id' ? conflictTarget : 'id';
768
+ const fetchValue = upsertMode && conflictTarget !== 'id' ? record[conflictTarget] : id;
769
+ const resultRow = [
770
+ ...this.sql(`SELECT * FROM "${name}" WHERE "${fetchField}" = ?`, fetchValue),
771
+ ];
772
+
773
+ // Emit database-live event
774
+ const eventType = isUpdate ? 'modified' : 'added';
775
+ const resultId = ((resultRow[0] as Record<string, unknown>)?.id as string) ?? id;
776
+ this.ctx.waitUntil(
777
+ this.emitDbLiveEvent(name, eventType, resultId, resultRow[0] as Record<string, unknown>),
778
+ );
779
+
780
+ // Fire DB triggers asynchronously
781
+ const triggerEvent = isUpdate ? 'update' : 'insert';
782
+ const doOrigin = this.doName ? parseDbDoName(this.doName) : { namespace: 'shared' };
783
+ const triggerData = isUpdate
784
+ ? {
785
+ before: upsertBeforeRow ?? (resultRow[0] as Record<string, unknown>),
786
+ after: resultRow[0] as Record<string, unknown>,
787
+ }
788
+ : { after: resultRow[0] as Record<string, unknown> };
789
+ this.ctx.waitUntil(
790
+ executeDbTriggers(
791
+ name,
792
+ triggerEvent,
793
+ triggerData,
794
+ {
795
+ databaseNamespace: this.env.DATABASE,
796
+ authNamespace: this.env.AUTH,
797
+ kvNamespace: this.env.KV,
798
+ config: this.config,
799
+ serviceKey: this.getServiceKey(),
800
+ },
801
+ doOrigin,
802
+ ),
803
+ );
804
+
805
+ // Run afterInsert/afterUpdate hook if defined (#133 §6)
806
+ if (!isUpdate && tableHooks?.afterInsert) {
807
+ const hookCtx = this.buildHookCtx(name);
808
+ this.ctx.waitUntil(
809
+ Promise.resolve(
810
+ tableHooks.afterInsert(resultRow[0] as Record<string, unknown>, hookCtx),
811
+ ).catch(() => {
812
+ /* best-effort */
813
+ }),
814
+ );
815
+ } else if (isUpdate && tableHooks?.afterUpdate) {
816
+ const hookCtx = this.buildHookCtx(name);
817
+ this.ctx.waitUntil(
818
+ Promise.resolve(
819
+ tableHooks.afterUpdate(
820
+ // BUG-013 fix: pass actual before row captured before upsert
821
+ upsertBeforeRow ?? (resultRow[0] as Record<string, unknown>),
822
+ resultRow[0] as Record<string, unknown>,
823
+ hookCtx,
824
+ ),
825
+ ).catch(() => {
826
+ /* best-effort */
827
+ }),
828
+ );
829
+ }
830
+
831
+ // Response: 201 + action:inserted or 200 + action:updated
832
+ const statusCode = isUpdate ? 200 : 201;
833
+ const action = isUpdate ? 'updated' : 'inserted';
834
+ const normalizedResult = this.normalizeRow(
835
+ resultRow[0] as Record<string, unknown>,
836
+ tableConfig,
837
+ );
838
+ if (upsertMode) {
839
+ return c.json({ ...normalizedResult, action }, statusCode as 200);
840
+ }
841
+ return c.json(normalizedResult, 201);
842
+ });
843
+
844
+ // UPDATE: PATCH /tables/:name/:id
845
+ app.patch('/tables/:name/:id', async (c) => {
846
+ const name = c.req.param('name');
847
+ const id = c.req.param('id');
848
+ this.ensureTableExists(name);
849
+ const tableConfig = this.getTableConfig(name);
850
+ if (!tableConfig) {
851
+ throw validationError(`Table '${name}' is not defined in the schema configuration.`);
852
+ }
853
+
854
+ const body = await c.req.json<Record<string, unknown>>();
855
+
856
+ // Validate
857
+ const result = validateUpdate(body, tableConfig.schema);
858
+ if (!result.valid) {
859
+ throw validationError(
860
+ 'Validation failed.',
861
+ Object.fromEntries(
862
+ Object.entries(result.errors).map(([k, v]) => [k, { code: 'invalid', message: v }]),
863
+ ),
864
+ );
865
+ }
866
+
867
+ // Check record exists
868
+ const existing = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
869
+ if (existing.length === 0) {
870
+ throw notFoundError(`Record ${id} not found.`);
871
+ }
872
+
873
+ // §7 row-level update rule (BUG-005) — SK bypasses
874
+ const updateRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
875
+ const authForUpdate = this.parseAuthContext(c.req.raw);
876
+ if (updateRules?.update && !this.isServiceKeyRequest(c.req.raw)) {
877
+ const canUpdate = await this.evalRowRule(
878
+ updateRules.update,
879
+ authForUpdate,
880
+ existing[0] as Record<string, unknown>,
881
+ );
882
+ if (!canUpdate)
883
+ throw new EdgeBaseError(
884
+ 403,
885
+ `Access denied: 'update' rule blocked record "${id}" in table "${name}".`,
886
+ );
887
+ }
888
+
889
+ // Build UPDATE with $op support
890
+ const effective = buildEffectiveSchema(tableConfig.schema);
891
+ const updateData = { ...body };
892
+ delete updateData.id;
893
+ delete updateData.createdAt;
894
+
895
+ // Apply onUpdate: 'now'
896
+ if ('updatedAt' in effective && effective.updatedAt?.onUpdate === 'now') {
897
+ updateData.updatedAt = new Date().toISOString();
898
+ }
899
+
900
+ // Schema-defined: remove fields not in effective schema to prevent SQLite
901
+ // "no such column" errors when deleteField() is applied to non-schema fields
902
+ if (tableConfig.schema) {
903
+ for (const key of Object.keys(updateData)) {
904
+ if (!(key in effective)) delete updateData[key];
905
+ }
906
+ } else {
907
+ // Schemaless: ensure columns exist for all update fields
908
+ this.ensureSchemalessColumns(
909
+ name,
910
+ Object.keys(updateData).filter((k) => !(k in effective)),
911
+ );
912
+ }
913
+
914
+ // Serialize json-type fields to string for SQLite TEXT storage (BUG-006)
915
+ for (const [key, value] of Object.entries(updateData)) {
916
+ if (
917
+ effective[key]?.type === 'json' &&
918
+ value !== null &&
919
+ value !== undefined &&
920
+ typeof value === 'object' &&
921
+ !('$op' in value)
922
+ ) {
923
+ updateData[key] = JSON.stringify(value);
924
+ } else if (
925
+ effective[key]?.type === 'boolean' &&
926
+ value !== null &&
927
+ value !== undefined &&
928
+ (typeof value !== 'object' || !('$op' in value))
929
+ ) {
930
+ updateData[key] = value === true || value === 'true' || value === 1 || value === '1'
931
+ ? 1
932
+ : 0;
933
+ }
934
+ }
935
+
936
+ const { setClauses, params } = parseUpdateBody(updateData);
937
+ if (setClauses.length === 0) {
938
+ return c.json(existing[0]);
939
+ }
940
+
941
+ params.push(id);
942
+
943
+ // Run beforeUpdate hook if defined (#133 §6) — reuses authForUpdate from above
944
+ const tableHooks = getTableHooks(tableConfig ?? undefined);
945
+ if (tableHooks?.beforeUpdate) {
946
+ const hookCtx = this.buildHookCtx(name);
947
+ try {
948
+ const transformed = await tableHooks.beforeUpdate(
949
+ authForUpdate,
950
+ existing[0] as Record<string, unknown>,
951
+ updateData,
952
+ hookCtx,
953
+ );
954
+ if (transformed && typeof transformed === 'object') {
955
+ // Re-build SET clause from transformed data
956
+ const newUpdateData = { ...updateData, ...transformed } as Record<string, unknown>;
957
+ delete newUpdateData.id;
958
+ delete newUpdateData.createdAt;
959
+ const rebuilt = parseUpdateBody(newUpdateData);
960
+ setClauses.length = 0;
961
+ setClauses.push(...rebuilt.setClauses);
962
+ params.length = 0;
963
+ params.push(...rebuilt.params, id);
964
+ }
965
+ } catch (err) {
966
+ throw hookRejectedError(err, 'Update rejected by beforeUpdate hook.');
967
+ }
968
+ }
969
+
970
+ // Build SQL after hook processing so setClauses/params reflect any transformations
971
+ const sql = `UPDATE "${name}" SET ${setClauses.join(', ')} WHERE "id" = ?`;
972
+ this.sql(sql, ...params);
973
+
974
+ // Return updated record
975
+ const updated = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
976
+
977
+ // Emit database-live event
978
+ this.ctx.waitUntil(
979
+ this.emitDbLiveEvent(name, 'modified', id, updated[0] as Record<string, unknown>),
980
+ );
981
+
982
+ // Fire DB triggers asynchronously
983
+ const doOriginUpdate = this.doName ? parseDbDoName(this.doName) : { namespace: 'shared' };
984
+ this.ctx.waitUntil(
985
+ executeDbTriggers(
986
+ name,
987
+ 'update',
988
+ {
989
+ before: existing[0] as Record<string, unknown>,
990
+ after: updated[0] as Record<string, unknown>,
991
+ },
992
+ {
993
+ databaseNamespace: this.env.DATABASE,
994
+ authNamespace: this.env.AUTH,
995
+ kvNamespace: this.env.KV,
996
+ config: this.config,
997
+ serviceKey: this.getServiceKey(),
998
+ },
999
+ doOriginUpdate,
1000
+ ),
1001
+ );
1002
+
1003
+ // Run afterUpdate hook if defined (#133 §6)
1004
+ if (tableHooks?.afterUpdate) {
1005
+ const hookCtx = this.buildHookCtx(name);
1006
+ this.ctx.waitUntil(
1007
+ Promise.resolve(
1008
+ tableHooks.afterUpdate(
1009
+ existing[0] as Record<string, unknown>,
1010
+ updated[0] as Record<string, unknown>,
1011
+ hookCtx,
1012
+ ),
1013
+ ).catch(() => {
1014
+ /* best-effort */
1015
+ }),
1016
+ );
1017
+ }
1018
+
1019
+ return c.json(this.normalizeRow(updated[0] as Record<string, unknown>, tableConfig));
1020
+ });
1021
+
1022
+ // DELETE: DELETE /tables/:name/:id
1023
+ app.delete('/tables/:name/:id', async (c) => {
1024
+ const name = c.req.param('name');
1025
+ const id = c.req.param('id');
1026
+ this.ensureTableExists(name);
1027
+ const tableConfig = this.getTableConfig(name);
1028
+
1029
+ const existing = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
1030
+ if (existing.length === 0) {
1031
+ throw notFoundError(`Record ${id} not found.`);
1032
+ }
1033
+
1034
+ // §7 row-level delete rule (BUG-005) — SK bypasses
1035
+ const deleteRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
1036
+ const authForDelete = this.parseAuthContext(c.req.raw);
1037
+ if (deleteRules?.delete && !this.isServiceKeyRequest(c.req.raw)) {
1038
+ const canDelete = await this.evalRowRule(
1039
+ deleteRules.delete,
1040
+ authForDelete,
1041
+ existing[0] as Record<string, unknown>,
1042
+ );
1043
+ if (!canDelete)
1044
+ throw new EdgeBaseError(
1045
+ 403,
1046
+ `Access denied: 'delete' rule blocked record "${id}" in table "${name}".`,
1047
+ );
1048
+ }
1049
+
1050
+ // Run beforeDelete hook if defined (#133 §6)
1051
+ const tableHooks = getTableHooks(tableConfig ?? undefined);
1052
+ if (tableHooks?.beforeDelete) {
1053
+ const hookCtx = this.buildHookCtx(name);
1054
+ try {
1055
+ await tableHooks.beforeDelete(
1056
+ authForDelete,
1057
+ existing[0] as Record<string, unknown>,
1058
+ hookCtx,
1059
+ );
1060
+ } catch (err) {
1061
+ throw hookRejectedError(err, 'Delete rejected by beforeDelete hook.');
1062
+ }
1063
+ }
1064
+
1065
+ this.sql(`DELETE FROM "${name}" WHERE "id" = ?`, id);
1066
+
1067
+ // Emit database-live event
1068
+ this.ctx.waitUntil(
1069
+ this.emitDbLiveEvent(name, 'removed', id, existing[0] as Record<string, unknown>),
1070
+ );
1071
+
1072
+ // Fire DB triggers asynchronously
1073
+ const doOriginDelete = this.doName ? parseDbDoName(this.doName) : { namespace: 'shared' };
1074
+ this.ctx.waitUntil(
1075
+ executeDbTriggers(
1076
+ name,
1077
+ 'delete',
1078
+ { before: existing[0] as Record<string, unknown> },
1079
+ {
1080
+ databaseNamespace: this.env.DATABASE,
1081
+ authNamespace: this.env.AUTH,
1082
+ kvNamespace: this.env.KV,
1083
+ config: this.config,
1084
+ serviceKey: this.getServiceKey(),
1085
+ },
1086
+ doOriginDelete,
1087
+ ),
1088
+ );
1089
+
1090
+ // Run afterDelete hook if defined (#133 §6)
1091
+ if (tableHooks?.afterDelete) {
1092
+ const hookCtx = this.buildHookCtx(name);
1093
+ this.ctx.waitUntil(
1094
+ Promise.resolve(
1095
+ tableHooks.afterDelete(existing[0] as Record<string, unknown>, hookCtx),
1096
+ ).catch(() => {
1097
+ /* best-effort */
1098
+ }),
1099
+ );
1100
+ }
1101
+
1102
+ return c.json({ deleted: true });
1103
+ });
1104
+
1105
+ // BATCH: POST /tables/:name/batch
1106
+ app.post('/tables/:name/batch', async (c) => {
1107
+ const name = c.req.param('name');
1108
+ this.ensureTableExists(name);
1109
+ const tableConfig = this.getTableConfig(name);
1110
+ if (!tableConfig) {
1111
+ throw validationError(`Table '${name}' is not defined in the schema configuration.`);
1112
+ }
1113
+
1114
+ // upsertMany: ?upsert=true
1115
+ const upsertMode = c.req.query('upsert') === 'true';
1116
+ const conflictTarget = c.req.query('conflictTarget') || 'id';
1117
+
1118
+ // Validate conflictTarget if upsert mode
1119
+ if (upsertMode && conflictTarget !== 'id') {
1120
+ const eff = buildEffectiveSchema(tableConfig.schema);
1121
+ const targetField = eff[conflictTarget];
1122
+ if (!targetField) {
1123
+ throw validationError(`Field '${conflictTarget}' does not exist in schema.`);
1124
+ }
1125
+ if (!targetField.unique) {
1126
+ throw validationError(
1127
+ `Field '${conflictTarget}' is not unique. conflictTarget must be a unique field.`,
1128
+ );
1129
+ }
1130
+ }
1131
+
1132
+ const body = await c.req.json<{
1133
+ inserts?: Record<string, unknown>[];
1134
+ updates?: { id: string; data: Record<string, unknown> }[];
1135
+ deletes?: string[];
1136
+ }>();
1137
+
1138
+ // Batch size limit
1139
+ const MAX_BATCH_SIZE = 500;
1140
+ const totalOps =
1141
+ (body.inserts?.length ?? 0) + (body.updates?.length ?? 0) + (body.deletes?.length ?? 0);
1142
+ if (totalOps > MAX_BATCH_SIZE) {
1143
+ throw validationError(
1144
+ `Batch limit exceeded: ${totalOps} operations (max ${MAX_BATCH_SIZE}).`,
1145
+ );
1146
+ }
1147
+
1148
+ // Check rules for each operation type — SK bypasses
1149
+ // Insert: table-level (no row needed). Update/Delete: per-row inside transaction.
1150
+ const batchRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
1151
+ const batchAuth = this.parseAuthContext(c.req.raw);
1152
+ const isSKBatch = this.isServiceKeyRequest(c.req.raw);
1153
+ if (!isSKBatch) {
1154
+ if (body.inserts?.length && batchRules?.insert) {
1155
+ const canInsert = await this.evalRowRule(batchRules.insert, batchAuth, {});
1156
+ if (!canInsert)
1157
+ throw new EdgeBaseError(
1158
+ 403,
1159
+ `Access denied: 'insert' rule blocked batch insert on table "${name}".`,
1160
+ );
1161
+ }
1162
+ // update/delete rules are evaluated per-row inside the transaction below
1163
+ }
1164
+
1165
+ const results: Record<string, unknown> = {};
1166
+ // Store full rows before deletion for triggers (BUG-012) — declared outside transaction
1167
+ const deletedRows: Record<string, unknown>[] = [];
1168
+ // Store before-rows for batch updates so triggers receive { before, after } (like single-row updates)
1169
+ const updateBeforeRows: Map<string, Record<string, unknown>> = new Map();
1170
+
1171
+ // All-or-nothing: use transactionSync
1172
+ this.ctx.storage.transactionSync(() => {
1173
+ const now = new Date().toISOString();
1174
+ const effective = buildEffectiveSchema(tableConfig.schema);
1175
+
1176
+ // Inserts (or Upserts when ?upsert=true)
1177
+ if (body.inserts) results.inserted = [];
1178
+ if (body.inserts?.length) {
1179
+ const inserted = results.inserted as Record<string, unknown>[];
1180
+ for (const item of body.inserts) {
1181
+ const validation = validateInsert(item, tableConfig.schema);
1182
+ if (!validation.valid) {
1183
+ throw validationError(
1184
+ 'Batch insert request failed validation. See data for field-level errors.',
1185
+ Object.fromEntries(
1186
+ Object.entries(validation.errors).map(([k, v]) => [
1187
+ k,
1188
+ { code: 'invalid', message: v },
1189
+ ]),
1190
+ ),
1191
+ );
1192
+ }
1193
+
1194
+ const id = (item.id as string) || generateId();
1195
+ const record: Record<string, unknown> = { ...item, id };
1196
+ if ('createdAt' in effective) record.createdAt = now;
1197
+ if ('updatedAt' in effective) record.updatedAt = now;
1198
+
1199
+ for (const [fname, field] of Object.entries(effective)) {
1200
+ if (record[fname] === undefined && field.default !== undefined) {
1201
+ record[fname] = field.default;
1202
+ }
1203
+ }
1204
+
1205
+ // Schemaless: include all record keys
1206
+ let columns: string[];
1207
+ if (!tableConfig.schema) {
1208
+ columns = Object.keys(record);
1209
+ this.ensureSchemalessColumns(
1210
+ name,
1211
+ columns.filter((k) => !(k in effective)),
1212
+ );
1213
+ } else {
1214
+ columns = Object.keys(record).filter((k) => k in effective);
1215
+ }
1216
+ const values = columns.map((k) => {
1217
+ const v = record[k];
1218
+ // Serialize json-type fields to string for SQLite TEXT storage (BUG-006)
1219
+ if (
1220
+ effective[k]?.type === 'json' &&
1221
+ v !== null &&
1222
+ v !== undefined &&
1223
+ typeof v === 'object'
1224
+ ) {
1225
+ return JSON.stringify(v);
1226
+ }
1227
+ if (effective[k]?.type === 'boolean' && v !== null && v !== undefined) {
1228
+ return v === true || v === 'true' || v === 1 || v === '1' ? 1 : 0;
1229
+ }
1230
+ return v;
1231
+ });
1232
+ const placeholders = columns.map(() => '?').join(', ');
1233
+ const colStr = columns.map((c) => `"${c}"`).join(', ');
1234
+
1235
+ if (upsertMode) {
1236
+ // ON CONFLICT DO UPDATE
1237
+ const updateCols = columns.filter(
1238
+ (k) => k !== 'id' && k !== 'createdAt' && k !== conflictTarget,
1239
+ );
1240
+ const updateSet = updateCols.map((k) => `"${k}" = excluded."${k}"`).join(', ');
1241
+ const sql = updateSet
1242
+ ? `INSERT INTO "${name}" (${colStr}) VALUES (${placeholders}) ON CONFLICT("${conflictTarget}") DO UPDATE SET ${updateSet}`
1243
+ : `INSERT INTO "${name}" (${colStr}) VALUES (${placeholders}) ON CONFLICT("${conflictTarget}") DO NOTHING`;
1244
+ this.sql(sql, ...values);
1245
+ } else {
1246
+ this.sql(`INSERT INTO "${name}" (${colStr}) VALUES (${placeholders})`, ...values);
1247
+ }
1248
+ inserted.push(record);
1249
+ }
1250
+ }
1251
+
1252
+ // Updates (BUG-010: per-row rule evaluation, BUG-011: SELECT * after write for database-live/triggers)
1253
+ if (body.updates) results.updated = [];
1254
+ if (body.updates?.length) {
1255
+ const updated = results.updated as Record<string, unknown>[];
1256
+ for (const entry of body.updates) {
1257
+ if (!entry.id) {
1258
+ throw validationError('Each batch update entry must include an id.');
1259
+ }
1260
+ if (!entry.data || typeof entry.data !== 'object') {
1261
+ throw validationError('Each batch update entry must include a data object.');
1262
+ }
1263
+ const { id, data } = entry;
1264
+ const validation = validateUpdate(data, tableConfig.schema);
1265
+ if (!validation.valid) {
1266
+ throw validationError(
1267
+ 'Batch update request failed validation. See data for field-level errors.',
1268
+ Object.fromEntries(
1269
+ Object.entries(validation.errors).map(([k, v]) => [
1270
+ k,
1271
+ { code: 'invalid', message: v },
1272
+ ]),
1273
+ ),
1274
+ );
1275
+ }
1276
+
1277
+ // Per-row update rule evaluation (BUG-010)
1278
+ if (!isSKBatch && batchRules?.update && typeof batchRules.update === 'function') {
1279
+ const existing = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
1280
+ if (existing.length > 0) {
1281
+ try {
1282
+ const canUpdate = (
1283
+ batchRules.update as (
1284
+ auth: AuthContext | null,
1285
+ row: Record<string, unknown>,
1286
+ ) => boolean
1287
+ )(batchAuth, existing[0] as Record<string, unknown>);
1288
+ if (!canUpdate)
1289
+ throw new EdgeBaseError(
1290
+ 403,
1291
+ `Access denied: 'update' rule blocked record "${id}" in table "${name}".`,
1292
+ );
1293
+ } catch (e) {
1294
+ if (e instanceof EdgeBaseError) throw e;
1295
+ throw new EdgeBaseError(
1296
+ 403,
1297
+ `Access denied: 'update' rule blocked record "${id}" in table "${name}".`,
1298
+ );
1299
+ }
1300
+ }
1301
+ }
1302
+
1303
+ // Capture before-row for triggers (matches single-row update behaviour)
1304
+ const beforeRow = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
1305
+ if (beforeRow.length > 0) {
1306
+ updateBeforeRows.set(id as string, beforeRow[0] as Record<string, unknown>);
1307
+ }
1308
+
1309
+ const updateData = { ...data };
1310
+ delete updateData.id;
1311
+ delete updateData.createdAt;
1312
+ if ('updatedAt' in effective && effective.updatedAt?.onUpdate === 'now') {
1313
+ updateData.updatedAt = now;
1314
+ }
1315
+
1316
+ // Schemaless: ensure columns exist
1317
+ if (!tableConfig.schema) {
1318
+ this.ensureSchemalessColumns(
1319
+ name,
1320
+ Object.keys(updateData).filter((k) => !(k in effective)),
1321
+ );
1322
+ }
1323
+
1324
+ // Serialize json-type fields to string for SQLite TEXT storage (BUG-006)
1325
+ for (const [key, value] of Object.entries(updateData)) {
1326
+ if (
1327
+ effective[key]?.type === 'json' &&
1328
+ value !== null &&
1329
+ value !== undefined &&
1330
+ typeof value === 'object' &&
1331
+ !('$op' in value)
1332
+ ) {
1333
+ updateData[key] = JSON.stringify(value);
1334
+ } else if (
1335
+ effective[key]?.type === 'boolean' &&
1336
+ value !== null &&
1337
+ value !== undefined &&
1338
+ (typeof value !== 'object' || !('$op' in value))
1339
+ ) {
1340
+ updateData[key] = value === true || value === 'true' || value === 1 || value === '1'
1341
+ ? 1
1342
+ : 0;
1343
+ }
1344
+ }
1345
+
1346
+ const { setClauses, params } = parseUpdateBody(updateData);
1347
+ if (setClauses.length > 0) {
1348
+ params.push(id);
1349
+ this.sql(`UPDATE "${name}" SET ${setClauses.join(', ')} WHERE "id" = ?`, ...params);
1350
+ }
1351
+ // Read actual DB state after write for database-live/triggers (BUG-011)
1352
+ const afterRow = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
1353
+ updated.push(
1354
+ afterRow.length > 0 ? (afterRow[0] as Record<string, unknown>) : { id, ...data },
1355
+ );
1356
+ }
1357
+ }
1358
+
1359
+ // Deletes (BUG-010: per-row rule evaluation, BUG-012: full row for triggers)
1360
+ if (body.deletes) results.deleted = 0;
1361
+ if (body.deletes?.length) {
1362
+ for (const id of body.deletes) {
1363
+ const existing = [...this.sql(`SELECT * FROM "${name}" WHERE "id" = ?`, id)];
1364
+ if (existing.length > 0) {
1365
+ // Per-row delete rule evaluation (BUG-010)
1366
+ if (!isSKBatch && batchRules?.delete && typeof batchRules.delete === 'function') {
1367
+ try {
1368
+ const canDelete = (
1369
+ batchRules.delete as (
1370
+ auth: AuthContext | null,
1371
+ row: Record<string, unknown>,
1372
+ ) => boolean
1373
+ )(batchAuth, existing[0] as Record<string, unknown>);
1374
+ if (!canDelete)
1375
+ throw new EdgeBaseError(
1376
+ 403,
1377
+ `Access denied: 'delete' rule blocked record "${id}" in table "${name}".`,
1378
+ );
1379
+ } catch (e) {
1380
+ if (e instanceof EdgeBaseError) throw e;
1381
+ throw new EdgeBaseError(
1382
+ 403,
1383
+ `Access denied: 'delete' rule blocked record "${id}" in table "${name}".`,
1384
+ );
1385
+ }
1386
+ }
1387
+ deletedRows.push(existing[0] as Record<string, unknown>);
1388
+ }
1389
+ this.sql(`DELETE FROM "${name}" WHERE "id" = ?`, id);
1390
+ }
1391
+ results.deleted = body.deletes.length;
1392
+ }
1393
+ });
1394
+
1395
+ // Emit database-live events for batch operations
1396
+ const batchResults = results as Record<string, unknown>;
1397
+ const allChanges: Array<{
1398
+ type: 'added' | 'modified' | 'removed';
1399
+ docId: string;
1400
+ data: Record<string, unknown> | null;
1401
+ }> = [];
1402
+ if (Array.isArray(batchResults.inserted)) {
1403
+ for (const item of batchResults.inserted as Record<string, unknown>[]) {
1404
+ allChanges.push({ type: 'added', docId: item.id as string, data: item });
1405
+ }
1406
+ }
1407
+ if (Array.isArray(batchResults.updated)) {
1408
+ for (const item of batchResults.updated as Record<string, unknown>[]) {
1409
+ allChanges.push({ type: 'modified', docId: item.id as string, data: item });
1410
+ }
1411
+ }
1412
+ if (body.deletes?.length) {
1413
+ for (const id of body.deletes) {
1414
+ allChanges.push({ type: 'removed', docId: id, data: null });
1415
+ }
1416
+ }
1417
+
1418
+ const batchThreshold = resolveDbLiveBatchThreshold(this.config);
1419
+ if (allChanges.length >= batchThreshold) {
1420
+ //: batch_changes message
1421
+ this.ctx.waitUntil(this.emitDbLiveBatchEvent(name, allChanges));
1422
+ } else {
1423
+ // Below threshold: individual events (no overhead)
1424
+ for (const change of allChanges) {
1425
+ this.ctx.waitUntil(this.emitDbLiveEvent(name, change.type, change.docId, change.data));
1426
+ }
1427
+ }
1428
+
1429
+ // Fire DB triggers asynchronously for batch items
1430
+ const doOriginBatch = this.doName ? parseDbDoName(this.doName) : { namespace: 'shared' };
1431
+ const triggerContext = {
1432
+ databaseNamespace: this.env.DATABASE,
1433
+ authNamespace: this.env.AUTH,
1434
+ kvNamespace: this.env.KV,
1435
+ config: this.config,
1436
+ serviceKey: this.getServiceKey(),
1437
+ };
1438
+ if (Array.isArray(batchResults.inserted)) {
1439
+ for (const item of batchResults.inserted as Record<string, unknown>[]) {
1440
+ this.ctx.waitUntil(
1441
+ executeDbTriggers(name, 'insert', { after: item }, triggerContext, doOriginBatch),
1442
+ );
1443
+ }
1444
+ }
1445
+ if (Array.isArray(batchResults.updated)) {
1446
+ for (const item of batchResults.updated as Record<string, unknown>[]) {
1447
+ const beforeRow = updateBeforeRows.get(item.id as string);
1448
+ this.ctx.waitUntil(
1449
+ executeDbTriggers(
1450
+ name,
1451
+ 'update',
1452
+ { before: beforeRow, after: item },
1453
+ triggerContext,
1454
+ doOriginBatch,
1455
+ ),
1456
+ );
1457
+ }
1458
+ }
1459
+ // Use full row data for delete triggers (BUG-012)
1460
+ if (deletedRows.length > 0) {
1461
+ for (const row of deletedRows) {
1462
+ this.ctx.waitUntil(
1463
+ executeDbTriggers(name, 'delete', { before: row }, triggerContext, doOriginBatch),
1464
+ );
1465
+ }
1466
+ }
1467
+
1468
+ return c.json(results);
1469
+ });
1470
+
1471
+ // BATCH-BY-FILTER: POST /tables/:name/batch-by-filter
1472
+ app.post('/tables/:name/batch-by-filter', async (c) => {
1473
+ const name = c.req.param('name');
1474
+ this.ensureTableExists(name);
1475
+ const tableConfig = this.getTableConfig(name);
1476
+ if (!tableConfig) {
1477
+ throw validationError(`Table '${name}' is not defined in the schema configuration.`);
1478
+ }
1479
+
1480
+ const body = await c.req.json<{
1481
+ action: 'delete' | 'update';
1482
+ filter: FilterTuple[];
1483
+ orFilter?: FilterTuple[];
1484
+ update?: Record<string, unknown>;
1485
+ limit?: number;
1486
+ }>();
1487
+
1488
+ // Validate required fields
1489
+ if (!body.action || !['delete', 'update'].includes(body.action)) {
1490
+ throw new EdgeBaseError(
1491
+ 400,
1492
+ "batch-by-filter requires 'action' to be 'delete' or 'update'.",
1493
+ );
1494
+ }
1495
+ if (!body.filter || !Array.isArray(body.filter) || body.filter.length === 0) {
1496
+ throw new EdgeBaseError(400, "batch-by-filter requires 'filter' to be a non-empty array.");
1497
+ }
1498
+ if (body.action === 'update' && !body.update) {
1499
+ throw new EdgeBaseError(
1500
+ 400,
1501
+ "batch-by-filter with action 'update' requires 'update' data.",
1502
+ );
1503
+ }
1504
+
1505
+ // Row-level access rule check — SK bypasses
1506
+ const bfRules = getTableAccess(tableConfig ?? undefined) as TableRules | undefined;
1507
+ const bfAuth = this.parseAuthContext(c.req.raw);
1508
+ if (!this.isServiceKeyRequest(c.req.raw)) {
1509
+ const ruleFn = body.action === 'delete' ? bfRules?.delete : bfRules?.update;
1510
+ if (ruleFn) {
1511
+ // Pre-check with empty row: for table-level boolean/auth-only rules this is sufficient.
1512
+ // Per-row evaluation happens below inside the transaction after SELECT.
1513
+ const preCheck = await this.evalRowRule(ruleFn, bfAuth, {});
1514
+ if (!preCheck)
1515
+ throw new EdgeBaseError(
1516
+ 403,
1517
+ `Access denied: '${body.action}' rule blocked batch-by-filter on table "${name}".`,
1518
+ );
1519
+ } else if (this.config.release) {
1520
+ // Release mode: no rule defined → deny
1521
+ throw new EdgeBaseError(
1522
+ 403,
1523
+ `Access denied. No '${body.action}' rule defined for '${name}'.`,
1524
+ );
1525
+ }
1526
+ }
1527
+
1528
+ const limit = Math.min(body.limit ?? 500, 500);
1529
+ let processed = 0;
1530
+ let succeeded = 0;
1531
+
1532
+ // Store the rule function and auth for per-row evaluation inside the transaction
1533
+ const bfRuleFn =
1534
+ !this.isServiceKeyRequest(c.req.raw) && bfRules
1535
+ ? body.action === 'delete'
1536
+ ? bfRules.delete
1537
+ : bfRules.update
1538
+ : undefined;
1539
+
1540
+ this.ctx.storage.transactionSync(() => {
1541
+ // Find matching records
1542
+ const { sql: selectSql, params: selectParams } = buildListQuery(name, {
1543
+ filters: body.filter,
1544
+ orFilters: body.orFilter,
1545
+ pagination: { limit },
1546
+ });
1547
+
1548
+ const allRows = [...this.sql(selectSql, ...selectParams)];
1549
+ processed = allRows.length;
1550
+
1551
+ if (allRows.length === 0) return;
1552
+
1553
+ // Per-row rule evaluation (BUG-009): filter rows that pass the rule
1554
+ let rows = allRows;
1555
+ if (bfRuleFn && typeof bfRuleFn === 'function') {
1556
+ rows = allRows.filter((r) => {
1557
+ try {
1558
+ const result = (
1559
+ bfRuleFn as (auth: AuthContext | null, row: Record<string, unknown>) => boolean
1560
+ )(bfAuth, r as Record<string, unknown>);
1561
+ return Boolean(result);
1562
+ } catch {
1563
+ return false; // fail-closed
1564
+ }
1565
+ });
1566
+ if (rows.length === 0) {
1567
+ throw new EdgeBaseError(
1568
+ 403,
1569
+ `Access denied: '${body.action}' rule blocked all matched rows in table "${name}".`,
1570
+ );
1571
+ }
1572
+ }
1573
+
1574
+ const ids = rows.map((r) => (r as Record<string, unknown>).id as string);
1575
+ const placeholders = ids.map(() => '?').join(', ');
1576
+
1577
+ if (body.action === 'delete') {
1578
+ this.sql(`DELETE FROM "${name}" WHERE "id" IN (${placeholders})`, ...ids);
1579
+ succeeded = ids.length;
1580
+ } else if (body.action === 'update' && body.update) {
1581
+ const effective = buildEffectiveSchema(tableConfig.schema);
1582
+ const updateData = { ...body.update };
1583
+ if ('updatedAt' in effective && effective.updatedAt?.onUpdate === 'now') {
1584
+ updateData.updatedAt = new Date().toISOString();
1585
+ }
1586
+
1587
+ // Schemaless: ensure columns exist
1588
+ if (!tableConfig.schema) {
1589
+ this.ensureSchemalessColumns(
1590
+ name,
1591
+ Object.keys(updateData).filter((k) => !(k in effective)),
1592
+ );
1593
+ }
1594
+
1595
+ // Serialize json-type fields to string for SQLite TEXT storage (BUG-006)
1596
+ for (const [key, value] of Object.entries(updateData)) {
1597
+ if (
1598
+ effective[key]?.type === 'json' &&
1599
+ value !== null &&
1600
+ value !== undefined &&
1601
+ typeof value === 'object' &&
1602
+ !('$op' in value)
1603
+ ) {
1604
+ updateData[key] = JSON.stringify(value);
1605
+ } else if (
1606
+ effective[key]?.type === 'boolean' &&
1607
+ value !== null &&
1608
+ value !== undefined &&
1609
+ (typeof value !== 'object' || !('$op' in value))
1610
+ ) {
1611
+ updateData[key] = value === true || value === 'true' || value === 1 || value === '1'
1612
+ ? 1
1613
+ : 0;
1614
+ }
1615
+ }
1616
+
1617
+ const { setClauses, params } = parseUpdateBody(updateData);
1618
+ if (setClauses.length > 0) {
1619
+ this.sql(
1620
+ `UPDATE "${name}" SET ${setClauses.join(', ')} WHERE "id" IN (${placeholders})`,
1621
+ ...params,
1622
+ ...ids,
1623
+ );
1624
+ }
1625
+ succeeded = ids.length;
1626
+ }
1627
+ });
1628
+
1629
+ // Emit database-live events for batch-by-filter
1630
+ // Note: we don't have individual record data here, emit summary event
1631
+ if (succeeded > 0) {
1632
+ const eventType = body.action === 'delete' ? 'removed' : 'modified';
1633
+ const batchThreshold = resolveDbLiveBatchThreshold(this.config);
1634
+ if (succeeded >= batchThreshold) {
1635
+ //: batch_changes for large batch-by-filter operations
1636
+ this.ctx.waitUntil(
1637
+ this.emitDbLiveBatchEvent(name, [
1638
+ {
1639
+ type: eventType as 'modified' | 'removed',
1640
+ docId: '_bulk',
1641
+ data: { action: body.action, count: succeeded },
1642
+ },
1643
+ ]),
1644
+ );
1645
+ } else {
1646
+ this.ctx.waitUntil(
1647
+ this.emitDbLiveEvent(name, eventType as 'modified' | 'removed', '_bulk', {
1648
+ action: body.action,
1649
+ count: succeeded,
1650
+ }),
1651
+ );
1652
+ }
1653
+ }
1654
+
1655
+ return c.json({ processed, succeeded });
1656
+ });
1657
+
1658
+ // INTERNAL: POST /internal/sql — raw SQL execution for server SDK
1659
+ // Only accessible via Worker-level /api/sql route which validates Service Key.
1660
+ // Parameterized queries enforced: query + params are separate.
1661
+ app.post('/internal/sql', async (c) => {
1662
+ const { query, params } = await c.req.json<{ query: string; params?: unknown[] }>();
1663
+ if (!query || typeof query !== 'string') {
1664
+ return c.json({ code: 400, message: 'query is required' }, 400);
1665
+ }
1666
+ try {
1667
+ const rows = [...this.sql(query, ...(params ?? []))];
1668
+ return c.json({ rows });
1669
+ } catch (err) {
1670
+ const message = err instanceof Error ? err.message : 'SQL execution failed';
1671
+ return c.json({ code: 500, message }, 500);
1672
+ }
1673
+ });
1674
+
1675
+ // NOTE: /internal/upsert-user-public, /internal/batch-delete-user-public,
1676
+ // /internal/meta-get, /internal/meta-set removed — all handled by AUTH_DB D1 directly.
1677
+ // getMeta()/setMeta() still exist for per-DO schema hash tracking.
1678
+
1679
    // INTERNAL: POST /internal/execute-function — execute a registered function on this DO.
    // Looks the function up in the global registry, builds an execution context
    // with no auth (internal invocation), and races the handler against a
    // configurable timeout.
    app.post('/internal/execute-function', async (c) => {
      const { functionName, scheduledTime, cron } = await c.req.json<{
        functionName: string;
        scheduledTime?: string;
        cron?: string;
      }>();

      // Unknown function names are a 404, not a 500.
      const registry = getRegisteredFunctions();
      const definition = registry.get(functionName);
      if (!definition) {
        throw new EdgeBaseError(404, `Function '${functionName}' not found.`);
      }

      // Build context using buildFunctionContext (§5: buildDbContext removed)
      const doOriginFn = this.doName ? parseDbDoName(this.doName) : { namespace: 'shared' };
      const ctx = buildFunctionContext({
        // Synthetic request: cron-driven runs have no real inbound Request.
        request: new Request('http://internal/execute-function/' + functionName),
        auth: null,
        databaseNamespace: this.env.DATABASE,
        authNamespace: this.env.AUTH,
        kvNamespace: this.env.KV,
        env: this.env as never,
        executionCtx: this.ctx as never,
        config: this.config,
        serviceKey: this.getServiceKey(),
        triggerInfo: { namespace: doOriginFn.namespace, id: doOriginFn.id },
      });
      // Expose schedule metadata to the handler via ctx.data.
      (ctx as unknown as Record<string, unknown>).data = { scheduledTime, cron };

      // Apply schedule function timeout (default: 10s)
      const timeoutStr = this.config.functions?.scheduleFunctionTimeout ?? '10s';
      // NOTE(review): assumes parseDuration returns seconds — confirm against
      // its definition.
      const timeoutMs = parseDuration(timeoutStr) * 1000;
      // NOTE(review): the timeout timer is never cleared and the losing handler
      // promise keeps running after a timeout rejection — confirm this is
      // acceptable in the Workers runtime.
      await Promise.race([
        definition.handler(ctx),
        new Promise((_, reject) =>
          setTimeout(
            () =>
              reject(new Error(`Schedule function '${functionName}' timed out (${timeoutStr})`)),
            timeoutMs,
          ),
        ),
      ]);
      return c.json({ ok: true, function: functionName });
    });
1724
+
1725
+ // NOTE: /internal/init-schedule removed — scheduling now uses Cloudflare Cron Triggers
1726
+ // (see index.ts `scheduled` event handler). No alarm-based scheduling on db:_system.
1727
+
1728
+ // ─── Backup/Restore ───
1729
+
1730
+ // GET /internal/backup/dump — export all tables as JSON
1731
+ app.get('/internal/backup/dump', (c) => {
1732
+ const tables: Record<string, unknown[]> = {};
1733
+ const schema: Record<string, string> = {};
1734
+
1735
+ // Get all user tables (exclude internal SQLite tables and FTS5 shadow tables)
1736
+ const tableRows = [
1737
+ ...this.sql(
1738
+ `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%' ORDER BY name`,
1739
+ ),
1740
+ ];
1741
+
1742
+ // Detect FTS5 virtual tables to exclude their shadow tables
1743
+ const ftsVirtualTables = [
1744
+ ...this.sql(`SELECT name FROM sqlite_master WHERE type='table' AND sql LIKE '%fts5%'`),
1745
+ ].map((r) => r.name as string);
1746
+
1747
+ // FTS5 shadow table suffixes
1748
+ const ftsSuffixes = ['_config', '_content', '_data', '_docsize', '_idx'];
1749
+ const isFtsShadow = (name: string) =>
1750
+ ftsVirtualTables.some((fts) => ftsSuffixes.some((s) => name === `${fts}${s}`));
1751
+
1752
+ for (const row of tableRows) {
1753
+ const tableName = row.name as string;
1754
+
1755
+ // Skip FTS5 shadow tables (managed internally by FTS5 virtual table)
1756
+ if (isFtsShadow(tableName)) continue;
1757
+
1758
+ // Collect DDL schema (informational — not used in restore)
1759
+ const ddlRows = [
1760
+ ...this.sql(`SELECT sql FROM sqlite_master WHERE type='table' AND name=?`, tableName),
1761
+ ];
1762
+ if (ddlRows.length > 0 && ddlRows[0].sql) {
1763
+ schema[tableName] = ddlRows[0].sql as string;
1764
+ }
1765
+
1766
+ const rows = [...this.sql(`SELECT * FROM "${tableName}"`)];
1767
+
1768
+ // Base64-encode any Uint8Array/ArrayBuffer values (BLOB safety)
1769
+ const encoded = rows.map((r) => {
1770
+ const record: Record<string, unknown> = {};
1771
+ for (const [k, v] of Object.entries(r)) {
1772
+ if (v instanceof Uint8Array || v instanceof ArrayBuffer) {
1773
+ const bytes = v instanceof ArrayBuffer ? new Uint8Array(v) : v;
1774
+ record[k] = { __blob__: true, data: btoa(String.fromCharCode(...bytes)) };
1775
+ } else {
1776
+ record[k] = v;
1777
+ }
1778
+ }
1779
+ return record;
1780
+ });
1781
+ tables[tableName] = encoded;
1782
+ }
1783
+
1784
+ return c.json({
1785
+ doName: this.doName,
1786
+ doType: 'database',
1787
+ schema,
1788
+ tables,
1789
+ timestamp: new Date().toISOString(),
1790
+ });
1791
+ });
1792
+
1793
+ // GET /internal/backup/list-ids — list all record IDs (or distinct column values) in a table
1794
+ // Optional ?column=fieldName to get DISTINCT values of a specific column instead of id
1795
+ app.get('/internal/backup/list-ids', (c) => {
1796
+ const table = c.req.query('table');
1797
+ if (!table) return c.json({ ids: [] });
1798
+
1799
+ // Validate table name to prevent SQL injection (alphanumeric + underscore only)
1800
+ if (!/^[a-zA-Z_]\w*$/.test(table)) return c.json({ ids: [] });
1801
+
1802
+ const column = c.req.query('column') || 'id';
1803
+ // Validate column name to prevent SQL injection (alphanumeric + underscore only)
1804
+ if (!/^[a-zA-Z_]\w*$/.test(column)) return c.json({ ids: [] });
1805
+
1806
+ try {
1807
+ const rows = [...this.sql(`SELECT DISTINCT "${column}" AS val FROM "${table}"`)];
1808
+ return c.json({ ids: rows.map((r) => r.val as string) });
1809
+ } catch {
1810
+ return c.json({ ids: [] });
1811
+ }
1812
+ });
1813
+
1814
+ // POST /internal/backup/wipe — drop all user tables (for orphan DO cleanup)
1815
+ app.post('/internal/backup/wipe', (c) => {
1816
+ this.ctx.storage.transactionSync(() => {
1817
+ const tables = [
1818
+ ...this.sql(
1819
+ `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%' ORDER BY name`,
1820
+ ),
1821
+ ];
1822
+ const views = [
1823
+ ...this.sql(`SELECT name FROM sqlite_master WHERE type='view' ORDER BY name`),
1824
+ ];
1825
+ const triggers = [
1826
+ ...this.sql(`SELECT name FROM sqlite_master WHERE type='trigger' ORDER BY name`),
1827
+ ];
1828
+ for (const row of triggers) this.sql(`DROP TRIGGER IF EXISTS "${row.name}"`);
1829
+ for (const row of views) this.sql(`DROP VIEW IF EXISTS "${row.name}"`);
1830
+ for (const row of tables) this.sql(`DROP TABLE IF EXISTS "${row.name}"`);
1831
+ });
1832
+ return c.json({ ok: true });
1833
+ });
1834
+
1835
    // POST /internal/drop-all — delete all DO SQLite storage
    // Used to clean up orphaned Isolated DOs when a user account is deleted.
    // Note: ctx.storage.deleteAll() clears all DO storage but the DO instance
    // itself remains on Cloudflare infrastructure (idle DOs are free).
    // Unlike /internal/backup/wipe (which drops only user tables/views/triggers),
    // this removes everything the DO has stored.
    app.post('/internal/drop-all', async (c) => {
      await this.ctx.storage.deleteAll();
      return c.json({ ok: true });
    });
1843
+
1844
    // POST /internal/backup/restore — Wipe & Restore all tables from backup.
    // The entire wipe + schema re-init + data insert runs inside a single
    // transactionSync, so a failed restore rolls back to the pre-restore state.
    app.post('/internal/backup/restore', async (c) => {
      const body = await c.req.json<{
        tables: Record<string, Array<Record<string, unknown>>>;
      }>();

      this.ctx.storage.transactionSync(() => {
        // 1. Drop all existing user tables (reverse order for FK safety)
        const existingTables = [
          ...this.sql(
            `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' AND name NOT LIKE '_cf_%' ORDER BY name`,
          ),
        ];
        // Drop legacy views (backward compat cleanup)
        const existingViews = [
          ...this.sql(`SELECT name FROM sqlite_master WHERE type='view' ORDER BY name`),
        ];
        for (const row of existingViews) {
          this.sql(`DROP VIEW IF EXISTS "${row.name}"`);
        }
        // Drop triggers
        const existingTriggers = [
          ...this.sql(`SELECT name FROM sqlite_master WHERE type='trigger' ORDER BY name`),
        ];
        for (const row of existingTriggers) {
          this.sql(`DROP TRIGGER IF EXISTS "${row.name}"`);
        }
        for (const row of existingTables) {
          this.sql(`DROP TABLE IF EXISTS "${row.name}"`);
        }

        // 2. Re-run schema init to create tables with current config schema
        this.initialized = false;
        this.initializeSchema();
        this.initialized = true;

        // 3. Insert backup data into tables
        for (const [tableName, rows] of Object.entries(body.tables)) {
          if (rows.length === 0) continue;

          // Check if table exists after schema init
          const tableExists = [
            ...this.sql(`SELECT name FROM sqlite_master WHERE type='table' AND name=?`, tableName),
          ];
          if (tableExists.length === 0) {
            // Table from backup doesn't exist in current schema — skip
            // Also covers FTS5 shadow tables that shouldn't be restored manually
            continue;
          }

          for (const row of rows) {
            // Decode base64 BLOB values (the { __blob__: true, data } wrapper
            // written by /internal/backup/dump)
            const decoded: Record<string, unknown> = {};
            for (const [k, v] of Object.entries(row)) {
              if (v && typeof v === 'object' && '__blob__' in (v as Record<string, unknown>)) {
                const b64 = (v as { data: string }).data;
                const binary = atob(b64);
                const bytes = new Uint8Array(binary.length);
                for (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i);
                decoded[k] = bytes;
              } else {
                decoded[k] = v;
              }
            }

            // INSERT OR REPLACE keeps the restore idempotent on re-runs.
            const columns = Object.keys(decoded);
            const values = columns.map((col) => decoded[col]);
            const placeholders = columns.map(() => '?').join(', ');
            const colStr = columns.map((col) => `"${col}"`).join(', ');
            this.sql(
              `INSERT OR REPLACE INTO "${tableName}" (${colStr}) VALUES (${placeholders})`,
              ...values,
            );
          }
        }

        // 4. Persist doName after restore
        if (this.doName) {
          this.setMeta('doName', this.doName);
        }
      });

      // `restored` counts tables present in the payload, including any that
      // were skipped because they no longer exist in the current schema.
      return c.json({ ok: true, restored: Object.keys(body.tables).length });
    });
1928
+
1929
    // Error handler: maps known error shapes to JSON responses, everything else
    // to a logged 500.
    app.onError((err, c) => {
      if (err instanceof EdgeBaseError) {
        // EdgeBaseError carries its HTTP status in `code`; the `as 400` cast
        // only satisfies Hono's status-code literal type.
        return c.json(err.toJSON(), err.code as 400);
      }
      // Fallback: check for code property (duck-typing)
      if ('code' in err && typeof (err as Record<string, unknown>).code === 'number') {
        const e = err as { code: number; message: string; data?: unknown };
        return c.json({ code: e.code, message: e.message, data: e.data }, e.code as 200);
      }
      // Unknown error: log full detail server-side, return a generic message.
      console.error('DatabaseDO Error:', err);
      return c.json({ code: 500, message: 'Internal server error.' }, 500);
    });
1942
+
1943
+ return app;
1944
+ }
1945
+
1946
+ // ─── Alarm Handler ───
1947
+ // NOTE: Schedule alarm processing removed — now handled by Cloudflare Cron Triggers
1948
+ // (see index.ts `scheduled` event handler). The alarm() method is kept as a no-op
1949
+ // for any existing alarms that may fire during migration.
1950
+
1951
  /** No-op — alarm-based scheduling removed in favor of Cron Triggers. Kept so
   *  alarms set by older versions fire harmlessly during migration. */
  async alarm(): Promise<void> {
    // No-op — alarm-based scheduling removed in favor of Cron Triggers.
  }
1954
+
1955
+ // ─── Helper Methods ───
1956
+
1957
  /**
   * Execute SQL query on DO's SQLite storage.
   * Returns the exec() cursor; callers materialize rows by spreading it
   * (`[...this.sql(...)]`).
   */
  private sql(query: string, ...params: unknown[]): Iterable<Record<string, unknown>> {
    return this.ctx.storage.sql.exec(query, ...params);
  }
1961
+
1962
+ /** Execute multi-statement SQL (separated by semicolons, trigger-aware). */
1963
+ private execMulti(ddl: string): void {
1964
+ // If this DDL contains a BEGIN...END block (trigger), execute as single statement
1965
+ const upper = ddl.toUpperCase();
1966
+ if (upper.includes('BEGIN') && upper.includes('END')) {
1967
+ const clean = ddl.replace(/;\s*$/, '').trim();
1968
+ if (clean.length > 0) this.sql(clean);
1969
+ return;
1970
+ }
1971
+
1972
+ // Otherwise, split on semicolons
1973
+ const statements = ddl
1974
+ .split(';')
1975
+ .map((s) => s.trim())
1976
+ .filter((s) => s.length > 0);
1977
+ for (const stmt of statements) {
1978
+ this.sql(stmt);
1979
+ }
1980
+ }
1981
+
1982
+ /** Get a meta value from _meta table. */
1983
+ private getMeta(key: string): string | null {
1984
+ const rows = [...this.sql('SELECT "value" FROM "_meta" WHERE "key" = ?', key)];
1985
+ return rows.length > 0 ? (rows[0].value as string) : null;
1986
+ }
1987
+
1988
+ /** Set a meta value in _meta table. */
1989
+ private setMeta(key: string, value: string): void {
1990
+ this.sql('INSERT OR REPLACE INTO "_meta" ("key", "value") VALUES (?, ?)', key, value);
1991
+ }
1992
+
1993
  /** Parse config from env — delegates to global singleton (§13). */
  private parseConfig(env: DOEnv): EdgeBaseConfig {
    // Thin indirection kept so call sites read as "parse config"; the actual
    // resolution lives in the shared getGlobalConfig singleton.
    return getGlobalConfig(env);
  }
1997
+
1998
+ // ─── Database Live Event Emission ───
1999
+
2000
  /**
   * Emit a CUD event to DatabaseLiveDO for real-time subscriptions.
   * Fire-and-forget: errors are silently ignored to avoid blocking CUD ops.
   * Sends to both table channel and document channel (dual propagation).
   *
   * @param table table in which the change occurred
   * @param type  change kind
   * @param docId changed record id, or '_bulk' for aggregate summary events
   * @param data  event payload (row snapshot; null for removals)
   */
  private emitDbLiveEvent(
    table: string,
    type: 'added' | 'modified' | 'removed',
    docId: string,
    data: Record<string, unknown> | null,
  ): Promise<void> {
    const eventBase = {
      type,
      table,
      docId,
      data,
      timestamp: new Date().toISOString(),
    };

    // The DO name encodes the namespace ('shared' or 'namespace:id'); the id
    // part scopes channels for non-shared DOs.
    const { namespace, id } = this.doName
      ? parseDbDoName(this.doName)
      : { namespace: 'shared' as string, id: undefined as string | undefined };

    const tableChannel = buildDbLiveChannel(namespace, table, id);
    const deliveries = [this.sendToDatabaseLiveDO({ ...eventBase, channel: tableChannel })];

    // Document channel: dblive:{namespace}:{table}:{docId} (skip for bulk events)
    if (docId !== '_bulk') {
      const docChannel = buildDbLiveChannel(namespace, table, id, docId);
      deliveries.push(this.sendToDatabaseLiveDO({ ...eventBase, channel: docChannel }));
    }

    return Promise.all(deliveries).then(() => undefined);
  }
2034
+
2035
+ /**
2036
+ * Emit a batch of CUD events as a single batch_changes message.
2037
+ * Sends to DatabaseLiveDO which forwards to subscribers based on SDK version negotiation.
2038
+ */
2039
+ private emitDbLiveBatchEvent(
2040
+ table: string,
2041
+ changes: Array<{
2042
+ type: 'added' | 'modified' | 'removed';
2043
+ docId: string;
2044
+ data: Record<string, unknown> | null;
2045
+ }>,
2046
+ ): Promise<void> {
2047
+ const { namespace, id } = this.doName
2048
+ ? parseDbDoName(this.doName)
2049
+ : { namespace: 'shared' as string, id: undefined as string | undefined };
2050
+ const tableChannel = buildDbLiveChannel(namespace, table, id);
2051
+ const event = {
2052
+ type: 'batch_changes' as const,
2053
+ channel: tableChannel,
2054
+ table,
2055
+ changes: changes.map((c) => ({
2056
+ type: c.type,
2057
+ docId: c.docId,
2058
+ data: c.data,
2059
+ timestamp: new Date().toISOString(),
2060
+ })),
2061
+ total: changes.length,
2062
+ };
2063
+ return this.sendToDatabaseLiveDO(event, '/internal/batch-event');
2064
+ }
2065
+
2066
  /**
   * POST an event payload to the DatabaseLiveDO hub.
   * Fire-and-forget: both synchronous throws (stub lookup) and asynchronous
   * fetch failures resolve to undefined so database operations never block.
   *
   * @param event serialized as the JSON request body
   * @param path  hub endpoint; defaults to the single-event route
   */
  private sendToDatabaseLiveDO(
    event: Record<string, unknown>,
    path = '/internal/event',
  ): Promise<void> {
    try {
      const doId = this.env.DATABASE_LIVE.idFromName(DATABASE_LIVE_HUB_DO_NAME);
      const stub = this.env.DATABASE_LIVE.get(doId);
      return stub
        .fetch(`http://internal${path}`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify(event),
        })
        .then(() => undefined)
        .catch(() => undefined);
    } catch {
      // Ignore — database live should not block database operations
      return Promise.resolve();
    }
  }
2086
+
2087
+ /**
2088
+ * Send broadcast event to DatabaseLiveDO.
2089
+ * Sends broadcast events to DatabaseLiveDO hub for channel broadcasting.
2090
+ */
2091
+ private sendBroadcastToDatabaseLiveDO(
2092
+ _channel: string,
2093
+ event: Record<string, unknown>,
2094
+ path = '/internal/broadcast',
2095
+ ): Promise<void> {
2096
+ try {
2097
+ const doId = this.env.DATABASE_LIVE.idFromName('database-live:hub');
2098
+ const stub = this.env.DATABASE_LIVE.get(doId);
2099
+ return stub
2100
+ .fetch(`http://internal${path}`, {
2101
+ method: 'POST',
2102
+ headers: { 'Content-Type': 'application/json' },
2103
+ body: JSON.stringify(event),
2104
+ })
2105
+ .then(() => undefined)
2106
+ .catch(() => undefined);
2107
+ } catch {
2108
+ // Ignore — broadcast should not block database operations
2109
+ return Promise.resolve();
2110
+ }
2111
+ }
2112
+
2113
+ /**
2114
+ * Get tables managed by this DO instance (§1,).
2115
+ * Returns all tables from the DB namespace that matches this DO's name.
2116
+ * DO name format: 'shared' (static) or 'namespace:id' (dynamic).
2117
+ */
2118
+ private getMyTables(): Record<string, TableConfig> {
2119
+ if (!this.config.databases) return {};
2120
+
2121
+ const { namespace } = parseDbDoName(this.doName);
2122
+
2123
+ const dbBlock = this.config.databases[namespace];
2124
+ if (!dbBlock?.tables) return {};
2125
+
2126
+ return dbBlock.tables as Record<string, TableConfig>;
2127
+ }
2128
+
2129
+ /** Get a specific table config (§1,). */
2130
+ private getTableConfig(name: string): TableConfig | null {
2131
+ if (!this.config.databases) return null;
2132
+ for (const dbBlock of Object.values(this.config.databases)) {
2133
+ const tableConfig = dbBlock.tables?.[name];
2134
+ if (tableConfig) return tableConfig as TableConfig;
2135
+ }
2136
+ return null;
2137
+ }
2138
+
2139
+ /** Ensure a table exists in this DO (throw if not). */
2140
+ private ensureTableExists(name: string): void {
2141
+ // We rely on the schema init having already created the table
2142
+ const tables = [
2143
+ ...this.sql(`SELECT name FROM sqlite_master WHERE type = 'table' AND name = ?`, name),
2144
+ ];
2145
+ if (tables.length === 0) {
2146
+ throw notFoundError(`Table "${name}" not found in this DO.`);
2147
+ }
2148
+ }
2149
+
2150
+ /**
2151
+ * Normalize a SQLite row to correct JS types. (BUG-004)
2152
+ * SQLite stores booleans as 0/1 integers or string ("false"/"true") depending on
2153
+ * how the value was originally inserted. Schema-driven: only converts known boolean/number fields.
2154
+ * Falls back to raw value for unknown fields (schemaless tables).
2155
+ */
2156
+ private normalizeRow(
2157
+ row: Record<string, unknown>,
2158
+ tableConfig: TableConfig | null,
2159
+ ): Record<string, unknown> {
2160
+ if (!tableConfig?.schema) return row;
2161
+ const effective = buildEffectiveSchema(tableConfig.schema);
2162
+ const result: Record<string, unknown> = {};
2163
+ for (const [key, value] of Object.entries(row)) {
2164
+ const fieldDef = effective[key];
2165
+ if (!fieldDef) {
2166
+ result[key] = value;
2167
+ continue;
2168
+ }
2169
+ if (fieldDef.type === 'boolean') {
2170
+ // SQLite stores 0/1 or string. Normalize to JS boolean.
2171
+ if (value === 1 || value === '1' || value === 'true' || value === true) {
2172
+ result[key] = true;
2173
+ } else if (value === 0 || value === '0' || value === 'false' || value === false) {
2174
+ result[key] = false;
2175
+ } else {
2176
+ result[key] = value === null ? null : Boolean(value);
2177
+ }
2178
+ } else if (fieldDef.type === 'number') {
2179
+ // SQLite may return numbers as strings in some edge cases
2180
+ result[key] = value === null ? null : Number(value);
2181
+ } else if (fieldDef.type === 'json') {
2182
+ // Parse JSON strings back to objects (BUG-006)
2183
+ if (value === null || value === undefined) {
2184
+ result[key] = value;
2185
+ } else if (typeof value === 'string') {
2186
+ try {
2187
+ result[key] = JSON.parse(value);
2188
+ } catch {
2189
+ result[key] = value; // Not valid JSON — return raw string
2190
+ }
2191
+ } else {
2192
+ result[key] = value;
2193
+ }
2194
+ } else {
2195
+ result[key] = value;
2196
+ }
2197
+ }
2198
+ return result;
2199
+ }
2200
+
2201
+ /**
2202
+ * Evaluate a single table-level rule against an auth context and row. (BUG-005)
2203
+ *
2204
+ * rule is typed as the union of all TableRules fn signatures:
2205
+ * (auth, row) => boolean (for read/update/delete)
2206
+ * or boolean (shorthand allow/deny).
2207
+ * Uses a 50ms hard timeout — fail-closed on timeout or error (§12①).
2208
+ */
2209
+ private async evalRowRule(
2210
+ rule:
2211
+ | ((auth: AuthContext | null, row: Record<string, unknown>) => boolean | Promise<boolean>)
2212
+ | boolean
2213
+ | undefined,
2214
+ auth: AuthContext | null,
2215
+ row: Record<string, unknown>,
2216
+ ): Promise<boolean> {
2217
+ if (rule === undefined || rule === null) return true; // no rule = allow
2218
+ if (typeof rule === 'boolean') return rule;
2219
+ try {
2220
+ const result = await Promise.race([
2221
+ Promise.resolve(rule(auth, row)),
2222
+ new Promise<never>((_, reject) =>
2223
+ setTimeout(() => reject(new Error('Rule evaluation timeout')), 50),
2224
+ ),
2225
+ ]);
2226
+ return Boolean(result);
2227
+ } catch {
2228
+ return false; // timeout or error → deny (fail-closed)
2229
+ }
2230
+ }
2231
+
2232
+ /** Normalize an array of rows. */
2233
+ private normalizeRows(
2234
+ rows: Record<string, unknown>[],
2235
+ tableConfig: TableConfig | null,
2236
+ ): Record<string, unknown>[] {
2237
+ if (!tableConfig?.schema) return rows;
2238
+ return rows.map((row) => this.normalizeRow(row, tableConfig));
2239
+ }
2240
+ }