alepha 0.14.3 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (317)
  1. package/README.md +2 -5
  2. package/dist/api/audits/index.d.ts +620 -811
  3. package/dist/api/audits/index.d.ts.map +1 -1
  4. package/dist/api/files/index.d.ts +185 -377
  5. package/dist/api/files/index.d.ts.map +1 -1
  6. package/dist/api/files/index.js +0 -1
  7. package/dist/api/files/index.js.map +1 -1
  8. package/dist/api/jobs/index.d.ts +245 -435
  9. package/dist/api/jobs/index.d.ts.map +1 -1
  10. package/dist/api/notifications/index.d.ts +238 -429
  11. package/dist/api/notifications/index.d.ts.map +1 -1
  12. package/dist/api/parameters/index.d.ts +236 -427
  13. package/dist/api/parameters/index.d.ts.map +1 -1
  14. package/dist/api/users/index.browser.js +1 -2
  15. package/dist/api/users/index.browser.js.map +1 -1
  16. package/dist/api/users/index.d.ts +1010 -1196
  17. package/dist/api/users/index.d.ts.map +1 -1
  18. package/dist/api/users/index.js +178 -151
  19. package/dist/api/users/index.js.map +1 -1
  20. package/dist/api/verifications/index.d.ts +17 -17
  21. package/dist/api/verifications/index.d.ts.map +1 -1
  22. package/dist/batch/index.d.ts +122 -122
  23. package/dist/batch/index.d.ts.map +1 -1
  24. package/dist/batch/index.js +1 -2
  25. package/dist/batch/index.js.map +1 -1
  26. package/dist/bucket/index.d.ts +163 -163
  27. package/dist/bucket/index.d.ts.map +1 -1
  28. package/dist/cache/core/index.d.ts +46 -46
  29. package/dist/cache/core/index.d.ts.map +1 -1
  30. package/dist/cache/redis/index.d.ts.map +1 -1
  31. package/dist/cli/index.d.ts +384 -285
  32. package/dist/cli/index.d.ts.map +1 -1
  33. package/dist/cli/index.js +1113 -623
  34. package/dist/cli/index.js.map +1 -1
  35. package/dist/command/index.d.ts +299 -300
  36. package/dist/command/index.d.ts.map +1 -1
  37. package/dist/command/index.js +13 -9
  38. package/dist/command/index.js.map +1 -1
  39. package/dist/core/index.browser.js +445 -103
  40. package/dist/core/index.browser.js.map +1 -1
  41. package/dist/core/index.d.ts +733 -625
  42. package/dist/core/index.d.ts.map +1 -1
  43. package/dist/core/index.js +446 -103
  44. package/dist/core/index.js.map +1 -1
  45. package/dist/core/index.native.js +445 -103
  46. package/dist/core/index.native.js.map +1 -1
  47. package/dist/datetime/index.d.ts +44 -44
  48. package/dist/datetime/index.d.ts.map +1 -1
  49. package/dist/datetime/index.js +4 -4
  50. package/dist/datetime/index.js.map +1 -1
  51. package/dist/email/index.d.ts +97 -50
  52. package/dist/email/index.d.ts.map +1 -1
  53. package/dist/email/index.js +129 -33
  54. package/dist/email/index.js.map +1 -1
  55. package/dist/fake/index.d.ts +7981 -14
  56. package/dist/fake/index.d.ts.map +1 -1
  57. package/dist/file/index.d.ts +523 -390
  58. package/dist/file/index.d.ts.map +1 -1
  59. package/dist/file/index.js +253 -1
  60. package/dist/file/index.js.map +1 -1
  61. package/dist/lock/core/index.d.ts +208 -208
  62. package/dist/lock/core/index.d.ts.map +1 -1
  63. package/dist/lock/redis/index.d.ts.map +1 -1
  64. package/dist/logger/index.d.ts +25 -26
  65. package/dist/logger/index.d.ts.map +1 -1
  66. package/dist/logger/index.js +12 -2
  67. package/dist/logger/index.js.map +1 -1
  68. package/dist/mcp/index.d.ts +197 -197
  69. package/dist/mcp/index.d.ts.map +1 -1
  70. package/dist/mcp/index.js +1 -1
  71. package/dist/mcp/index.js.map +1 -1
  72. package/dist/orm/chunk-DtkW-qnP.js +38 -0
  73. package/dist/orm/index.browser.js.map +1 -1
  74. package/dist/orm/index.bun.js +2814 -0
  75. package/dist/orm/index.bun.js.map +1 -0
  76. package/dist/orm/index.d.ts +1228 -1216
  77. package/dist/orm/index.d.ts.map +1 -1
  78. package/dist/orm/index.js +2041 -1967
  79. package/dist/orm/index.js.map +1 -1
  80. package/dist/queue/core/index.d.ts +248 -248
  81. package/dist/queue/core/index.d.ts.map +1 -1
  82. package/dist/queue/redis/index.d.ts.map +1 -1
  83. package/dist/redis/index.bun.js +285 -0
  84. package/dist/redis/index.bun.js.map +1 -0
  85. package/dist/redis/index.d.ts +118 -136
  86. package/dist/redis/index.d.ts.map +1 -1
  87. package/dist/redis/index.js +18 -38
  88. package/dist/redis/index.js.map +1 -1
  89. package/dist/retry/index.d.ts +69 -69
  90. package/dist/retry/index.d.ts.map +1 -1
  91. package/dist/router/index.d.ts +6 -6
  92. package/dist/router/index.d.ts.map +1 -1
  93. package/dist/scheduler/index.d.ts +25 -25
  94. package/dist/scheduler/index.d.ts.map +1 -1
  95. package/dist/security/index.browser.js +5 -1
  96. package/dist/security/index.browser.js.map +1 -1
  97. package/dist/security/index.d.ts +417 -254
  98. package/dist/security/index.d.ts.map +1 -1
  99. package/dist/security/index.js +386 -86
  100. package/dist/security/index.js.map +1 -1
  101. package/dist/server/auth/index.d.ts +110 -110
  102. package/dist/server/auth/index.d.ts.map +1 -1
  103. package/dist/server/auth/index.js +20 -20
  104. package/dist/server/auth/index.js.map +1 -1
  105. package/dist/server/cache/index.d.ts +62 -47
  106. package/dist/server/cache/index.d.ts.map +1 -1
  107. package/dist/server/cache/index.js +56 -3
  108. package/dist/server/cache/index.js.map +1 -1
  109. package/dist/server/compress/index.d.ts +6 -0
  110. package/dist/server/compress/index.d.ts.map +1 -1
  111. package/dist/server/compress/index.js +36 -1
  112. package/dist/server/compress/index.js.map +1 -1
  113. package/dist/server/cookies/index.d.ts +6 -6
  114. package/dist/server/cookies/index.d.ts.map +1 -1
  115. package/dist/server/cookies/index.js +3 -3
  116. package/dist/server/cookies/index.js.map +1 -1
  117. package/dist/server/core/index.browser.js +2 -2
  118. package/dist/server/core/index.browser.js.map +1 -1
  119. package/dist/server/core/index.d.ts +242 -150
  120. package/dist/server/core/index.d.ts.map +1 -1
  121. package/dist/server/core/index.js +294 -125
  122. package/dist/server/core/index.js.map +1 -1
  123. package/dist/server/cors/index.d.ts +11 -12
  124. package/dist/server/cors/index.d.ts.map +1 -1
  125. package/dist/server/health/index.d.ts +0 -1
  126. package/dist/server/health/index.d.ts.map +1 -1
  127. package/dist/server/helmet/index.d.ts +2 -2
  128. package/dist/server/helmet/index.d.ts.map +1 -1
  129. package/dist/server/links/index.browser.js.map +1 -1
  130. package/dist/server/links/index.d.ts +123 -124
  131. package/dist/server/links/index.d.ts.map +1 -1
  132. package/dist/server/links/index.js +1 -2
  133. package/dist/server/links/index.js.map +1 -1
  134. package/dist/server/metrics/index.d.ts.map +1 -1
  135. package/dist/server/multipart/index.d.ts +6 -6
  136. package/dist/server/multipart/index.d.ts.map +1 -1
  137. package/dist/server/proxy/index.d.ts +102 -103
  138. package/dist/server/proxy/index.d.ts.map +1 -1
  139. package/dist/server/rate-limit/index.d.ts +16 -16
  140. package/dist/server/rate-limit/index.d.ts.map +1 -1
  141. package/dist/server/static/index.d.ts +44 -44
  142. package/dist/server/static/index.d.ts.map +1 -1
  143. package/dist/server/static/index.js +4 -0
  144. package/dist/server/static/index.js.map +1 -1
  145. package/dist/server/swagger/index.d.ts +48 -49
  146. package/dist/server/swagger/index.d.ts.map +1 -1
  147. package/dist/server/swagger/index.js +3 -5
  148. package/dist/server/swagger/index.js.map +1 -1
  149. package/dist/sms/index.d.ts +13 -11
  150. package/dist/sms/index.d.ts.map +1 -1
  151. package/dist/sms/index.js +7 -7
  152. package/dist/sms/index.js.map +1 -1
  153. package/dist/thread/index.d.ts +71 -72
  154. package/dist/thread/index.d.ts.map +1 -1
  155. package/dist/topic/core/index.d.ts +318 -318
  156. package/dist/topic/core/index.d.ts.map +1 -1
  157. package/dist/topic/redis/index.d.ts +6 -6
  158. package/dist/topic/redis/index.d.ts.map +1 -1
  159. package/dist/vite/index.d.ts +5805 -249
  160. package/dist/vite/index.d.ts.map +1 -1
  161. package/dist/vite/index.js +599 -513
  162. package/dist/vite/index.js.map +1 -1
  163. package/dist/websocket/index.browser.js +6 -6
  164. package/dist/websocket/index.browser.js.map +1 -1
  165. package/dist/websocket/index.d.ts +247 -247
  166. package/dist/websocket/index.d.ts.map +1 -1
  167. package/dist/websocket/index.js +6 -6
  168. package/dist/websocket/index.js.map +1 -1
  169. package/package.json +9 -14
  170. package/src/api/files/controllers/AdminFileStatsController.ts +0 -1
  171. package/src/api/users/atoms/realmAuthSettingsAtom.ts +5 -0
  172. package/src/api/users/controllers/{UserRealmController.ts → RealmController.ts} +11 -11
  173. package/src/api/users/entities/users.ts +1 -1
  174. package/src/api/users/index.ts +8 -8
  175. package/src/api/users/primitives/{$userRealm.ts → $realm.ts} +17 -19
  176. package/src/api/users/providers/{UserRealmProvider.ts → RealmProvider.ts} +26 -30
  177. package/src/api/users/schemas/{userRealmConfigSchema.ts → realmConfigSchema.ts} +2 -2
  178. package/src/api/users/services/CredentialService.ts +7 -7
  179. package/src/api/users/services/IdentityService.ts +4 -4
  180. package/src/api/users/services/RegistrationService.spec.ts +25 -27
  181. package/src/api/users/services/RegistrationService.ts +38 -27
  182. package/src/api/users/services/SessionCrudService.ts +3 -3
  183. package/src/api/users/services/SessionService.spec.ts +3 -3
  184. package/src/api/users/services/SessionService.ts +28 -9
  185. package/src/api/users/services/UserService.ts +7 -7
  186. package/src/batch/providers/BatchProvider.ts +1 -2
  187. package/src/cli/apps/AlephaCli.ts +0 -2
  188. package/src/cli/apps/AlephaPackageBuilderCli.ts +38 -19
  189. package/src/cli/assets/apiHelloControllerTs.ts +18 -0
  190. package/src/cli/assets/apiIndexTs.ts +16 -0
  191. package/src/cli/assets/claudeMd.ts +303 -0
  192. package/src/cli/assets/mainBrowserTs.ts +2 -2
  193. package/src/cli/assets/mainServerTs.ts +24 -0
  194. package/src/cli/assets/webAppRouterTs.ts +15 -0
  195. package/src/cli/assets/webHelloComponentTsx.ts +16 -0
  196. package/src/cli/assets/webIndexTs.ts +16 -0
  197. package/src/cli/atoms/buildOptions.ts +88 -0
  198. package/src/cli/commands/build.ts +70 -87
  199. package/src/cli/commands/db.ts +21 -22
  200. package/src/cli/commands/deploy.ts +17 -5
  201. package/src/cli/commands/dev.ts +22 -14
  202. package/src/cli/commands/format.ts +8 -2
  203. package/src/cli/commands/gen/env.ts +53 -0
  204. package/src/cli/commands/gen/openapi.ts +1 -1
  205. package/src/cli/commands/gen/resource.ts +15 -0
  206. package/src/cli/commands/gen.ts +7 -1
  207. package/src/cli/commands/init.ts +74 -30
  208. package/src/cli/commands/lint.ts +8 -2
  209. package/src/cli/commands/test.ts +8 -3
  210. package/src/cli/commands/typecheck.ts +5 -1
  211. package/src/cli/commands/verify.ts +5 -3
  212. package/src/cli/defineConfig.ts +49 -7
  213. package/src/cli/index.ts +0 -1
  214. package/src/cli/services/AlephaCliUtils.ts +39 -589
  215. package/src/cli/services/PackageManagerUtils.ts +301 -0
  216. package/src/cli/services/ProjectScaffolder.ts +306 -0
  217. package/src/command/helpers/Runner.spec.ts +2 -2
  218. package/src/command/helpers/Runner.ts +16 -4
  219. package/src/command/primitives/$command.ts +0 -6
  220. package/src/command/providers/CliProvider.ts +1 -3
  221. package/src/core/Alepha.ts +42 -0
  222. package/src/core/__tests__/Alepha-graph.spec.ts +4 -0
  223. package/src/core/index.shared.ts +1 -0
  224. package/src/core/index.ts +2 -0
  225. package/src/core/primitives/$hook.ts +6 -2
  226. package/src/core/primitives/$module.spec.ts +4 -0
  227. package/src/core/providers/AlsProvider.ts +1 -1
  228. package/src/core/providers/CodecManager.spec.ts +12 -6
  229. package/src/core/providers/CodecManager.ts +26 -6
  230. package/src/core/providers/EventManager.ts +169 -13
  231. package/src/core/providers/KeylessJsonSchemaCodec.spec.ts +621 -0
  232. package/src/core/providers/KeylessJsonSchemaCodec.ts +407 -0
  233. package/src/core/providers/StateManager.spec.ts +27 -16
  234. package/src/email/providers/LocalEmailProvider.spec.ts +111 -87
  235. package/src/email/providers/LocalEmailProvider.ts +52 -15
  236. package/src/email/providers/NodemailerEmailProvider.ts +167 -56
  237. package/src/file/errors/FileError.ts +7 -0
  238. package/src/file/index.ts +9 -1
  239. package/src/file/providers/MemoryFileSystemProvider.ts +393 -0
  240. package/src/logger/index.ts +15 -3
  241. package/src/mcp/transports/StdioMcpTransport.ts +1 -1
  242. package/src/orm/index.browser.ts +1 -19
  243. package/src/orm/index.bun.ts +77 -0
  244. package/src/orm/index.shared-server.ts +22 -0
  245. package/src/orm/index.shared.ts +15 -0
  246. package/src/orm/index.ts +13 -39
  247. package/src/orm/providers/drivers/BunPostgresProvider.ts +3 -5
  248. package/src/orm/providers/drivers/BunSqliteProvider.ts +1 -1
  249. package/src/orm/providers/drivers/CloudflareD1Provider.ts +4 -0
  250. package/src/orm/providers/drivers/DatabaseProvider.ts +4 -0
  251. package/src/orm/providers/drivers/PglitePostgresProvider.ts +4 -0
  252. package/src/orm/services/Repository.ts +8 -0
  253. package/src/queue/core/providers/WorkerProvider.spec.ts +48 -32
  254. package/src/redis/index.bun.ts +35 -0
  255. package/src/redis/providers/BunRedisProvider.ts +12 -43
  256. package/src/redis/providers/BunRedisSubscriberProvider.ts +2 -3
  257. package/src/redis/providers/NodeRedisProvider.ts +16 -34
  258. package/src/{server/security → security}/__tests__/BasicAuth.spec.ts +11 -11
  259. package/src/{server/security → security}/__tests__/ServerSecurityProvider-realm.spec.ts +21 -16
  260. package/src/{server/security/providers → security/__tests__}/ServerSecurityProvider.spec.ts +5 -5
  261. package/src/security/index.browser.ts +5 -0
  262. package/src/security/index.ts +90 -7
  263. package/src/security/primitives/{$realm.spec.ts → $issuer.spec.ts} +11 -11
  264. package/src/security/primitives/{$realm.ts → $issuer.ts} +20 -17
  265. package/src/security/primitives/$role.ts +5 -5
  266. package/src/security/primitives/$serviceAccount.spec.ts +5 -5
  267. package/src/security/primitives/$serviceAccount.ts +3 -3
  268. package/src/{server/security → security}/providers/ServerSecurityProvider.ts +5 -7
  269. package/src/server/auth/primitives/$auth.ts +10 -10
  270. package/src/server/auth/primitives/$authCredentials.ts +3 -3
  271. package/src/server/auth/primitives/$authGithub.ts +3 -3
  272. package/src/server/auth/primitives/$authGoogle.ts +3 -3
  273. package/src/server/auth/providers/ServerAuthProvider.ts +13 -13
  274. package/src/server/cache/providers/ServerCacheProvider.spec.ts +183 -0
  275. package/src/server/cache/providers/ServerCacheProvider.ts +95 -10
  276. package/src/server/compress/providers/ServerCompressProvider.ts +61 -2
  277. package/src/server/cookies/providers/ServerCookiesProvider.ts +3 -3
  278. package/src/server/core/helpers/ServerReply.ts +2 -2
  279. package/src/server/core/providers/NodeHttpServerProvider.ts +25 -6
  280. package/src/server/core/providers/ServerBodyParserProvider.ts +19 -23
  281. package/src/server/core/providers/ServerLoggerProvider.ts +23 -19
  282. package/src/server/core/providers/ServerProvider.ts +155 -22
  283. package/src/server/core/providers/ServerRouterProvider.ts +259 -115
  284. package/src/server/core/providers/ServerTimingProvider.ts +2 -2
  285. package/src/server/links/index.ts +1 -1
  286. package/src/server/links/providers/LinkProvider.ts +1 -1
  287. package/src/server/static/providers/ServerStaticProvider.ts +10 -0
  288. package/src/server/swagger/index.ts +1 -1
  289. package/src/server/swagger/providers/ServerSwaggerProvider.ts +5 -8
  290. package/src/sms/providers/LocalSmsProvider.spec.ts +153 -111
  291. package/src/sms/providers/LocalSmsProvider.ts +8 -7
  292. package/src/vite/helpers/boot.ts +28 -17
  293. package/src/vite/helpers/importViteReact.ts +13 -0
  294. package/src/vite/index.ts +1 -21
  295. package/src/vite/plugins/viteAlephaDev.ts +16 -1
  296. package/src/vite/plugins/viteAlephaSsrPreload.ts +222 -0
  297. package/src/vite/tasks/buildClient.ts +11 -0
  298. package/src/vite/tasks/buildServer.ts +59 -4
  299. package/src/vite/tasks/devServer.ts +71 -0
  300. package/src/vite/tasks/generateCloudflare.ts +7 -0
  301. package/src/vite/tasks/index.ts +2 -1
  302. package/dist/server/security/index.browser.js +0 -13
  303. package/dist/server/security/index.browser.js.map +0 -1
  304. package/dist/server/security/index.d.ts +0 -173
  305. package/dist/server/security/index.d.ts.map +0 -1
  306. package/dist/server/security/index.js +0 -311
  307. package/dist/server/security/index.js.map +0 -1
  308. package/src/cli/assets/appRouterTs.ts +0 -9
  309. package/src/cli/assets/mainTs.ts +0 -13
  310. package/src/cli/assets/viteConfigTs.ts +0 -14
  311. package/src/cli/commands/run.ts +0 -24
  312. package/src/server/security/index.browser.ts +0 -10
  313. package/src/server/security/index.ts +0 -94
  314. package/src/vite/plugins/viteAlepha.ts +0 -37
  315. package/src/vite/plugins/viteAlephaBuild.ts +0 -281
  316. /package/src/{server/security → security}/primitives/$basicAuth.ts +0 -0
  317. /package/src/{server/security → security}/providers/ServerBasicAuthProvider.ts +0 -0
package/dist/orm/index.js CHANGED
@@ -1,13 +1,13 @@
+ import { n as __reExport, t as __exportAll } from "./chunk-DtkW-qnP.js";
  import { createRequire } from "node:module";
  import { $atom, $context, $env, $hook, $inject, $module, $use, Alepha, AlephaError, KIND, Primitive, Value, createPagination, createPrimitive, pageQuerySchema, pageSchema, pageSchema as pageSchema$1, t } from "alepha";
  import { AlephaDateTime, DateTimeProvider } from "alepha/datetime";
- import * as drizzle from "drizzle-orm";
- import { and, arrayContained, arrayContains, arrayOverlaps, asc, between, desc, eq, getTableName, gt, gte, ilike, inArray, isNotNull, isNull, isSQLWrapper, like, lt, lte, ne, not, notBetween, notIlike, notInArray, notLike, or, sql, sql as sql$1 } from "drizzle-orm";
  import * as pg$2 from "drizzle-orm/pg-core";
  import { alias, check, customType, foreignKey, index, pgEnum, pgSchema, pgTable, unique, uniqueIndex } from "drizzle-orm/pg-core";
+ import * as drizzle from "drizzle-orm";
+ import { and, arrayContained, arrayContains, arrayOverlaps, asc, between, desc, eq, getTableName, gt, gte, ilike, inArray, isNotNull, isNull, isSQLWrapper, like, lt, lte, ne, not, notBetween, notIlike, notInArray, notLike, or, sql, sql as sql$1 } from "drizzle-orm";
  import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
  import { $logger } from "alepha/logger";
- import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
  import { $lock } from "alepha/lock";
  import { randomUUID } from "node:crypto";
  import * as pg$1 from "drizzle-orm/sqlite-core";
@@ -18,6 +18,7 @@ import postgres from "postgres";
  import { drizzle as drizzle$2 } from "drizzle-orm/sqlite-proxy";
  import { migrate as migrate$1 } from "drizzle-orm/sqlite-proxy/migrator";
  import { migrate as migrate$2 } from "drizzle-orm/pglite/migrator";
+ import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
  import { $retry } from "alepha/retry";
 
  export * from "drizzle-orm/pg-core"
@@ -139,74 +140,6 @@ var DbError = class extends AlephaError {
  }
  };
 
- //#endregion
- //#region ../../src/orm/errors/DbConflictError.ts
- var DbConflictError = class extends DbError {
- name = "DbConflictError";
- status = 409;
- };
-
- //#endregion
- //#region ../../src/orm/errors/DbEntityNotFoundError.ts
- var DbEntityNotFoundError = class extends DbError {
- name = "DbEntityNotFoundError";
- status = 404;
- constructor(entityName) {
- super(`Entity from '${entityName}' was not found`);
- }
- };
-
- //#endregion
- //#region ../../src/orm/errors/DbVersionMismatchError.ts
- /**
- * Error thrown when there is a version mismatch.
- * It's thrown by {@link Repository#save} when the updated entity version does not match the one in the database.
- * This is used for optimistic concurrency control.
- */
- var DbVersionMismatchError = class extends DbError {
- name = "DbVersionMismatchError";
- constructor(table, id) {
- super(`Version mismatch for table '${table}' and id '${id}'`);
- }
- };
-
- //#endregion
- //#region ../../src/orm/helpers/pgAttr.ts
- /**
- * Decorates a typebox schema with a Postgres attribute.
- *
- * > It's just a fancy way to add Symbols to a field.
- *
- * @example
- * ```ts
- * import { t } from "alepha";
- * import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
- *
- * export const updatedAtSchema = pgAttr(
- * t.datetime(), PG_UPDATED_AT,
- * );
- * ```
- */
- const pgAttr = (type, attr, value) => {
- Object.assign(type, { [attr]: value ?? {} });
- return type;
- };
- /**
- * Retrieves the fields of a schema that have a specific attribute.
- */
- const getAttrFields = (schema$1, name) => {
- const fields = [];
- for (const key of Object.keys(schema$1.properties)) {
- const value = schema$1.properties[key];
- if (name in value) fields.push({
- type: value,
- key,
- data: value[name]
- });
- }
- return fields;
- };
-
  //#endregion
  //#region ../../src/orm/providers/drivers/DatabaseProvider.ts
  var DatabaseProvider = class {
@@ -218,6 +151,9 @@ var DatabaseProvider = class {
  get name() {
  return "default";
  }
+ get driver() {
+ return this.dialect;
+ }
  get schema() {
  return "public";
  }
@@ -302,2126 +238,2195 @@ var DatabaseProvider = class {
302
238
  };
303
239
 
304
240
  //#endregion
305
- //#region ../../src/orm/services/PgRelationManager.ts
306
- var PgRelationManager = class {
241
+ //#region ../../src/orm/primitives/$sequence.ts
242
+ /**
243
+ * Creates a PostgreSQL sequence primitive for generating unique numeric values.
244
+ */
245
+ const $sequence = (options = {}) => {
246
+ return createPrimitive(SequencePrimitive, options);
247
+ };
248
+ var SequencePrimitive = class extends Primitive {
249
+ provider = this.$provider();
250
+ onInit() {
251
+ this.provider.registerSequence(this);
252
+ }
253
+ get name() {
254
+ return this.options.name ?? this.config.propertyKey;
255
+ }
256
+ async next() {
257
+ return this.provider.execute(sql$1`SELECT nextval('${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"')`).then((rows) => Number(rows[0]?.nextval));
258
+ }
259
+ async current() {
260
+ return this.provider.execute(sql$1`SELECT last_value FROM ${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"`).then((rows) => Number(rows[0]?.last_value));
261
+ }
262
+ $provider() {
263
+ return this.options.provider ?? this.alepha.inject(DatabaseProvider);
264
+ }
265
+ };
266
+ $sequence[KIND] = SequencePrimitive;
267
+
268
+ //#endregion
269
+ //#region ../../src/orm/providers/DrizzleKitProvider.ts
270
+ var DrizzleKitProvider = class {
271
+ log = $logger();
272
+ alepha = $inject(Alepha);
307
273
  /**
308
- * Recursively build joins for the query builder based on the relations map
274
+ * Synchronize database with current schema definitions.
275
+ *
276
+ * In development mode, it will generate and execute migrations based on the current state.
277
+ * In testing mode, it will generate migrations from scratch without applying them.
278
+ *
279
+ * Does nothing in production mode, you must handle migrations manually.
309
280
  */
310
- buildJoins(provider, builder, joins, withRelations, table, parentKey) {
311
- for (const [key, join] of Object.entries(withRelations)) {
312
- const from = provider.table(join.join);
313
- const on = isSQLWrapper$1(join.on) ? join.on : sql$1`${table[join.on[0]]} = ${from[join.on[1].name]}`;
314
- if (join.type === "right") builder.rightJoin(from, on);
315
- else if (join.type === "inner") builder.innerJoin(from, on);
316
- else builder.leftJoin(from, on);
317
- joins.push({
318
- key,
319
- table: getTableName(from),
320
- schema: join.join.schema,
321
- col: (name) => from[name],
322
- parent: parentKey
323
- });
324
- if (join.with) this.buildJoins(provider, builder, joins, join.with, from, parentKey ? `${parentKey}.${key}` : key);
281
+ async synchronize(provider) {
282
+ if (this.alepha.isProduction()) {
283
+ this.log.warn("Synchronization skipped in production mode.");
284
+ return;
285
+ }
286
+ if (provider.schema !== "public") await this.createSchemaIfNotExists(provider, provider.schema);
287
+ const now = Date.now();
288
+ if (this.alepha.isTest()) {
289
+ const { statements } = await this.generateMigration(provider);
290
+ await this.executeStatements(statements, provider);
291
+ } else {
292
+ const entry = await this.loadDevMigrations(provider);
293
+ const { statements, snapshot } = await this.generateMigration(provider, entry?.snapshot ? JSON.parse(entry.snapshot) : void 0);
294
+ await this.executeStatements(statements, provider, true);
295
+ await this.saveDevMigrations(provider, snapshot, entry);
325
296
  }
297
+ this.log.info(`Db '${provider.name}' synchronization OK [${Date.now() - now}ms]`);
326
298
  }
327
299
  /**
328
- * Map a row with its joined relations based on the joins definition
300
+ * Mostly used for testing purposes. You can generate SQL migration statements without executing them.
329
301
  */
330
- mapRowWithJoins(record, row, schema$1, joins, parentKey) {
331
- for (const join of joins) if (join.parent === parentKey) {
332
- const joinedData = row[join.table];
333
- if (this.isAllNull(joinedData)) record[join.key] = void 0;
334
- else {
335
- record[join.key] = joinedData;
336
- this.mapRowWithJoins(record[join.key], row, schema$1, joins, parentKey ? `${parentKey}.${join.key}` : join.key);
302
+ async generateMigration(provider, prevSnapshot) {
303
+ const kit = this.importDrizzleKit();
304
+ const models = this.getModels(provider);
305
+ if (Object.keys(models).length > 0) {
306
+ if (provider.dialect === "sqlite") {
307
+ const prev$1 = prevSnapshot ?? await kit.generateSQLiteDrizzleJson({});
308
+ const curr$1 = await kit.generateSQLiteDrizzleJson(models);
309
+ return {
310
+ models,
311
+ statements: await kit.generateSQLiteMigration(prev$1, curr$1),
312
+ snapshot: curr$1
313
+ };
337
314
  }
315
+ const prev = prevSnapshot ?? await kit.generateDrizzleJson({});
316
+ const curr = await kit.generateDrizzleJson(models);
317
+ return {
318
+ models,
319
+ statements: await kit.generateMigration(prev, curr),
320
+ snapshot: curr
321
+ };
338
322
  }
339
- return record;
323
+ return {
324
+ models,
325
+ statements: [],
326
+ snapshot: {}
327
+ };
340
328
  }
341
329
  /**
342
- * Check if all values in an object are null (indicates a left join with no match)
330
+ * Load all tables, enums, sequences, etc. from the provider's repositories.
343
331
  */
344
- isAllNull(obj) {
345
- if (obj === null || obj === void 0) return true;
346
- if (typeof obj !== "object") return false;
347
- return Object.values(obj).every((val) => val === null);
332
+ getModels(provider) {
333
+ const models = {};
334
+ for (const [key, value] of provider.tables.entries()) {
335
+ if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
336
+ models[key] = value;
337
+ }
338
+ for (const [key, value] of provider.enums.entries()) {
339
+ if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
340
+ models[key] = value;
341
+ }
342
+ for (const [key, value] of provider.sequences.entries()) {
343
+ if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
344
+ models[key] = value;
345
+ }
346
+ return models;
348
347
  }
349
348
  /**
350
- * Build a schema that includes all join properties recursively
349
+ * Load the migration snapshot from the database.
351
350
  */
352
- buildSchemaWithJoins(baseSchema, joins, parentPath) {
353
- const schema$1 = Value.Clone(baseSchema);
354
- const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
355
- for (const join of joinsAtThisLevel) {
356
- const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
357
- const childJoins = joins.filter((j) => j.parent === joinPath);
358
- let joinSchema = join.schema;
359
- if (childJoins.length > 0) joinSchema = this.buildSchemaWithJoins(join.schema, joins, joinPath);
360
- schema$1.properties[join.key] = t.optional(joinSchema);
351
+ async loadDevMigrations(provider) {
352
+ const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
353
+ if (provider.url.includes(":memory:")) {
354
+ this.log.trace(`In-memory database detected for '${name}', skipping migration snapshot load.`);
355
+ return;
361
356
  }
362
- return schema$1;
357
+ if (provider.dialect === "sqlite") {
358
+ try {
359
+ const text = await readFile(`node_modules/.alepha/sqlite-${name}.json`, "utf-8");
360
+ return this.alepha.codec.decode(devMigrationsSchema, text);
361
+ } catch (e) {
362
+ this.log.trace(`No existing migration snapshot for '${name}'`, e);
363
+ }
364
+ return;
365
+ }
366
+ await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS "drizzle";`);
367
+ await provider.execute(sql$1`
368
+ CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_dev_migrations" (
369
+ "id" SERIAL PRIMARY KEY,
370
+ "name" TEXT NOT NULL,
371
+ "created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
372
+ "snapshot" TEXT NOT NULL
373
+ );
374
+ `);
375
+ const rows = await provider.run(sql$1`SELECT * FROM "drizzle"."__drizzle_dev_migrations" WHERE "name" = ${name} LIMIT 1`, devMigrationsSchema);
376
+ if (rows.length === 0) {
377
+ this.log.trace(`No existing migration snapshot for '${name}'`);
378
+ return;
379
+ }
380
+ return this.alepha.codec.decode(devMigrationsSchema, rows[0]);
363
381
  }
364
- };
365
-
366
- //#endregion
367
- //#region ../../src/orm/services/QueryManager.ts
368
- var QueryManager = class {
369
- alepha = $inject(Alepha);
370
- /**
371
- * Convert a query object to a SQL query.
372
- */
373
- toSQL(query, options) {
374
- const { schema: schema$1, col, joins } = options;
375
- const conditions = [];
376
- if (isSQLWrapper(query)) conditions.push(query);
382
+ async saveDevMigrations(provider, curr, devMigrations) {
383
+ if (provider.url.includes(":memory:")) {
384
+ this.log.trace(`In-memory database detected for '${provider.constructor.name}', skipping migration snapshot save.`);
385
+ return;
386
+ }
387
+ const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
388
+ if (provider.dialect === "sqlite") {
389
+ const filePath = `node_modules/.alepha/sqlite-${name}.json`;
390
+ await mkdir("node_modules/.alepha", { recursive: true }).catch(() => null);
391
+ await writeFile(filePath, JSON.stringify({
392
+ id: devMigrations?.id ?? 1,
393
+ name,
394
+ created_at: /* @__PURE__ */ new Date(),
395
+ snapshot: JSON.stringify(curr)
396
+ }, null, 2));
397
+ this.log.debug(`Saved migration snapshot to '${filePath}'`);
398
+ return;
399
+ }
400
+ if (!devMigrations) await provider.execute(sql$1`INSERT INTO "drizzle"."__drizzle_dev_migrations" ("name", "snapshot") VALUES (${name}, ${JSON.stringify(curr)})`);
377
401
  else {
378
- const keys = Object.keys(query);
379
- for (const key of keys) {
380
- const operator = query[key];
381
- if (typeof query[key] === "object" && query[key] != null && !Array.isArray(query[key]) && joins?.length) {
382
- const matchingJoins = joins.filter((j) => j.key === key);
383
- if (matchingJoins.length > 0) {
384
- const join = matchingJoins[0];
385
- const joinPath = join.parent ? `${join.parent}.${key}` : key;
386
- const recursiveJoins = joins.filter((j) => {
387
- if (!j.parent) return false;
388
- return j.parent === joinPath || j.parent.startsWith(`${joinPath}.`);
389
- }).map((j) => {
390
- const newParent = j.parent === joinPath ? void 0 : j.parent.substring(joinPath.length + 1);
391
- return {
392
- ...j,
393
- parent: newParent
394
- };
395
- });
396
- const sql$2 = this.toSQL(query[key], {
397
- schema: join.schema,
398
- col: join.col,
399
- joins: recursiveJoins.length > 0 ? recursiveJoins : void 0,
400
- dialect: options.dialect
401
- });
402
- if (sql$2) conditions.push(sql$2);
403
- continue;
404
- }
405
- }
406
- if (Array.isArray(operator)) {
407
- const operations = operator.map((it) => {
408
- if (isSQLWrapper(it)) return it;
409
- return this.toSQL(it, {
410
- schema: schema$1,
411
- col,
412
- joins,
413
- dialect: options.dialect
414
- });
415
- }).filter((it) => it != null);
416
- if (key === "and") return and(...operations);
417
- if (key === "or") return or(...operations);
418
- }
419
- if (key === "not") {
420
- const where = this.toSQL(operator, {
421
- schema: schema$1,
422
- col,
423
- joins,
424
- dialect: options.dialect
425
- });
426
- if (where) return not(where);
427
- }
428
- if (operator) {
429
- const column = col(key);
430
- const sql$2 = this.mapOperatorToSql(operator, column, schema$1, key, options.dialect);
431
- if (sql$2) conditions.push(sql$2);
432
- }
402
+ const newSnapshot = JSON.stringify(curr);
403
+ if (devMigrations.snapshot !== newSnapshot) await provider.execute(sql$1`UPDATE "drizzle"."__drizzle_dev_migrations" SET "snapshot" = ${newSnapshot} WHERE "id" = ${devMigrations.id}`);
404
+ }
405
+ }
406
+ async executeStatements(statements, provider, catchErrors = false) {
407
+ let nErrors = 0;
408
+ for (const statement of statements) {
409
+ if (statement.startsWith("DROP SCHEMA")) continue;
410
+ try {
411
+ await provider.execute(sql$1.raw(statement));
412
+ } catch (error) {
413
+ const errorMessage = `Error executing statement: ${statement}`;
414
+ if (catchErrors) {
415
+ nErrors++;
416
+ this.log.warn(errorMessage, { context: [error] });
417
+ } else throw error;
433
418
  }
434
419
  }
435
- if (conditions.length === 1) return conditions[0];
436
- return and(...conditions);
420
+ if (nErrors > 0) this.log.warn(`Executed ${statements.length} statements with ${nErrors} errors.`);
437
421
  }
438
- /**
439
- * Check if an object has any filter operator properties.
440
- */
441
- hasFilterOperatorProperties(obj) {
442
- if (!obj || typeof obj !== "object") return false;
443
- return [
444
- "eq",
445
- "ne",
446
- "gt",
447
- "gte",
448
- "lt",
449
- "lte",
450
- "inArray",
451
- "notInArray",
452
- "isNull",
453
- "isNotNull",
454
- "like",
455
- "notLike",
456
- "ilike",
457
- "notIlike",
458
- "contains",
459
- "startsWith",
460
- "endsWith",
461
- "between",
462
- "notBetween",
463
- "arrayContains",
464
- "arrayContained",
465
- "arrayOverlaps"
466
- ].some((key) => key in obj);
422
+ async createSchemaIfNotExists(provider, schemaName) {
423
+ if (!/^[a-z0-9_]+$/i.test(schemaName)) throw new Error(`Invalid schema name: ${schemaName}. Must only contain alphanumeric characters and underscores.`);
424
+ const sqlSchema = sql$1.raw(schemaName);
425
+ if (schemaName.startsWith("test_")) {
426
+ this.log.info(`Drop test schema '${schemaName}' ...`, schemaName);
427
+ await provider.execute(sql$1`DROP SCHEMA IF EXISTS ${sqlSchema} CASCADE`);
428
+ }
429
+ this.log.debug(`Ensuring schema '${schemaName}' exists`);
430
+ await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS ${sqlSchema}`);
467
431
  }
468
432
  /**
469
- * Map a filter operator to a SQL query.
433
+ * Try to load the official Drizzle Kit API.
434
+ * If not available, fallback to the local kit import.
470
435
  */
471
- mapOperatorToSql(operator, column, columnSchema, columnName, dialect = "postgresql") {
472
- const encodeValue = (value) => {
473
- if (value == null) return value;
474
- if (columnSchema && columnName) try {
475
- const fieldSchema = columnSchema.properties[columnName];
476
- if (fieldSchema) return this.alepha.codec.encode(fieldSchema, value, { encoder: "drizzle" });
477
- } catch (error) {}
478
- return value;
479
- };
480
- const encodeArray = (values) => {
481
- return values.map((v) => encodeValue(v));
482
- };
483
- if (typeof operator !== "object" || operator == null || !this.hasFilterOperatorProperties(operator)) return eq(column, encodeValue(operator));
484
- const conditions = [];
485
- if (operator?.eq != null) conditions.push(eq(column, encodeValue(operator.eq)));
486
- if (operator?.ne != null) conditions.push(ne(column, encodeValue(operator.ne)));
487
- if (operator?.gt != null) conditions.push(gt(column, encodeValue(operator.gt)));
488
- if (operator?.gte != null) conditions.push(gte(column, encodeValue(operator.gte)));
489
- if (operator?.lt != null) conditions.push(lt(column, encodeValue(operator.lt)));
490
- if (operator?.lte != null) conditions.push(lte(column, encodeValue(operator.lte)));
491
- if (operator?.inArray != null) {
492
- if (!Array.isArray(operator.inArray) || operator.inArray.length === 0) throw new AlephaError("inArray operator requires at least one value");
493
- conditions.push(inArray(column, encodeArray(operator.inArray)));
494
- }
495
- if (operator?.notInArray != null) {
496
- if (!Array.isArray(operator.notInArray) || operator.notInArray.length === 0) throw new AlephaError("notInArray operator requires at least one value");
497
- conditions.push(notInArray(column, encodeArray(operator.notInArray)));
498
- }
499
- if (operator?.isNull != null) conditions.push(isNull(column));
500
- if (operator?.isNotNull != null) conditions.push(isNotNull(column));
501
- if (operator?.like != null) conditions.push(like(column, encodeValue(operator.like)));
502
- if (operator?.notLike != null) conditions.push(notLike(column, encodeValue(operator.notLike)));
503
- if (operator?.ilike != null) conditions.push(ilike(column, encodeValue(operator.ilike)));
504
- if (operator?.notIlike != null) conditions.push(notIlike(column, encodeValue(operator.notIlike)));
505
- if (operator?.contains != null) {
506
- const escapedValue = String(operator.contains).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
507
- if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}%`)})`);
508
- else conditions.push(ilike(column, encodeValue(`%${escapedValue}%`)));
509
- }
510
- if (operator?.startsWith != null) {
511
- const escapedValue = String(operator.startsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
512
- if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`${escapedValue}%`)})`);
513
- else conditions.push(ilike(column, encodeValue(`${escapedValue}%`)));
514
- }
515
- if (operator?.endsWith != null) {
516
- const escapedValue = String(operator.endsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
517
- if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}`)})`);
518
- else conditions.push(ilike(column, encodeValue(`%${escapedValue}`)));
519
- }
520
- if (operator?.between != null) {
521
- if (!Array.isArray(operator.between) || operator.between.length !== 2) throw new Error("between operator requires exactly 2 values [min, max]");
522
- conditions.push(between(column, encodeValue(operator.between[0]), encodeValue(operator.between[1])));
523
- }
524
- if (operator?.notBetween != null) {
525
- if (!Array.isArray(operator.notBetween) || operator.notBetween.length !== 2) throw new Error("notBetween operator requires exactly 2 values [min, max]");
526
- conditions.push(notBetween(column, encodeValue(operator.notBetween[0]), encodeValue(operator.notBetween[1])));
436
+ importDrizzleKit() {
437
+ try {
438
+ return createRequire(import.meta.url)("drizzle-kit/api");
439
+ } catch (_) {
440
+ throw new Error("Drizzle Kit is not installed. Please install it with `npm install -D drizzle-kit`.");
527
441
  }
528
- if (operator?.arrayContains != null) conditions.push(arrayContains(column, encodeValue(operator.arrayContains)));
529
- if (operator?.arrayContained != null) conditions.push(arrayContained(column, encodeValue(operator.arrayContained)));
530
- if (operator?.arrayOverlaps != null) conditions.push(arrayOverlaps(column, encodeValue(operator.arrayOverlaps)));
531
- if (conditions.length === 0) return;
532
- if (conditions.length === 1) return conditions[0];
533
- return and(...conditions);
534
442
  }
443
+ };
444
+ const devMigrationsSchema = t.object({
445
+ id: t.number(),
446
+ name: t.text(),
447
+ snapshot: t.string(),
448
+ created_at: t.string()
449
+ });
450
+
451
+ //#endregion
452
+ //#region ../../src/orm/errors/DbMigrationError.ts
453
+ var DbMigrationError = class extends DbError {
454
+ name = "DbMigrationError";
455
+ constructor(cause) {
456
+ super("Failed to migrate database", cause);
457
+ }
458
+ };
459
+
460
+ //#endregion
461
+ //#region ../../src/orm/types/byte.ts
462
+ /**
463
+ * Postgres bytea type.
464
+ */
465
+ const byte = customType({ dataType: () => "bytea" });
466
+
467
+ //#endregion
468
+ //#region ../../src/orm/services/ModelBuilder.ts
469
+ /**
470
+ * Abstract base class for transforming Alepha Primitives (Entity, Sequence, etc...)
471
+ * into drizzle models (tables, enums, sequences, etc...).
472
+ */
473
+ var ModelBuilder = class {
535
474
  /**
536
- * Parse pagination sort string to orderBy format.
537
- * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
538
- * - Columns separated by comma
539
- * - Prefix with '-' for DESC direction
540
- *
541
- * @param sort Pagination sort string
542
- * @returns OrderBy array or single object
475
+ * Convert camelCase to snake_case for column names.
543
476
  */
544
- parsePaginationSort(sort) {
545
- const orderByClauses = sort.split(",").map((field) => field.trim()).map((field) => {
546
- if (field.startsWith("-")) return {
547
- column: field.substring(1),
548
- direction: "desc"
549
- };
550
- return {
551
- column: field,
552
- direction: "asc"
553
- };
554
- });
555
- return orderByClauses.length === 1 ? orderByClauses[0] : orderByClauses;
477
+ toColumnName(str) {
478
+ return str[0].toLowerCase() + str.slice(1).replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
556
479
  }
557
480
  /**
558
- * Normalize orderBy parameter to array format.
559
- * Supports 3 modes:
560
- * 1. String: "name" -> [{ column: "name", direction: "asc" }]
561
- * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
562
- * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
563
- *
564
- * @param orderBy The orderBy parameter
565
- * @returns Normalized array of order by clauses
566
- */
567
- normalizeOrderBy(orderBy) {
568
- if (typeof orderBy === "string") return [{
569
- column: orderBy,
570
- direction: "asc"
571
- }];
572
- if (!Array.isArray(orderBy) && typeof orderBy === "object") return [{
573
- column: orderBy.column,
574
- direction: orderBy.direction ?? "asc"
575
- }];
576
- if (Array.isArray(orderBy)) return orderBy.map((item) => ({
577
- column: item.column,
578
- direction: item.direction ?? "asc"
579
- }));
580
- return [];
581
- }
582
- /**
583
- * Create a pagination object.
584
- *
585
- * @deprecated Use `createPagination` from alepha instead.
586
- * This method now delegates to the framework-level helper.
481
+ * Build the table configuration function for any database.
482
+ * This includes indexes, foreign keys, constraints, and custom config.
587
483
  *
588
- * @param entities The entities to paginate.
589
- * @param limit The limit of the pagination.
590
- * @param offset The offset of the pagination.
591
- * @param sort Optional sort metadata to include in response.
484
+ * @param entity - The entity primitive
485
+ * @param builders - Database-specific builder functions
486
+ * @param tableResolver - Function to resolve entity references to table columns
487
+ * @param customConfigHandler - Optional handler for custom config
592
488
  */
593
- createPagination(entities, limit = 10, offset = 0, sort) {
594
- return createPagination(entities, limit, offset, sort);
489
+ buildTableConfig(entity, builders, tableResolver, customConfigHandler) {
490
+ if (!entity.options.indexes && !entity.options.foreignKeys && !entity.options.constraints && !entity.options.config) return;
491
+ return (self) => {
492
+ const configs = [];
493
+ if (entity.options.indexes) {
494
+ for (const indexDef of entity.options.indexes) if (typeof indexDef === "string") {
495
+ const columnName = this.toColumnName(indexDef);
496
+ const indexName = `${entity.name}_${columnName}_idx`;
497
+ if (self[indexDef]) configs.push(builders.index(indexName).on(self[indexDef]));
498
+ } else if (typeof indexDef === "object" && indexDef !== null) {
499
+ if ("column" in indexDef) {
500
+ const columnName = this.toColumnName(indexDef.column);
501
+ const indexName = indexDef.name || `${entity.name}_${columnName}_idx`;
502
+ if (self[indexDef.column]) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(self[indexDef.column]));
503
+ else configs.push(builders.index(indexName).on(self[indexDef.column]));
504
+ } else if ("columns" in indexDef) {
505
+ const columnNames = indexDef.columns.map((col) => this.toColumnName(col));
506
+ const indexName = indexDef.name || `${entity.name}_${columnNames.join("_")}_idx`;
507
+ const cols = indexDef.columns.map((col) => self[col]).filter(Boolean);
508
+ if (cols.length === indexDef.columns.length) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(...cols));
509
+ else configs.push(builders.index(indexName).on(...cols));
510
+ }
511
+ }
512
+ }
513
+ if (entity.options.foreignKeys) for (const fkDef of entity.options.foreignKeys) {
514
+ const columnNames = fkDef.columns.map((col) => this.toColumnName(col));
515
+ const cols = fkDef.columns.map((col) => self[col]).filter(Boolean);
516
+ if (cols.length === fkDef.columns.length) {
517
+ const fkName = fkDef.name || `${entity.name}_${columnNames.join("_")}_fk`;
518
+ const foreignColumns = fkDef.foreignColumns.map((colRef) => {
519
+ const entityCol = colRef();
520
+ if (!entityCol || !entityCol.entity || !entityCol.name) throw new Error(`Invalid foreign column reference in ${entity.name}`);
521
+ if (tableResolver) {
522
+ const foreignTable = tableResolver(entityCol.entity.name);
523
+ if (!foreignTable) throw new Error(`Foreign table ${entityCol.entity.name} not found for ${entity.name}`);
524
+ return foreignTable[entityCol.name];
525
+ }
526
+ return entityCol;
527
+ });
528
+ configs.push(builders.foreignKey({
529
+ name: fkName,
530
+ columns: cols,
531
+ foreignColumns
532
+ }));
533
+ }
534
+ }
535
+ if (entity.options.constraints) for (const constraintDef of entity.options.constraints) {
536
+ const columnNames = constraintDef.columns.map((col) => this.toColumnName(col));
537
+ const cols = constraintDef.columns.map((col) => self[col]).filter(Boolean);
538
+ if (cols.length === constraintDef.columns.length) {
539
+ if (constraintDef.unique) {
540
+ const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_unique`;
541
+ configs.push(builders.unique(constraintName).on(...cols));
542
+ }
543
+ if (constraintDef.check) {
544
+ const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_check`;
545
+ configs.push(builders.check(constraintName, constraintDef.check));
546
+ }
547
+ }
548
+ }
549
+ if (entity.options.config && customConfigHandler) configs.push(...customConfigHandler(entity.options.config, self));
550
+ else if (entity.options.config) {
551
+ const customConfigs = entity.options.config(self);
552
+ if (Array.isArray(customConfigs)) configs.push(...customConfigs);
553
+ }
554
+ return configs;
555
+ };
595
556
  }
596
557
  };
597
558
 
598
559
  //#endregion
599
- //#region ../../src/orm/services/Repository.ts
600
- var Repository = class {
601
- entity;
602
- provider;
603
- relationManager = $inject(PgRelationManager);
604
- queryManager = $inject(QueryManager);
605
- dateTimeProvider = $inject(DateTimeProvider);
606
- alepha = $inject(Alepha);
607
- constructor(entity, provider = DatabaseProvider) {
608
- this.entity = entity;
609
- this.provider = this.alepha.inject(provider);
610
- this.provider.registerEntity(entity);
611
- }
612
- /**
613
- * Represents the primary key of the table.
614
- * - Key is the name of the primary key column.
615
- * - Type is the type (TypeBox) of the primary key column.
616
- *
617
- * ID is mandatory. If the table does not have a primary key, it will throw an error.
618
- */
619
- get id() {
620
- return this.getPrimaryKey(this.entity.schema);
621
- }
622
- /**
623
- * Get Drizzle table object.
624
- */
625
- get table() {
626
- return this.provider.table(this.entity);
560
+ //#region ../../src/orm/services/PostgresModelBuilder.ts
561
+ var PostgresModelBuilder = class extends ModelBuilder {
562
+ schemas = /* @__PURE__ */ new Map();
563
+ getPgSchema(name) {
564
+ if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
565
+ const nsp = name !== "public" ? this.schemas.get(name) : {
566
+ enum: pgEnum,
567
+ table: pgTable
568
+ };
569
+ if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
570
+ return nsp;
627
571
  }
628
- /**
629
- * Get SQL table name. (from Drizzle table object)
630
- */
631
- get tableName() {
632
- return this.entity.name;
572
+ buildTable(entity, options) {
573
+ const tableName = entity.name;
574
+ if (options.tables.has(tableName)) return;
575
+ const nsp = this.getPgSchema(options.schema);
576
+ const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
577
+ const configFn = this.getTableConfig(entity, options.tables);
578
+ const table = nsp.table(tableName, columns, configFn);
579
+ options.tables.set(tableName, table);
633
580
  }
634
- /**
635
- * Getter for the database connection from the database provider.
636
- */
637
- get db() {
638
- return this.provider.db;
581
+ buildSequence(sequence, options) {
582
+ const sequenceName = sequence.name;
583
+ if (options.sequences.has(sequenceName)) return;
584
+ const nsp = this.getPgSchema(options.schema);
585
+ options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
639
586
  }
640
587
  /**
641
- * Execute a SQL query.
642
- *
643
- * This method allows executing raw SQL queries against the database.
644
- * This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
645
- *
646
- * You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
647
- *
648
- * @example
649
- * ```ts
650
- * class App {
651
- * repository = $repository({ ... });
652
- * async getAdults() {
653
- * const users = repository.table; // Drizzle table object
654
- * await repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
655
- * // or better
656
- * await repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
657
- * }
658
- * }
659
- * ```
588
+ * Get PostgreSQL-specific config builder for the table.
660
589
  */
661
- async query(query, schema$1) {
662
- const raw = typeof query === "function" ? query(this.table, this.db) : query;
663
- if (typeof raw === "string" && raw.includes("[object Object]")) throw new AlephaError("Invalid SQL query. Did you forget to call the 'sql' function?");
664
- return (await this.provider.execute(raw)).map((it) => {
665
- return this.clean(this.mapRawFieldsToEntity(it), schema$1 ?? this.entity.schema);
666
- });
590
+ getTableConfig(entity, tables) {
591
+ const pgBuilders = {
592
+ index,
593
+ uniqueIndex,
594
+ unique,
595
+ check,
596
+ foreignKey
597
+ };
598
+ const tableResolver = (entityName) => {
599
+ return tables.get(entityName);
600
+ };
601
+ return this.buildTableConfig(entity, pgBuilders, tableResolver);
667
602
  }
668
- /**
669
- * Map raw database fields to entity fields. (handles column name differences)
670
- */
671
- mapRawFieldsToEntity(row) {
672
- const entity = {};
673
- for (const key of Object.keys(row)) {
674
- entity[key] = row[key];
675
- for (const colKey of Object.keys(this.table)) if (this.table[colKey].name === key) {
676
- entity[colKey] = row[key];
677
- break;
603
+ schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
604
+ return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
605
+ let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
606
+ if ("default" in value && value.default != null) col = col.default(value.default);
607
+ if (PG_PRIMARY_KEY in value) col = col.primaryKey();
608
+ if (PG_REF in value) {
609
+ const config = value[PG_REF];
610
+ col = col.references(() => {
611
+ const ref = config.ref();
612
+ const table = tables.get(ref.entity.name);
613
+ if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
614
+ const target = table[ref.name];
615
+ if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
616
+ return target;
617
+ }, config.actions);
618
+ }
619
+ if (schema$1.required?.includes(key)) col = col.notNull();
620
+ return {
621
+ ...columns,
622
+ [key]: col
623
+ };
624
+ }, {});
625
+ };
626
+ mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
627
+ const key = this.toColumnName(fieldName);
628
+ if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
629
+ if (t.schema.isInteger(value)) {
630
+ if (PG_SERIAL in value) return pg$2.serial(key);
631
+ if (PG_IDENTITY in value) {
632
+ const options = value[PG_IDENTITY];
633
+ if (options.mode === "byDefault") return pg$2.integer().generatedByDefaultAsIdentity(options);
634
+ return pg$2.integer().generatedAlwaysAsIdentity(options);
678
635
  }
636
+ return pg$2.integer(key);
679
637
  }
680
- return entity;
681
- }
682
- /**
683
- * Get a Drizzle column from the table by his name.
684
- */
685
- col(name) {
686
- const column = this.table[name];
687
- if (!column) throw new AlephaError(`Invalid access. Column ${String(name)} not found in table ${this.tableName}`);
688
- return column;
689
- }
638
+ if (t.schema.isBigInt(value)) {
639
+ if (PG_IDENTITY in value) {
640
+ const options = value[PG_IDENTITY];
641
+ if (options.mode === "byDefault") return pg$2.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
642
+ return pg$2.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
643
+ }
644
+ }
645
+ if (t.schema.isNumber(value)) {
646
+ if (PG_IDENTITY in value) {
647
+ const options = value[PG_IDENTITY];
648
+ if (options.mode === "byDefault") return pg$2.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
649
+ return pg$2.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
650
+ }
651
+ if (value.format === "int64") return pg$2.bigint(key, { mode: "number" });
652
+ return pg$2.numeric(key);
653
+ }
654
+ if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
655
+ if (t.schema.isBoolean(value)) return pg$2.boolean(key);
656
+ if (t.schema.isObject(value)) return schema(key, value);
657
+ if (t.schema.isRecord(value)) return schema(key, value);
658
+ const isTypeEnum = (value$1) => t.schema.isUnsafe(value$1) && "type" in value$1 && value$1.type === "string" && "enum" in value$1 && Array.isArray(value$1.enum);
659
+ if (t.schema.isArray(value)) {
660
+ if (t.schema.isObject(value.items)) return schema(key, value);
661
+ if (t.schema.isRecord(value.items)) return schema(key, value);
662
+ if (t.schema.isString(value.items)) return pg$2.text(key).array();
663
+ if (t.schema.isInteger(value.items)) return pg$2.integer(key).array();
664
+ if (t.schema.isNumber(value.items)) return pg$2.numeric(key).array();
665
+ if (t.schema.isBoolean(value.items)) return pg$2.boolean(key).array();
666
+ if (isTypeEnum(value.items)) return pg$2.text(key).array();
667
+ }
668
+ if (isTypeEnum(value)) {
669
+ if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
670
+ if (PG_ENUM in value && value[PG_ENUM]) {
671
+ const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
672
+ if (enums.has(enumName)) {
673
+ const values = enums.get(enumName).enumValues.join(",");
674
+ const newValues = value.enum.join(",");
675
+ if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
676
+ }
677
+ enums.set(enumName, nsp.enum(enumName, value.enum));
678
+ return enums.get(enumName)(key);
679
+ }
680
+ return this.mapStringToColumn(key, value);
681
+ }
682
+ throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
683
+ };
690
684
  /**
691
- * Run a transaction.
685
+ * Map a string to a PG column.
686
+ *
687
+ * @param key The column name derived from the field name.
688
+ * @param value The JSON schema describing the field.
692
689
  */
693
- async transaction(transaction, config) {
694
- return await this.db.transaction(transaction, config);
690
+ mapStringToColumn = (key, value) => {
691
+ if ("format" in value) {
692
+ if (value.format === "uuid") {
693
+ if (PG_PRIMARY_KEY in value) return pg$2.uuid(key).defaultRandom();
694
+ return pg$2.uuid(key);
695
+ }
696
+ if (value.format === "byte") return byte(key);
697
+ if (value.format === "date-time") {
698
+ if (PG_CREATED_AT in value) return pg$2.timestamp(key, {
699
+ mode: "string",
700
+ withTimezone: true
701
+ }).defaultNow();
702
+ if (PG_UPDATED_AT in value) return pg$2.timestamp(key, {
703
+ mode: "string",
704
+ withTimezone: true
705
+ }).defaultNow();
706
+ return pg$2.timestamp(key, {
707
+ mode: "string",
708
+ withTimezone: true
709
+ });
710
+ }
711
+ if (value.format === "date") return pg$2.date(key, { mode: "string" });
712
+ }
713
+ return pg$2.text(key);
714
+ };
715
+ };
716
+
717
+ //#endregion
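The builder above maps JSON-schema fields onto Drizzle PG columns: `serial`/identity for flagged integers, `uuid` with `defaultRandom()` for UUID primary keys, `timestamptz` in string mode for `date-time` (with `defaultNow()` on created/updated fields), native PG enums when `PG_ENUM` is set, and `text` as the fallback. A minimal hand-written sketch of the columns it would emit for an illustrative entity; the table and field names here are hypothetical and not taken from the package.

```ts
// Sketch only — the Drizzle columns PostgresModelBuilder would emit for a
// hypothetical schema { id: uuid + PG_PRIMARY_KEY, name: required string,
// created_at: date-time + PG_CREATED_AT }. Names are illustrative.
import { pgTable, text, timestamp, uuid } from "drizzle-orm/pg-core";

export const users = pgTable("users", {
  // uuid + PG_PRIMARY_KEY  ->  pg.uuid(key).defaultRandom(), then .primaryKey()
  id: uuid("id").defaultRandom().primaryKey(),
  // plain string listed in `required`  ->  pg.text(key).notNull()
  name: text("name").notNull(),
  // date-time + PG_CREATED_AT  ->  timestamptz kept as an ISO string, defaulting to now()
  created_at: timestamp("created_at", { mode: "string", withTimezone: true }).defaultNow(),
});
```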
718
+ //#region ../../src/orm/providers/drivers/BunPostgresProvider.ts
719
+ const envSchema$4 = t.object({
720
+ DATABASE_URL: t.optional(t.text()),
721
+ POSTGRES_SCHEMA: t.optional(t.text())
722
+ });
723
+ /**
724
+ * Bun PostgreSQL provider using Drizzle ORM with Bun's native SQL client.
725
+ *
726
+ * This provider uses Bun's built-in SQL class for PostgreSQL connections,
727
+ * which provides excellent performance on the Bun runtime.
728
+ *
729
+ * @example
730
+ * ```ts
731
+ * // Set DATABASE_URL environment variable
732
+ * // DATABASE_URL=postgres://user:password@localhost:5432/database
733
+ *
734
+ * // Or configure programmatically
735
+ * alepha.with({
736
+ * provide: DatabaseProvider,
737
+ * use: BunPostgresProvider,
738
+ * });
739
+ * ```
740
+ */
741
+ var BunPostgresProvider = class extends DatabaseProvider {
742
+ log = $logger();
743
+ env = $env(envSchema$4);
744
+ kit = $inject(DrizzleKitProvider);
745
+ builder = $inject(PostgresModelBuilder);
746
+ client;
747
+ bunDb;
748
+ dialect = "postgresql";
749
+ get name() {
750
+ return "postgres";
695
751
  }
696
752
  /**
697
- * Start a SELECT query on the table.
753
+ * In testing mode, the schema name will be generated and deleted after the test.
698
754
  */
699
- rawSelect(opts = {}) {
700
- return (opts.tx ?? this.db).select().from(this.table);
755
+ schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
756
+ get url() {
757
+ if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
758
+ return this.env.DATABASE_URL;
701
759
  }
702
760
  /**
703
- * Start a SELECT DISTINCT query on the table.
761
+ * Execute a SQL statement.
704
762
  */
705
- rawSelectDistinct(opts = {}, columns = []) {
706
- const db$1 = opts.tx ?? this.db;
707
- const table = this.table;
708
- const fields = {};
709
- for (const column of columns) if (typeof column === "string") fields[column] = this.col(column);
710
- return db$1.selectDistinct(fields).from(table);
763
+ execute(statement) {
764
+ try {
765
+ return this.db.execute(statement);
766
+ } catch (error) {
767
+ throw new DbError("Error executing statement", error);
768
+ }
711
769
  }
712
770
  /**
713
- * Start an INSERT query on the table.
771
+ * Get Postgres schema used by this provider.
714
772
  */
715
- rawInsert(opts = {}) {
716
- return (opts.tx ?? this.db).insert(this.table);
773
+ get schema() {
774
+ if (this.schemaForTesting) return this.schemaForTesting;
775
+ if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
776
+ return "public";
717
777
  }
718
778
  /**
719
- * Start an UPDATE query on the table.
779
+ * Get the Drizzle Postgres database instance.
720
780
  */
721
- rawUpdate(opts = {}) {
722
- return (opts.tx ?? this.db).update(this.table);
781
+ get db() {
782
+ if (!this.bunDb) throw new AlephaError("Database not initialized");
783
+ return this.bunDb;
723
784
  }
724
- /**
725
- * Start a DELETE query on the table.
726
- */
727
- rawDelete(opts = {}) {
728
- return (opts.tx ?? this.db).delete(this.table);
785
+ async executeMigrations(migrationsFolder) {
786
+ const { migrate: migrate$3 } = await import("drizzle-orm/bun-sql/migrator");
787
+ await migrate$3(this.bunDb, { migrationsFolder });
729
788
  }
730
- /**
731
- * Create a Drizzle `select` query based on a JSON query object.
732
- *
733
- * > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
734
- */
735
- async findMany(query = {}, opts = {}) {
736
- await this.alepha.events.emit("repository:read:before", {
737
- tableName: this.tableName,
738
- query
739
- });
740
- const columns = query.columns ?? query.distinct;
741
- const builder = query.distinct ? this.rawSelectDistinct(opts, query.distinct) : this.rawSelect(opts);
742
- const joins = [];
743
- if (query.with) this.relationManager.buildJoins(this.provider, builder, joins, query.with, this.table);
744
- const where = this.withDeletedAt(query.where ?? {}, opts);
745
- builder.where(() => this.toSQL(where, joins));
746
- if (query.offset) {
747
- builder.offset(query.offset);
748
- if (this.provider.dialect === "sqlite" && !query.limit) query.limit = 1e3;
749
- }
750
- if (query.limit) builder.limit(query.limit);
751
- if (query.orderBy) {
752
- const orderByClauses = this.queryManager.normalizeOrderBy(query.orderBy);
753
- builder.orderBy(...orderByClauses.map((clause) => clause.direction === "desc" ? desc(this.col(clause.column)) : asc(this.col(clause.column))));
754
- }
755
- if (query.groupBy) builder.groupBy(...query.groupBy.map((key) => this.col(key)));
756
- if (opts.for) {
757
- if (typeof opts.for === "string") builder.for(opts.for);
758
- else if (opts.for) builder.for(opts.for.strength, opts.for.config);
789
+ onStart = $hook({
790
+ on: "start",
791
+ handler: async () => {
792
+ await this.connect();
793
+ if (!this.alepha.isServerless()) try {
794
+ await this.migrateLock.run();
795
+ } catch (error) {
796
+ throw new DbMigrationError(error);
797
+ }
759
798
  }
760
- try {
761
- let rows = await builder.execute();
762
- let schema$1 = this.entity.schema;
763
- if (columns) schema$1 = t.pick(schema$1, columns);
764
- if (joins.length) rows = rows.map((row) => {
765
- const rowSchema = {
766
- ...schema$1,
767
- properties: { ...schema$1.properties }
768
- };
769
- return this.relationManager.mapRowWithJoins(row[this.tableName], row, rowSchema, joins);
770
- });
771
- rows = rows.map((row) => {
772
- if (joins.length) {
773
- const joinedSchema = this.relationManager.buildSchemaWithJoins(schema$1, joins);
774
- return this.cleanWithJoins(row, joinedSchema, joins);
775
- }
776
- return this.clean(row, schema$1);
777
- });
778
- await this.alepha.events.emit("repository:read:after", {
779
- tableName: this.tableName,
780
- query,
781
- entities: rows
782
- });
783
- return rows;
784
- } catch (error) {
785
- throw new DbError("Query select has failed", error);
799
+ });
800
+ onStop = $hook({
801
+ on: "stop",
802
+ handler: async () => {
803
+ if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
804
+ if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
805
+ this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
806
+ await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
807
+ this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
808
+ }
809
+ await this.close();
786
810
  }
811
+ });
812
+ async connect() {
813
+ this.log.debug("Connect ..");
814
+ if (typeof Bun === "undefined") throw new AlephaError("BunPostgresProvider requires the Bun runtime. Use NodePostgresProvider for Node.js.");
815
+ const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sql");
816
+ this.client = new Bun.SQL(this.url);
817
+ await this.client.unsafe("SELECT 1");
818
+ this.bunDb = drizzle$3({
819
+ client: this.client,
820
+ logger: { logQuery: (query, params) => {
821
+ this.log.trace(query, { params });
822
+ } }
823
+ });
824
+ this.log.info("Connection OK");
787
825
  }
788
- /**
789
- * Find a single entity.
790
- */
791
- async findOne(query, opts = {}) {
792
- const [entity] = await this.findMany({
793
- limit: 1,
794
- ...query
795
- }, opts);
796
- if (!entity) throw new DbEntityNotFoundError(this.tableName);
797
- return entity;
798
- }
799
- /**
800
- * Find entities with pagination.
801
- *
802
- * It uses the same parameters as `find()`, but adds pagination metadata to the response.
803
- *
804
- * > Pagination CAN also do a count query to get the total number of elements.
805
- */
806
- async paginate(pagination = {}, query = {}, opts = {}) {
807
- const limit = query.limit ?? pagination.size ?? 10;
808
- const page = pagination.page ?? 0;
809
- const offset = query.offset ?? page * limit;
810
- let orderBy = query.orderBy;
811
- if (!query.orderBy && pagination.sort) orderBy = this.queryManager.parsePaginationSort(pagination.sort);
812
- const now = Date.now();
813
- const timers = {
814
- query: now,
815
- count: now
816
- };
817
- const tasks = [];
818
- tasks.push(this.findMany({
819
- offset,
820
- limit: limit + 1,
821
- orderBy,
822
- ...query
823
- }, opts).then((it) => {
824
- timers.query = Date.now() - timers.query;
825
- return it;
826
- }));
827
- if (opts.count) {
828
- const where = isSQLWrapper(query.where) ? query.where : query.where ? this.toSQL(query.where) : void 0;
829
- tasks.push(this.db.$count(this.table, where).then((it) => {
830
- timers.count = Date.now() - timers.count;
831
- return it;
832
- }));
826
+ async close() {
827
+ if (this.client) {
828
+ this.log.debug("Close...");
829
+ await this.client.close();
830
+ this.client = void 0;
831
+ this.bunDb = void 0;
832
+ this.log.info("Connection closed");
833
833
  }
834
- const [entities, countResult] = await Promise.all(tasks);
835
- let sortMetadata;
836
- if (orderBy) sortMetadata = this.queryManager.normalizeOrderBy(orderBy);
837
- const response = this.queryManager.createPagination(entities, limit, offset, sortMetadata);
838
- response.page.totalElements = countResult;
839
- if (countResult != null) response.page.totalPages = Math.ceil(countResult / limit);
840
- return response;
841
- }
842
- /**
843
- * Find an entity by ID.
844
- *
845
- * This is a convenience method for `findOne` with a where clause on the primary key.
846
- * If you need more complex queries, use `findOne` instead.
847
- */
848
- async findById(id, opts = {}) {
849
- return await this.findOne({ where: this.getWhereId(id) }, opts);
850
834
  }
851
- /**
852
- * Helper to create a type-safe query object.
853
- */
854
- createQuery() {
855
- return {};
835
+ migrateLock = $lock({ handler: async () => {
836
+ await this.migrate();
837
+ } });
838
+ };
839
+
840
+ //#endregion
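Registration is already covered by the class's own `@example`; for orientation, here is roughly what its `connect()` boils down to on the Bun runtime, condensed from the code above. The connection string is a placeholder, and logging and error handling are omitted.

```ts
// Rough equivalent of BunPostgresProvider.connect(); sketch only.
import { SQL } from "bun";
import { drizzle } from "drizzle-orm/bun-sql";

const client = new SQL(process.env.DATABASE_URL ?? "postgres://user:password@localhost:5432/database");
await client.unsafe("SELECT 1"); // same connectivity check the provider runs
const db = drizzle({ client });  // migrations are then applied under a $lock, unless serverless
```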
841
+ //#region ../../src/orm/services/SqliteModelBuilder.ts
842
+ var SqliteModelBuilder = class extends ModelBuilder {
843
+ buildTable(entity, options) {
844
+ const tableName = entity.name;
845
+ if (options.tables.has(tableName)) return;
846
+ const table = sqliteTable(tableName, this.schemaToSqliteColumns(tableName, entity.schema, options.enums, options.tables), this.getTableConfig(entity, options.tables));
847
+ options.tables.set(tableName, table);
856
848
  }
857
- /**
858
- * Helper to create a type-safe where clause.
859
- */
860
- createQueryWhere() {
861
- return {};
849
+ buildSequence(sequence, options) {
850
+ throw new AlephaError("SQLite does not support sequences");
862
851
  }
863
852
  /**
864
- * Create an entity.
865
- *
866
- * @param data The entity to create.
867
- * @param opts The options for creating the entity.
868
- * @returns The ID of the created entity.
853
+ * Get SQLite-specific config builder for the table.
869
854
  */
870
- async create(data, opts = {}) {
871
- await this.alepha.events.emit("repository:create:before", {
872
- tableName: this.tableName,
873
- data
855
+ getTableConfig(entity, tables) {
856
+ const sqliteBuilders = {
857
+ index: index$1,
858
+ uniqueIndex: uniqueIndex$1,
859
+ unique: unique$1,
860
+ check: check$1,
861
+ foreignKey: foreignKey$1
862
+ };
863
+ const tableResolver = (entityName) => {
864
+ return tables.get(entityName);
865
+ };
866
+ return this.buildTableConfig(entity, sqliteBuilders, tableResolver, (config, self) => {
867
+ const customConfigs = config(self);
868
+ return Array.isArray(customConfigs) ? customConfigs : [];
874
869
  });
875
- try {
876
- const entity = await this.rawInsert(opts).values(this.cast(data ?? {}, true)).returning(this.table).then(([it]) => this.clean(it, this.entity.schema));
877
- await this.alepha.events.emit("repository:create:after", {
878
- tableName: this.tableName,
879
- data,
880
- entity
881
- });
882
- return entity;
883
- } catch (error) {
884
- throw this.handleError(error, "Insert query has failed");
885
- }
886
870
  }
887
- /**
888
- * Create many entities.
889
- *
890
- * Inserts are batched in chunks of 1000 to avoid hitting database limits.
891
- *
892
- * @param values The entities to create.
893
- * @param opts The statement options.
894
- * @returns The created entities.
895
- */
896
- async createMany(values, opts = {}) {
897
- if (values.length === 0) return [];
898
- await this.alepha.events.emit("repository:create:before", {
899
- tableName: this.tableName,
900
- data: values
901
- });
902
- const batchSize = opts.batchSize ?? 1e3;
903
- const allEntities = [];
904
- try {
905
- for (let i = 0; i < values.length; i += batchSize) {
906
- const batch = values.slice(i, i + batchSize);
907
- const entities = await this.rawInsert(opts).values(batch.map((data) => this.cast(data, true))).returning(this.table).then((rows) => rows.map((it) => this.clean(it, this.entity.schema)));
908
- allEntities.push(...entities);
871
+ schemaToSqliteColumns = (tableName, schema$1, enums, tables) => {
872
+ return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
873
+ let col = this.mapFieldToSqliteColumn(tableName, key, value, enums);
874
+ if ("default" in value && value.default != null) col = col.default(value.default);
875
+ if (PG_PRIMARY_KEY in value) col = col.primaryKey();
876
+ if (PG_REF in value) {
877
+ const config = value[PG_REF];
878
+ col = col.references(() => {
879
+ const ref = config.ref();
880
+ const table = tables.get(ref.entity.name);
881
+ if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
882
+ const target = table[ref.name];
883
+ if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
884
+ return target;
885
+ }, config.actions);
909
886
  }
910
- await this.alepha.events.emit("repository:create:after", {
911
- tableName: this.tableName,
912
- data: values,
913
- entity: allEntities
914
- });
915
- return allEntities;
916
- } catch (error) {
917
- throw this.handleError(error, "Insert query has failed");
887
+ if (schema$1.required?.includes(key)) col = col.notNull();
888
+ return {
889
+ ...columns,
890
+ [key]: col
891
+ };
892
+ }, {});
893
+ };
894
+ mapFieldToSqliteColumn = (tableName, fieldName, value, enums) => {
895
+ const key = this.toColumnName(fieldName);
896
+ if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
897
+ if (t.schema.isInteger(value)) {
898
+ if (PG_SERIAL in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
899
+ return pg$1.integer(key);
918
900
  }
919
- }
920
- /**
921
- * Find an entity and update it.
922
- */
923
- async updateOne(where, data, opts = {}) {
924
- await this.alepha.events.emit("repository:update:before", {
925
- tableName: this.tableName,
926
- where,
927
- data
928
- });
929
- let row = data;
930
- const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
931
- if (updatedAtField) row[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
932
- where = this.withDeletedAt(where, opts);
933
- row = this.cast(row, false);
934
- delete row[this.id.key];
935
- const response = await this.rawUpdate(opts).set(row).where(this.toSQL(where)).returning(this.table).catch((error) => {
936
- throw this.handleError(error, "Update query has failed");
937
- });
938
- if (!response[0]) throw new DbEntityNotFoundError(this.tableName);
939
- try {
940
- const entity = this.clean(response[0], this.entity.schema);
941
- await this.alepha.events.emit("repository:update:after", {
942
- tableName: this.tableName,
943
- where,
944
- data,
945
- entities: [entity]
946
- });
947
- return entity;
948
- } catch (error) {
949
- throw this.handleError(error, "Update query has failed");
901
+ if (t.schema.isBigInt(value)) {
902
+ if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
903
+ return pg$1.integer(key, { mode: "number" });
950
904
  }
951
- }
952
- /**
953
- * Save a given entity.
954
- *
955
- * @example
956
- * ```ts
957
- * const entity = await repository.findById(1);
958
- * entity.name = "New Name"; // update a field
959
- * delete entity.description; // delete a field
960
- * await repository.save(entity);
961
- * ```
962
- *
963
- * Difference with `updateById/updateOne`:
964
- *
965
- * - requires the entity to be fetched first (whole object is expected)
966
- * - check pg.version() if present -> optimistic locking
967
- * - validate entity against schema
968
- * - undefined values will be set to null, not ignored!
969
- *
970
- * @see {@link DbVersionMismatchError}
971
- */
972
- async save(entity, opts = {}) {
973
- const row = entity;
974
- const id = row[this.id.key];
975
- if (id == null) throw new AlephaError("Cannot save entity without ID - missing primary key in value");
976
- for (const key of Object.keys(this.entity.schema.properties)) if (row[key] === void 0) row[key] = null;
977
- let where = this.createQueryWhere();
978
- where.id = { eq: id };
979
- const versionField = getAttrFields(this.entity.schema, PG_VERSION)?.[0];
980
- if (versionField && typeof row[versionField.key] === "number") {
981
- where = { and: [where, { [versionField.key]: { eq: row[versionField.key] } }] };
982
- row[versionField.key] += 1;
905
+ if (t.schema.isNumber(value)) {
906
+ if (PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
907
+ return pg$1.numeric(key);
983
908
  }
984
- try {
985
- const newValue = await this.updateOne(where, row, opts);
986
- for (const key of Object.keys(this.entity.schema.properties)) row[key] = void 0;
987
- Object.assign(row, newValue);
988
- } catch (error) {
989
- if (error instanceof DbEntityNotFoundError && versionField) try {
990
- await this.findById(id);
991
- throw new DbVersionMismatchError(this.tableName, id);
992
- } catch (lookupError) {
993
- if (lookupError instanceof DbEntityNotFoundError) throw error;
994
- if (lookupError instanceof DbVersionMismatchError) throw lookupError;
995
- throw lookupError;
996
- }
997
- throw error;
998
- }
999
- }
1000
- /**
1001
- * Find an entity by ID and update it.
1002
- */
1003
- async updateById(id, data, opts = {}) {
1004
- return await this.updateOne(this.getWhereId(id), data, opts);
1005
- }
1006
- /**
1007
- * Find many entities and update all of them.
1008
- */
1009
- async updateMany(where, data, opts = {}) {
1010
- await this.alepha.events.emit("repository:update:before", {
1011
- tableName: this.tableName,
1012
- where,
1013
- data
1014
- });
1015
- const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
1016
- if (updatedAtField) data[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
1017
- where = this.withDeletedAt(where, opts);
1018
- data = this.cast(data, false);
1019
- try {
1020
- const entities = await this.rawUpdate(opts).set(data).where(this.toSQL(where)).returning();
1021
- await this.alepha.events.emit("repository:update:after", {
1022
- tableName: this.tableName,
1023
- where,
1024
- data,
1025
- entities
1026
- });
1027
- return entities.map((it) => it[this.id.key]);
1028
- } catch (error) {
1029
- throw this.handleError(error, "Update query has failed");
909
+ if (t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
910
+ if (t.schema.isBoolean(value)) return this.sqliteBool(key, value);
911
+ if (t.schema.isObject(value)) return this.sqliteJson(key, value);
912
+ if (t.schema.isRecord(value)) return this.sqliteJson(key, value);
913
+ if (t.schema.isAny(value)) return this.sqliteJson(key, value);
914
+ if (t.schema.isArray(value)) {
915
+ if (t.schema.isObject(value.items)) return this.sqliteJson(key, value);
916
+ if (t.schema.isRecord(value.items)) return this.sqliteJson(key, value);
917
+ if (t.schema.isAny(value.items)) return this.sqliteJson(key, value);
918
+ if (t.schema.isString(value.items)) return this.sqliteJson(key, value);
919
+ if (t.schema.isInteger(value.items)) return this.sqliteJson(key, value);
920
+ if (t.schema.isNumber(value.items)) return this.sqliteJson(key, value);
921
+ if (t.schema.isBoolean(value.items)) return this.sqliteJson(key, value);
1030
922
  }
1031
- }
1032
- /**
1033
- * Find many and delete all of them.
1034
- * @returns Array of deleted entity IDs
1035
- */
1036
- async deleteMany(where = {}, opts = {}) {
1037
- const deletedAt = this.deletedAt();
1038
- if (deletedAt && !opts.force) return await this.updateMany(where, { [deletedAt.key]: opts.now ?? this.dateTimeProvider.nowISOString() }, opts);
1039
- await this.alepha.events.emit("repository:delete:before", {
1040
- tableName: this.tableName,
1041
- where
1042
- });
1043
- try {
1044
- const ids = (await this.rawDelete(opts).where(this.toSQL(where)).returning({ id: this.table[this.id.key] })).map((row) => row.id);
1045
- await this.alepha.events.emit("repository:delete:after", {
1046
- tableName: this.tableName,
1047
- where,
1048
- ids
1049
- });
1050
- return ids;
1051
- } catch (error) {
1052
- throw new DbError("Delete query has failed", error);
923
+ if (t.schema.isUnsafe(value) && "type" in value && value.type === "string") return this.mapStringToSqliteColumn(key, value);
924
+ throw new Error(`Unsupported schema for field '${tableName}.${fieldName}' (schema: ${JSON.stringify(value)})`);
925
+ };
926
+ mapStringToSqliteColumn = (key, value) => {
927
+ if (value.format === "uuid") {
928
+ if (PG_PRIMARY_KEY in value) return pg$1.text(key).primaryKey().$defaultFn(() => randomUUID());
929
+ return pg$1.text(key);
1053
930
  }
1054
- }
1055
- /**
1056
- * Delete all entities.
1057
- * @returns Array of deleted entity IDs
1058
- */
1059
- clear(opts = {}) {
1060
- return this.deleteMany({}, opts);
1061
- }
1062
- /**
1063
- * Delete the given entity.
1064
- *
1065
- * You must fetch the entity first in order to delete it.
1066
- * @returns Array containing the deleted entity ID
1067
- */
1068
- async destroy(entity, opts = {}) {
1069
- const id = entity[this.id.key];
1070
- if (id == null) throw new AlephaError("Cannot destroy entity without ID");
1071
- const deletedAt = this.deletedAt();
1072
- if (deletedAt && !opts.force) {
1073
- opts.now ??= this.dateTimeProvider.nowISOString();
1074
- entity[deletedAt.key] = opts.now;
931
+ if (value.format === "byte") return this.sqliteJson(key, value);
932
+ if (value.format === "date-time") {
933
+ if (PG_CREATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
934
+ if (PG_UPDATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
935
+ return this.sqliteDateTime(key, {});
1075
936
  }
1076
- return await this.deleteById(id, opts);
1077
- }
1078
- /**
1079
- * Find an entity and delete it.
1080
- * @returns Array of deleted entity IDs (should contain at most one ID)
1081
- */
1082
- async deleteOne(where = {}, opts = {}) {
1083
- return await this.deleteMany(where, opts);
1084
- }
1085
- /**
1086
- * Find an entity by ID and delete it.
1087
- * @returns Array containing the deleted entity ID
1088
- * @throws DbEntityNotFoundError if the entity is not found
1089
- */
1090
- async deleteById(id, opts = {}) {
1091
- const result = await this.deleteMany(this.getWhereId(id), opts);
1092
- if (result.length === 0) throw new DbEntityNotFoundError(`Entity with ID ${id} not found in ${this.tableName}`);
1093
- return result;
1094
- }
1095
- /**
1096
- * Count entities.
1097
- */
1098
- async count(where = {}, opts = {}) {
1099
- where = this.withDeletedAt(where, opts);
1100
- return (opts.tx ?? this.db).$count(this.table, this.toSQL(where));
1101
- }
1102
- conflictMessagePattern = "duplicate key value violates unique constraint";
1103
- handleError(error, message) {
1104
- if (!(error instanceof Error)) return new DbError(message);
1105
- if (error.cause?.message.includes(this.conflictMessagePattern) || error.message.includes(this.conflictMessagePattern)) return new DbConflictError(message, error);
1106
- return new DbError(message, error);
1107
- }
1108
- withDeletedAt(where, opts = {}) {
1109
- if (opts.force) return where;
1110
- const deletedAt = this.deletedAt();
1111
- if (!deletedAt) return where;
1112
- return { and: [where, { [deletedAt.key]: { isNull: true } }] };
1113
- }
1114
- deletedAt() {
1115
- const deletedAtFields = getAttrFields(this.entity.schema, PG_DELETED_AT);
1116
- if (deletedAtFields.length > 0) return deletedAtFields[0];
1117
- }
1118
- /**
1119
- * Convert something to valid Pg Insert Value.
1120
- */
1121
- cast(data, insert) {
1122
- const schema$1 = insert ? this.entity.insertSchema : t.partial(this.entity.updateSchema);
1123
- return this.alepha.codec.encode(schema$1, data);
1124
- }
1125
- /**
1126
- * Transform a row from the database into a clean entity.
1127
- */
1128
- clean(row, schema$1) {
1129
- for (const key of Object.keys(schema$1.properties)) {
1130
- const value = schema$1.properties[key];
1131
- if (typeof row[key] === "string") {
1132
- if (t.schema.isDateTime(value)) row[key] = this.dateTimeProvider.of(row[key]).toISOString();
1133
- else if (t.schema.isDate(value)) row[key] = this.dateTimeProvider.of(`${row[key]}T00:00:00Z`).toISOString().split("T")[0];
1134
- }
1135
- if (typeof row[key] === "bigint" && t.schema.isBigInt(value)) row[key] = row[key].toString();
937
+ if (value.format === "date") return this.sqliteDate(key, {});
938
+ return pg$1.text(key);
939
+ };
940
+ sqliteJson = (name, document) => pg$1.customType({
941
+ dataType: () => "text",
942
+ toDriver: (value) => JSON.stringify(value),
943
+ fromDriver: (value) => {
944
+ return value && typeof value === "string" ? JSON.parse(value) : value;
1136
945
  }
1137
- return this.alepha.codec.decode(schema$1, row);
1138
- }
1139
- /**
1140
- * Clean a row with joins recursively
1141
- */
1142
- cleanWithJoins(row, schema$1, joins, parentPath) {
1143
- const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
1144
- const cleanRow = { ...row };
1145
- const joinedData = {};
1146
- for (const join of joinsAtThisLevel) {
1147
- joinedData[join.key] = cleanRow[join.key];
1148
- delete cleanRow[join.key];
946
+ })(name, { document }).$type();
947
+ sqliteDateTime = pg$1.customType({
948
+ dataType: () => "integer",
949
+ toDriver: (value) => new Date(value).getTime(),
950
+ fromDriver: (value) => {
951
+ return new Date(value).toISOString();
1149
952
  }
1150
- const entity = this.clean(cleanRow, schema$1);
1151
- for (const join of joinsAtThisLevel) {
1152
- const joinedValue = joinedData[join.key];
1153
- if (joinedValue != null) {
1154
- const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
1155
- if (joins.filter((j) => j.parent === joinPath).length > 0) entity[join.key] = this.cleanWithJoins(joinedValue, join.schema, joins, joinPath);
1156
- else entity[join.key] = this.clean(joinedValue, join.schema);
1157
- } else entity[join.key] = void 0;
953
+ });
954
+ sqliteBool = pg$1.customType({
955
+ dataType: () => "integer",
956
+ toDriver: (value) => value ? 1 : 0,
957
+ fromDriver: (value) => value === 1
958
+ });
959
+ sqliteDate = pg$1.customType({
960
+ dataType: () => "integer",
961
+ toDriver: (value) => new Date(value).getTime(),
962
+ fromDriver: (value) => {
963
+ return new Date(value).toISOString().split("T")[0];
1158
964
  }
1159
- return entity;
1160
- }
1161
- /**
1162
- * Convert a where clause to SQL.
1163
- */
1164
- toSQL(where, joins) {
1165
- return this.queryManager.toSQL(where, {
1166
- schema: this.entity.schema,
1167
- col: (name) => {
1168
- return this.col(name);
1169
- },
1170
- joins,
1171
- dialect: this.provider.dialect
1172
- });
1173
- }
1174
- /**
1175
- * Get the where clause for an ID.
1176
- *
1177
- * @param id The ID to get the where clause for.
1178
- * @returns The where clause for the ID.
1179
- */
1180
- getWhereId(id) {
1181
- return { [this.id.key]: { eq: t.schema.isString(this.id.type) ? String(id) : Number(id) } };
1182
- }
1183
- /**
1184
- * Find a primary key in the schema.
1185
- */
1186
- getPrimaryKey(schema$1) {
1187
- const primaryKeys = getAttrFields(schema$1, PG_PRIMARY_KEY);
1188
- if (primaryKeys.length === 0) throw new AlephaError("Primary key not found in schema");
1189
- if (primaryKeys.length > 1) throw new AlephaError(`Multiple primary keys (${primaryKeys.length}) are not supported`);
1190
- return {
1191
- key: primaryKeys[0].key,
1192
- col: this.col(primaryKeys[0].key),
1193
- type: primaryKeys[0].type
1194
- };
1195
- }
965
+ });
1196
966
  };
1197
967
 
1198
968
  //#endregion
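Because SQLite has no native JSON, boolean, or timestamptz types, the builder above serialises JSON to TEXT and stores booleans and dates as INTEGER. A sketch of the same trick with Drizzle's `customType`, mirroring `sqliteJson` and `sqliteBool`; these names and exports are illustrative, not part of the package.

```ts
// Sketch of the storage strategy used by SqliteModelBuilder (illustrative names).
import { customType } from "drizzle-orm/sqlite-core";

const jsonText = customType<{ data: unknown; driverData: string }>({
  dataType: () => "text",
  toDriver: (value) => JSON.stringify(value),
  fromDriver: (value) => (typeof value === "string" && value ? JSON.parse(value) : value),
});

const intBool = customType<{ data: boolean; driverData: number }>({
  dataType: () => "integer",
  toDriver: (value) => (value ? 1 : 0),
  fromDriver: (value) => value === 1,
});
```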
1199
- //#region ../../src/orm/providers/RepositoryProvider.ts
1200
- var RepositoryProvider = class {
1201
- alepha = $inject(Alepha);
1202
- registry = /* @__PURE__ */ new Map();
1203
- getRepositories(provider) {
1204
- const repositories = this.alepha.services(Repository);
1205
- if (provider) return repositories.filter((it) => it.provider === provider);
1206
- return repositories;
969
+ //#region ../../src/orm/providers/drivers/BunSqliteProvider.ts
970
+ const envSchema$3 = t.object({ DATABASE_URL: t.optional(t.text()) });
971
+ /**
972
+ * Configuration options for the Bun SQLite database provider.
973
+ */
974
+ const bunSqliteOptions = $atom({
975
+ name: "alepha.postgres.bun-sqlite.options",
976
+ schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
977
+ default: {}
978
+ });
979
+ /**
980
+ * Bun SQLite provider using Drizzle ORM with Bun's native SQLite client.
981
+ *
982
+ * This provider uses Bun's built-in `bun:sqlite` for SQLite connections,
983
+ * which provides excellent performance on the Bun runtime.
984
+ *
985
+ * @example
986
+ * ```ts
987
+ * // Set DATABASE_URL environment variable
988
+ * // DATABASE_URL=sqlite://./my-database.db
989
+ *
990
+ * // Or configure programmatically
991
+ * alepha.with({
992
+ * provide: DatabaseProvider,
993
+ * use: BunSqliteProvider,
994
+ * });
995
+ *
996
+ * // Or use options atom
997
+ * alepha.store.mut(bunSqliteOptions, (old) => ({
998
+ * ...old,
999
+ * path: ":memory:",
1000
+ * }));
1001
+ * ```
1002
+ */
1003
+ var BunSqliteProvider = class extends DatabaseProvider {
1004
+ kit = $inject(DrizzleKitProvider);
1005
+ log = $logger();
1006
+ env = $env(envSchema$3);
1007
+ builder = $inject(SqliteModelBuilder);
1008
+ options = $use(bunSqliteOptions);
1009
+ sqlite;
1010
+ bunDb;
1011
+ get name() {
1012
+ return "sqlite";
1207
1013
  }
1208
- getRepository(entity) {
1209
- const RepositoryClass = this.createClassRepository(entity);
1210
- return this.alepha.inject(RepositoryClass);
1014
+ dialect = "sqlite";
1015
+ get url() {
1016
+ const path = this.options.path ?? this.env.DATABASE_URL;
1017
+ if (path) {
1018
+ if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
1019
+ return path;
1020
+ }
1021
+ if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
1022
+ else return "node_modules/.alepha/bun-sqlite.db";
1211
1023
  }
1212
- createClassRepository(entity) {
1213
- let name = entity.name.charAt(0).toUpperCase() + entity.name.slice(1);
1214
- if (name.endsWith("s")) name = name.slice(0, -1);
1215
- name = `${name}Repository`;
1216
- if (this.registry.has(entity)) return this.registry.get(entity);
1217
- class GenericRepository extends Repository {
1218
- constructor() {
1219
- super(entity);
1024
+ get db() {
1025
+ if (!this.bunDb) throw new AlephaError("Database not initialized");
1026
+ return this.bunDb;
1027
+ }
1028
+ async execute(query) {
1029
+ return this.bunDb.all(query);
1030
+ }
1031
+ onStart = $hook({
1032
+ on: "start",
1033
+ handler: async () => {
1034
+ if (typeof Bun === "undefined") throw new AlephaError("BunSqliteProvider requires the Bun runtime. Use NodeSqliteProvider for Node.js.");
1035
+ const { Database } = await import("bun:sqlite");
1036
+ const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sqlite");
1037
+ const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
1038
+ if (filepath !== ":memory:" && filepath !== "") {
1039
+ const dirname = filepath.split("/").slice(0, -1).join("/");
1040
+ if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
1041
+ }
1042
+ this.sqlite = new Database(filepath);
1043
+ this.bunDb = drizzle$3({
1044
+ client: this.sqlite,
1045
+ logger: { logQuery: (query, params) => {
1046
+ this.log.trace(query, { params });
1047
+ } }
1048
+ });
1049
+ await this.migrate();
1050
+ this.log.info(`Using Bun SQLite database at ${filepath}`);
1051
+ }
1052
+ });
1053
+ onStop = $hook({
1054
+ on: "stop",
1055
+ handler: async () => {
1056
+ if (this.sqlite) {
1057
+ this.log.debug("Closing Bun SQLite connection...");
1058
+ this.sqlite.close();
1059
+ this.sqlite = void 0;
1060
+ this.bunDb = void 0;
1061
+ this.log.info("Bun SQLite connection closed");
1220
1062
  }
1221
1063
  }
1222
- Object.defineProperty(GenericRepository, "name", { value: name });
1223
- this.registry.set(entity, GenericRepository);
1224
- return GenericRepository;
1064
+ });
1065
+ async executeMigrations(migrationsFolder) {
1066
+ const { migrate: migrate$3 } = await import("drizzle-orm/bun-sqlite/migrator");
1067
+ await migrate$3(this.bunDb, { migrationsFolder });
1225
1068
  }
1226
1069
  };
1227
1070
 
1228
1071
  //#endregion
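Path resolution for this provider, restated from the code above: an explicit `path` option or `DATABASE_URL` wins; tests and serverless environments fall back to `:memory:`; otherwise the file lands in `node_modules/.alepha/bun-sqlite.db`. A hedged configuration sketch, assuming an `alepha` instance is in scope; the file path is an example value.

```ts
// Example wiring on the Bun runtime; the path is illustrative.
alepha.with({
  provide: DatabaseProvider,
  use: BunSqliteProvider,
});

// Point the provider at a file instead of the defaults described above.
alepha.store.mut(bunSqliteOptions, (old) => ({
  ...old,
  path: "./data/app.db",
}));
```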
1229
- //#region ../../src/orm/primitives/$repository.ts
1230
- /**
1231
- * Get the repository for the given entity.
1232
- */
1233
- const $repository = (entity) => {
1234
- const { alepha } = $context();
1235
- return $inject(alepha.inject(RepositoryProvider).createClassRepository(entity));
1236
- };
1237
-
1238
- //#endregion
1239
- //#region ../../src/orm/primitives/$sequence.ts
1072
+ //#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
1240
1073
  /**
1241
- * Creates a PostgreSQL sequence primitive for generating unique numeric values.
1074
+ * Cloudflare D1 SQLite provider using Drizzle ORM.
1075
+ *
1076
+ * This provider requires a D1 binding to be set via `cloudflareD1Options` before starting.
1077
+ * The binding is typically obtained from the Cloudflare Workers environment.
1078
+ *
1079
+ * @example
1080
+ * ```ts
1081
+ * // In your Cloudflare Worker
1082
+ * alepha.set(cloudflareD1Options, { binding: env.DB });
1083
+ * ```
1242
1084
  */
1243
- const $sequence = (options = {}) => {
1244
- return createPrimitive(SequencePrimitive, options);
1245
- };
1246
- var SequencePrimitive = class extends Primitive {
1247
- provider = this.$provider();
1248
- onInit() {
1249
- this.provider.registerSequence(this);
1250
- }
1085
+ var CloudflareD1Provider = class extends DatabaseProvider {
1086
+ kit = $inject(DrizzleKitProvider);
1087
+ log = $logger();
1088
+ builder = $inject(SqliteModelBuilder);
1089
+ env = $env(t.object({ DATABASE_URL: t.string({ description: "Expect to be 'cloudflare-d1://name:id'" }) }));
1090
+ d1;
1091
+ drizzleDb;
1251
1092
  get name() {
1252
- return this.options.name ?? this.config.propertyKey;
1093
+ return "sqlite";
1253
1094
  }
1254
- async next() {
1255
- return this.provider.execute(sql$1`SELECT nextval('${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"')`).then((rows) => Number(rows[0]?.nextval));
1095
+ get driver() {
1096
+ return "d1";
1256
1097
  }
1257
- async current() {
1258
- return this.provider.execute(sql$1`SELECT last_value FROM ${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"`).then((rows) => Number(rows[0]?.last_value));
1098
+ dialect = "sqlite";
1099
+ get url() {
1100
+ return this.env.DATABASE_URL;
1259
1101
  }
1260
- $provider() {
1261
- return this.options.provider ?? this.alepha.inject(DatabaseProvider);
1102
+ get db() {
1103
+ if (!this.drizzleDb) throw new AlephaError("D1 database not initialized");
1104
+ return this.drizzleDb;
1105
+ }
1106
+ async execute(query) {
1107
+ const { rows } = await this.db.run(query);
1108
+ return rows;
1109
+ }
1110
+ onStart = $hook({
1111
+ on: "start",
1112
+ handler: async () => {
1113
+ const [bindingName] = this.env.DATABASE_URL.replace("cloudflare-d1://", "").split(":");
1114
+ const cloudflareEnv = this.alepha.store.get("cloudflare.env");
1115
+ if (!cloudflareEnv) throw new AlephaError("Cloudflare Workers environment not found in Alepha store under 'cloudflare.env'.");
1116
+ const binding = cloudflareEnv[bindingName];
1117
+ if (!binding) throw new AlephaError(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
1118
+ this.d1 = binding;
1119
+ const { drizzle: drizzle$3 } = await import("drizzle-orm/d1");
1120
+ this.drizzleDb = drizzle$3(this.d1);
1121
+ await this.migrate();
1122
+ this.log.info("Using Cloudflare D1 database");
1123
+ }
1124
+ });
1125
+ async executeMigrations(migrationsFolder) {
1126
+ const { migrate: migrate$3 } = await import("drizzle-orm/d1/migrator");
1127
+ await migrate$3(this.db, { migrationsFolder });
1128
+ }
1129
+ /**
1130
+ * Override development migration to skip sync (not supported on D1).
1131
+ * D1 requires proper migrations to be applied.
1132
+ */
1133
+ async runDevelopmentMigration(migrationsFolder) {
1134
+ await this.executeMigrations(migrationsFolder);
1135
+ }
1136
+ /**
1137
+ * Override test migration to run migrations instead of sync.
1138
+ * D1 doesn't support schema synchronization.
1139
+ */
1140
+ async runTestMigration() {
1141
+ const migrationsFolder = this.getMigrationsFolder();
1142
+ try {
1143
+ await this.executeMigrations(migrationsFolder);
1144
+ } catch {
1145
+ this.log.warn("D1 migrations failed in test environment - ensure migrations exist");
1146
+ }
1262
1147
  }
1263
1148
  };
1264
- $sequence[KIND] = SequencePrimitive;
1265
1149
 
1266
1150
  //#endregion
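For D1, the provider parses the binding name out of `DATABASE_URL` (format `cloudflare-d1://<binding>:<id>`) and resolves that binding from the Workers `env` object stored under the `cloudflare.env` key. A sketch, assuming the Cloudflare adapter populates `cloudflare.env`; how that entry gets filled is outside the code shown here.

```ts
// Sketch only. Assumes DATABASE_URL=cloudflare-d1://DB:<database-id> and that the
// Workers adapter has already stored the runtime `env` under "cloudflare.env",
// which is the key this provider reads at start.
alepha.with({
  provide: DatabaseProvider,
  use: CloudflareD1Provider,
});
// On start the provider resolves env.DB, wraps it with drizzle-orm/d1, and applies
// the generated migrations; schema synchronization is not supported on D1.
```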
1267
- //#region ../../src/orm/providers/DrizzleKitProvider.ts
1268
- var DrizzleKitProvider = class {
1151
+ //#region ../../src/orm/providers/drivers/NodePostgresProvider.ts
1152
+ const envSchema$2 = t.object({
1153
+ DATABASE_URL: t.optional(t.text()),
1154
+ POSTGRES_SCHEMA: t.optional(t.text())
1155
+ });
1156
+ var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
1157
+ static SSL_MODES = [
1158
+ "require",
1159
+ "allow",
1160
+ "prefer",
1161
+ "verify-full"
1162
+ ];
1269
1163
  log = $logger();
1270
- alepha = $inject(Alepha);
1164
+ env = $env(envSchema$2);
1165
+ kit = $inject(DrizzleKitProvider);
1166
+ builder = $inject(PostgresModelBuilder);
1167
+ client;
1168
+ pg;
1169
+ dialect = "postgresql";
1170
+ get name() {
1171
+ return "postgres";
1172
+ }
1271
1173
  /**
1272
- * Synchronize database with current schema definitions.
1273
- *
1274
- * In development mode, it will generate and execute migrations based on the current state.
1275
- * In testing mode, it will generate migrations from scratch without applying them.
1276
- *
1277
- * Does nothing in production mode, you must handle migrations manually.
1174
+ * In testing mode, the schema name will be generated and deleted after the test.
1278
1175
  */
1279
- async synchronize(provider) {
1280
- if (this.alepha.isProduction()) {
1281
- this.log.warn("Synchronization skipped in production mode.");
1282
- return;
1283
- }
1284
- if (provider.schema !== "public") await this.createSchemaIfNotExists(provider, provider.schema);
1285
- const now = Date.now();
1286
- if (this.alepha.isTest()) {
1287
- const { statements } = await this.generateMigration(provider);
1288
- await this.executeStatements(statements, provider);
1289
- } else {
1290
- const entry = await this.loadDevMigrations(provider);
1291
- const { statements, snapshot } = await this.generateMigration(provider, entry?.snapshot ? JSON.parse(entry.snapshot) : void 0);
1292
- await this.executeStatements(statements, provider, true);
1293
- await this.saveDevMigrations(provider, snapshot, entry);
1294
- }
1295
- this.log.info(`Db '${provider.name}' synchronization OK [${Date.now() - now}ms]`);
1176
+ schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
1177
+ get url() {
1178
+ if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
1179
+ return this.env.DATABASE_URL;
1296
1180
  }
1297
1181
  /**
1298
- * Mostly used for testing purposes. You can generate SQL migration statements without executing them.
1182
+ * Execute a SQL statement.
1299
1183
  */
1300
- async generateMigration(provider, prevSnapshot) {
1301
- const kit = this.importDrizzleKit();
1302
- const models = this.getModels(provider);
1303
- if (Object.keys(models).length > 0) {
1304
- if (provider.dialect === "sqlite") {
1305
- const prev$1 = prevSnapshot ?? await kit.generateSQLiteDrizzleJson({});
1306
- const curr$1 = await kit.generateSQLiteDrizzleJson(models);
1307
- return {
1308
- models,
1309
- statements: await kit.generateSQLiteMigration(prev$1, curr$1),
1310
- snapshot: curr$1
1311
- };
1312
- }
1313
- const prev = prevSnapshot ?? await kit.generateDrizzleJson({});
1314
- const curr = await kit.generateDrizzleJson(models);
1315
- return {
1316
- models,
1317
- statements: await kit.generateMigration(prev, curr),
1318
- snapshot: curr
1319
- };
1184
+ execute(statement) {
1185
+ try {
1186
+ return this.db.execute(statement);
1187
+ } catch (error) {
1188
+ throw new DbError("Error executing statement", error);
1320
1189
  }
1321
- return {
1322
- models,
1323
- statements: [],
1324
- snapshot: {}
1325
- };
1326
1190
  }
1327
1191
  /**
1328
- * Load all tables, enums, sequences, etc. from the provider's repositories.
1192
+ * Get Postgres schema used by this provider.
1329
1193
  */
1330
- getModels(provider) {
1331
- const models = {};
1332
- for (const [key, value] of provider.tables.entries()) {
1333
- if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
1334
- models[key] = value;
1194
+ get schema() {
1195
+ if (this.schemaForTesting) return this.schemaForTesting;
1196
+ if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
1197
+ return "public";
1198
+ }
1199
+ /**
1200
+ * Get the Drizzle Postgres database instance.
1201
+ */
1202
+ get db() {
1203
+ if (!this.pg) throw new AlephaError("Database not initialized");
1204
+ return this.pg;
1205
+ }
1206
+ async executeMigrations(migrationsFolder) {
1207
+ await migrate(this.db, { migrationsFolder });
1208
+ }
1209
+ onStart = $hook({
1210
+ on: "start",
1211
+ handler: async () => {
1212
+ await this.connect();
1213
+ if (!this.alepha.isServerless()) try {
1214
+ await this.migrateLock.run();
1215
+ } catch (error) {
1216
+ throw new DbMigrationError(error);
1217
+ }
1335
1218
  }
1336
- for (const [key, value] of provider.enums.entries()) {
1337
- if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
1338
- models[key] = value;
1219
+ });
1220
+ onStop = $hook({
1221
+ on: "stop",
1222
+ handler: async () => {
1223
+ if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
1224
+ if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
1225
+ this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
1226
+ await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
1227
+ this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
1228
+ }
1229
+ await this.close();
1339
1230
  }
1340
- for (const [key, value] of provider.sequences.entries()) {
1341
- if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
1342
- models[key] = value;
1231
+ });
1232
+ async connect() {
1233
+ this.log.debug("Connect ..");
1234
+ const client = postgres(this.getClientOptions());
1235
+ await client`SELECT 1`;
1236
+ this.client = client;
1237
+ this.pg = drizzle$1(client, { logger: { logQuery: (query, params) => {
1238
+ this.log.trace(query, { params });
1239
+ } } });
1240
+ this.log.info("Connection OK");
1241
+ }
1242
+ async close() {
1243
+ if (this.client) {
1244
+ this.log.debug("Close...");
1245
+ await this.client.end();
1246
+ this.client = void 0;
1247
+ this.pg = void 0;
1248
+ this.log.info("Connection closed");
1343
1249
  }
1344
- return models;
1345
1250
  }
1251
+ migrateLock = $lock({ handler: async () => {
1252
+ await this.migrate();
1253
+ } });
1346
1254
  /**
1347
- * Load the migration snapshot from the database.
1255
+ * Map the DATABASE_URL to postgres client options.
1348
1256
  */
1349
- async loadDevMigrations(provider) {
1350
- const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
1351
- if (provider.url.includes(":memory:")) {
1352
- this.log.trace(`In-memory database detected for '${name}', skipping migration snapshot load.`);
1353
- return;
1257
+ getClientOptions() {
1258
+ const url = new URL(this.url);
1259
+ return {
1260
+ host: url.hostname,
1261
+ user: decodeURIComponent(url.username),
1262
+ database: decodeURIComponent(url.pathname.replace("/", "")),
1263
+ password: decodeURIComponent(url.password),
1264
+ port: Number(url.port || 5432),
1265
+ ssl: this.ssl(url),
1266
+ onnotice: () => {}
1267
+ };
1268
+ }
1269
+ ssl(url) {
1270
+ const mode = url.searchParams.get("sslmode");
1271
+ for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
1272
+ }
1273
+ };
1274
+
1275
+ //#endregion
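`getClientOptions()` above splits `DATABASE_URL` into postgres.js client options and only honours the `sslmode` values listed in `SSL_MODES`. A worked example of that mapping; the host and credentials are placeholders.

```ts
// DATABASE_URL=postgres://app%40corp:s3cret@db.internal:6432/orders?sslmode=require
// is mapped by getClientOptions() to roughly:
const options = {
  host: "db.internal",
  user: "app@corp",      // username/password are decodeURIComponent-ed
  database: "orders",
  password: "s3cret",
  port: 6432,            // defaults to 5432 when the URL carries no port
  ssl: "require",        // only require | allow | prefer | verify-full are honoured
  onnotice: () => {},    // NOTICE messages are silenced
};
```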
1276
+ //#region ../../src/orm/providers/drivers/NodeSqliteProvider.ts
1277
+ const envSchema$1 = t.object({ DATABASE_URL: t.optional(t.text()) });
1278
+ /**
1279
+ * Configuration options for the Node.js SQLite database provider.
1280
+ */
1281
+ const nodeSqliteOptions = $atom({
1282
+ name: "alepha.postgres.node-sqlite.options",
1283
+ schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
1284
+ default: {}
1285
+ });
1286
+ /**
1287
+ * Adds fake SQLite support in Node.js based on the Postgres interfaces.
1288
+ *
1289
+ * This is NOT a real SQLite provider; it's a workaround to use SQLite with Drizzle ORM.
1290
+ * This is NOT recommended for production use.
1291
+ */
1292
+ var NodeSqliteProvider = class extends DatabaseProvider {
1293
+ kit = $inject(DrizzleKitProvider);
1294
+ log = $logger();
1295
+ env = $env(envSchema$1);
1296
+ builder = $inject(SqliteModelBuilder);
1297
+ options = $use(nodeSqliteOptions);
1298
+ sqlite;
1299
+ get name() {
1300
+ return "sqlite";
1301
+ }
1302
+ dialect = "sqlite";
1303
+ get url() {
1304
+ const path = this.options.path ?? this.env.DATABASE_URL;
1305
+ if (path) {
1306
+ if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
1307
+ return path;
1354
1308
  }
1355
- if (provider.dialect === "sqlite") {
1356
- try {
1357
- const text = await readFile(`node_modules/.alepha/sqlite-${name}.json`, "utf-8");
1358
- return this.alepha.codec.decode(devMigrationsSchema, text);
1359
- } catch (e) {
1360
- this.log.trace(`No existing migration snapshot for '${name}'`, e);
1361
- }
1362
- return;
1309
+ if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
1310
+ else return "node_modules/.alepha/sqlite.db";
1311
+ }
1312
+ async execute(query) {
1313
+ const { sql: sql$2, params, method } = this.db.all(query).getQuery();
1314
+ this.log.trace(`${sql$2}`, params);
1315
+ const statement = this.sqlite.prepare(sql$2);
1316
+ if (method === "run") {
1317
+ statement.run(...params);
1318
+ return [];
1363
1319
  }
1364
- await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS "drizzle";`);
1365
- await provider.execute(sql$1`
1366
- CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_dev_migrations" (
1367
- "id" SERIAL PRIMARY KEY,
1368
- "name" TEXT NOT NULL,
1369
- "created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
1370
- "snapshot" TEXT NOT NULL
1371
- );
1372
- `);
1373
- const rows = await provider.run(sql$1`SELECT * FROM "drizzle"."__drizzle_dev_migrations" WHERE "name" = ${name} LIMIT 1`, devMigrationsSchema);
1374
- if (rows.length === 0) {
1375
- this.log.trace(`No existing migration snapshot for '${name}'`);
1376
- return;
1320
+ if (method === "get") {
1321
+ const data = statement.get(...params);
1322
+ return data ? [{ ...data }] : [];
1377
1323
  }
1378
- return this.alepha.codec.decode(devMigrationsSchema, rows[0]);
1324
+ return statement.all(...params);
1379
1325
  }
1380
- async saveDevMigrations(provider, curr, devMigrations) {
1381
- if (provider.url.includes(":memory:")) {
1382
- this.log.trace(`In-memory database detected for '${provider.constructor.name}', skipping migration snapshot save.`);
1383
- return;
1384
- }
1385
- const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
1386
- if (provider.dialect === "sqlite") {
1387
- const filePath = `node_modules/.alepha/sqlite-${name}.json`;
1388
- await mkdir("node_modules/.alepha", { recursive: true }).catch(() => null);
1389
- await writeFile(filePath, JSON.stringify({
1390
- id: devMigrations?.id ?? 1,
1391
- name,
1392
- created_at: /* @__PURE__ */ new Date(),
1393
- snapshot: JSON.stringify(curr)
1394
- }, null, 2));
1395
- this.log.debug(`Saved migration snapshot to '${filePath}'`);
1396
- return;
1326
+ db = drizzle$2(async (sql$2, params, method) => {
1327
+ const statement = this.sqlite.prepare(sql$2);
1328
+ this.log.trace(`${sql$2}`, { params });
1329
+ if (method === "get") {
1330
+ const data = statement.get(...params);
1331
+ return { rows: data ? [{ ...data }] : [] };
1397
1332
  }
1398
- if (!devMigrations) await provider.execute(sql$1`INSERT INTO "drizzle"."__drizzle_dev_migrations" ("name", "snapshot") VALUES (${name}, ${JSON.stringify(curr)})`);
1399
- else {
1400
- const newSnapshot = JSON.stringify(curr);
1401
- if (devMigrations.snapshot !== newSnapshot) await provider.execute(sql$1`UPDATE "drizzle"."__drizzle_dev_migrations" SET "snapshot" = ${newSnapshot} WHERE "id" = ${devMigrations.id}`);
1333
+ if (method === "run") {
1334
+ statement.run(...params);
1335
+ return { rows: [] };
1402
1336
  }
1403
- }
1404
- async executeStatements(statements, provider, catchErrors = false) {
1405
- let nErrors = 0;
1406
- for (const statement of statements) {
1407
- if (statement.startsWith("DROP SCHEMA")) continue;
1408
- try {
1409
- await provider.execute(sql$1.raw(statement));
1410
- } catch (error) {
1411
- const errorMessage = `Error executing statement: ${statement}`;
1412
- if (catchErrors) {
1413
- nErrors++;
1414
- this.log.warn(errorMessage, { context: [error] });
1415
- } else throw error;
1337
+ if (method === "all") return { rows: statement.all(...params).map((row) => Object.values(row)) };
1338
+ if (method === "values") return { rows: statement.all(...params).map((row) => Object.values(row)) };
1339
+ throw new AlephaError(`Unsupported method: ${method}`);
1340
+ });
1341
+ onStart = $hook({
1342
+ on: "start",
1343
+ handler: async () => {
1344
+ const { DatabaseSync } = await import("node:sqlite");
1345
+ const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
1346
+ if (filepath !== ":memory:" && filepath !== "") {
1347
+ const dirname = filepath.split("/").slice(0, -1).join("/");
1348
+ if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
1416
1349
  }
1350
+ this.sqlite = new DatabaseSync(filepath);
1351
+ await this.migrate();
1352
+ this.log.info(`Using SQLite database at ${filepath}`);
1417
1353
  }
1418
- if (nErrors > 0) this.log.warn(`Executed ${statements.length} statements with ${nErrors} errors.`);
1419
- }
1420
- async createSchemaIfNotExists(provider, schemaName) {
1421
- if (!/^[a-z0-9_]+$/i.test(schemaName)) throw new Error(`Invalid schema name: ${schemaName}. Must only contain alphanumeric characters and underscores.`);
1422
- const sqlSchema = sql$1.raw(schemaName);
1423
- if (schemaName.startsWith("test_")) {
1424
- this.log.info(`Drop test schema '${schemaName}' ...`, schemaName);
1425
- await provider.execute(sql$1`DROP SCHEMA IF EXISTS ${sqlSchema} CASCADE`);
1426
- }
1427
- this.log.debug(`Ensuring schema '${schemaName}' exists`);
1428
- await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS ${sqlSchema}`);
1354
+ });
1355
+ async executeMigrations(migrationsFolder) {
1356
+ await migrate$1(this.db, async (migrationQueries) => {
1357
+ this.log.debug("Executing migration queries", { migrationQueries });
1358
+ for (const query of migrationQueries) this.sqlite.prepare(query).run();
1359
+ }, { migrationsFolder });
1429
1360
  }
1430
- /**
1431
- * Try to load the official Drizzle Kit API.
1432
- * If not available, fallback to the local kit import.
1433
- */
1434
- importDrizzleKit() {
1361
+ };
1362
+
1363
+ //#endregion
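This provider drives `node:sqlite` through a Drizzle proxy callback (the `get`/`run`/`all`/`values` branches above) and resolves its path the same way as the Bun variant. A configuration sketch using the `nodeSqliteOptions` atom; the `store.mut` call mirrors the Bun provider's documented example, and the path value is illustrative.

```ts
// Example wiring on Node.js; per the note above, not recommended for production.
alepha.with({
  provide: DatabaseProvider,
  use: NodeSqliteProvider,
});

alepha.store.mut(nodeSqliteOptions, (old) => ({
  ...old,
  path: ":memory:", // or a file path such as "./data/dev.db"
}));
```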
1364
+ //#region ../../src/orm/providers/drivers/PglitePostgresProvider.ts
1365
+ const envSchema = t.object({ DATABASE_URL: t.optional(t.text()) });
1366
+ var PglitePostgresProvider = class PglitePostgresProvider extends DatabaseProvider {
1367
+ static importPglite() {
1435
1368
  try {
1436
- return createRequire(import.meta.url)("drizzle-kit/api");
1437
- } catch (_) {
1438
- throw new Error("Drizzle Kit is not installed. Please install it with `npm install -D drizzle-kit`.");
1369
+ return createRequire(import.meta.url)("@electric-sql/pglite");
1370
+ } catch {}
1371
+ }
1372
+ env = $env(envSchema);
1373
+ log = $logger();
1374
+ kit = $inject(DrizzleKitProvider);
1375
+ builder = $inject(PostgresModelBuilder);
1376
+ client;
1377
+ pglite;
1378
+ get name() {
1379
+ return "postgres";
1380
+ }
1381
+ get driver() {
1382
+ return "pglite";
1383
+ }
1384
+ dialect = "postgresql";
1385
+ get url() {
1386
+ let path = this.env.DATABASE_URL;
1387
+ if (!path) if (this.alepha.isTest()) path = ":memory:";
1388
+ else path = "node_modules/.alepha/pglite";
1389
+ else if (path.includes(":memory:")) path = ":memory:";
1390
+ else if (path.startsWith("file://")) path = path.replace("file://", "");
1391
+ return path;
1392
+ }
1393
+ get db() {
1394
+ if (!this.pglite) throw new AlephaError("Database not initialized");
1395
+ return this.pglite;
1396
+ }
1397
+ async execute(statement) {
1398
+ const { rows } = await this.db.execute(statement);
1399
+ return rows;
1400
+ }
1401
+ onStart = $hook({
1402
+ on: "start",
1403
+ handler: async () => {
1404
+ if (Object.keys(this.kit.getModels(this)).length === 0) return;
1405
+ const module = PglitePostgresProvider.importPglite();
1406
+ if (!module) throw new AlephaError("@electric-sql/pglite is not installed. Please install it to use the pglite driver.");
1407
+ const { drizzle: drizzle$3 } = createRequire(import.meta.url)("drizzle-orm/pglite");
1408
+ const path = this.url;
1409
+ if (path !== ":memory:") {
1410
+ await mkdir(path, { recursive: true }).catch(() => null);
1411
+ this.client = new module.PGlite(path);
1412
+ } else this.client = new module.PGlite();
1413
+ this.pglite = drizzle$3({ client: this.client });
1414
+ await this.migrate();
1415
+ this.log.info(`Using PGlite database at ${path}`);
1416
+ }
1417
+ });
1418
+ onStop = $hook({
1419
+ on: "stop",
1420
+ handler: async () => {
1421
+ if (this.client) {
1422
+ this.log.debug("Closing PGlite connection...");
1423
+ await this.client.close();
1424
+ this.client = void 0;
1425
+ this.pglite = void 0;
1426
+ this.log.info("PGlite connection closed");
1427
+ }
1439
1428
  }
1429
+ });
1430
+ async executeMigrations(migrationsFolder) {
1431
+ await migrate$2(this.db, { migrationsFolder });
1440
1432
  }
1441
1433
  };
1442
- const devMigrationsSchema = t.object({
1443
- id: t.number(),
1444
- name: t.text(),
1445
- snapshot: t.string(),
1446
- created_at: t.string()
1447
- });
1448
1434
 
1449
1435
  //#endregion
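The `url` getter above resolves where PGlite stores its data: no `DATABASE_URL` means `:memory:` under test and `node_modules/.alepha/pglite` otherwise, a `:memory:` URL stays in memory, and a `file://` URL is reduced to a plain path. A minimal sketch restating those rules (the sample values are illustrative, not taken from the package):

```ts
// Illustrative restatement of PglitePostgresProvider's url resolution; values are examples only.
const resolvePglitePath = (databaseUrl: string | undefined, isTest: boolean): string => {
  if (!databaseUrl) return isTest ? ":memory:" : "node_modules/.alepha/pglite";
  if (databaseUrl.includes(":memory:")) return ":memory:";
  if (databaseUrl.startsWith("file://")) return databaseUrl.replace("file://", "");
  return databaseUrl;
};

resolvePglitePath(undefined, true);             // ":memory:" (tests)
resolvePglitePath("file:///tmp/pglite", false); // "/tmp/pglite"
resolvePglitePath("./data/pglite", false);      // "./data/pglite"
```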
1450
- //#region ../../src/orm/errors/DbMigrationError.ts
1451
- var DbMigrationError = class extends DbError {
1452
- name = "DbMigrationError";
1453
- constructor(cause) {
1454
- super("Failed to migrate database", cause);
1436
+ //#region ../../src/orm/errors/DbConflictError.ts
1437
+ var DbConflictError = class extends DbError {
1438
+ name = "DbConflictError";
1439
+ status = 409;
1440
+ };
1441
+
1442
+ //#endregion
1443
+ //#region ../../src/orm/errors/DbEntityNotFoundError.ts
1444
+ var DbEntityNotFoundError = class extends DbError {
1445
+ name = "DbEntityNotFoundError";
1446
+ status = 404;
1447
+ constructor(entityName) {
1448
+ super(`Entity from '${entityName}' was not found`);
1455
1449
  }
1456
1450
  };
1457
1451
 
1458
1452
  //#endregion
1459
- //#region ../../src/orm/types/byte.ts
1453
+ //#region ../../src/orm/errors/DbVersionMismatchError.ts
1460
1454
  /**
1461
- * Postgres bytea type.
1455
+ * Error thrown when there is a version mismatch.
1456
+ * It's thrown by {@link Repository#save} when the updated entity version does not match the one in the database.
1457
+ * This is used for optimistic concurrency control.
1462
1458
  */
1463
- const byte = customType({ dataType: () => "bytea" });
1459
+ var DbVersionMismatchError = class extends DbError {
1460
+ name = "DbVersionMismatchError";
1461
+ constructor(table, id) {
1462
+ super(`Version mismatch for table '${table}' and id '${id}'`);
1463
+ }
1464
+ };
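`Repository.save()` further down in this file matches on the previous version value and bumps it, so a concurrent write surfaces as this error. A hedged usage sketch; the repository, columns, and import path are hypothetical, only the error class itself comes from this module:

```ts
// Hypothetical repository with a version column; import path for DbVersionMismatchError omitted.
async function renameDocument(documents: any, id: string) {
  try {
    const doc = await documents.findById(id);
    doc.title = "Updated title";     // hypothetical column
    await documents.save(doc);       // save() matches on the old version and increments it
  } catch (error) {
    if (error instanceof DbVersionMismatchError) {
      // another writer updated the row first: reload and retry, or report the conflict
      return;
    }
    throw error;
  }
}
```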
1464
1465
 
1465
1466
  //#endregion
1466
- //#region ../../src/orm/services/ModelBuilder.ts
1467
+ //#region ../../src/orm/helpers/pgAttr.ts
1467
1468
  /**
1468
- * Abstract base class for transforming Alepha Primitives (Entity, Sequence, etc...)
1469
- * into drizzle models (tables, enums, sequences, etc...).
1469
+ * Decorates a typebox schema with a Postgres attribute.
1470
+ *
1471
+ * > It's just a fancy way to add Symbols to a field.
1472
+ *
1473
+ * @example
1474
+ * ```ts
1475
+ * import { t } from "alepha";
1476
+ * import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
1477
+ *
1478
+ * export const updatedAtSchema = pgAttr(
1479
+ * t.datetime(), PG_UPDATED_AT,
1480
+ * );
1481
+ * ```
1470
1482
  */
1471
- var ModelBuilder = class {
1483
+ const pgAttr = (type, attr, value) => {
1484
+ Object.assign(type, { [attr]: value ?? {} });
1485
+ return type;
1486
+ };
1487
+ /**
1488
+ * Retrieves the fields of a schema that have a specific attribute.
1489
+ */
1490
+ const getAttrFields = (schema$1, name) => {
1491
+ const fields = [];
1492
+ for (const key of Object.keys(schema$1.properties)) {
1493
+ const value = schema$1.properties[key];
1494
+ if (name in value) fields.push({
1495
+ type: value,
1496
+ key,
1497
+ data: value[name]
1498
+ });
1499
+ }
1500
+ return fields;
1501
+ };
1502
+
1503
+ //#endregion
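To show how `pgAttr` and `getAttrFields` fit together, a small sketch that assumes a symbol-like attribute key as in the example above; the schema, field names, and the locally created symbol are made up, and import paths for the two helpers are omitted:

```ts
import { t } from "alepha";

// Hypothetical attribute key; in the package the PG_* symbols come from a constants module.
const MY_UPDATED_AT = Symbol("pg:updatedAt");

const articleSchema = t.object({
  id: t.text(),
  updatedAt: pgAttr(t.datetime(), MY_UPDATED_AT),
});

// Finds every property carrying the attribute:
// [{ key: "updatedAt", type: <datetime schema>, data: {} }]
const updatedAtFields = getAttrFields(articleSchema, MY_UPDATED_AT);
```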
1504
+ //#region ../../src/orm/services/PgRelationManager.ts
1505
+ var PgRelationManager = class {
1472
1506
  /**
1473
- * Convert camelCase to snake_case for column names.
1507
+ * Recursively build joins for the query builder based on the relations map
1474
1508
  */
1475
- toColumnName(str) {
1476
- return str[0].toLowerCase() + str.slice(1).replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
1509
+ buildJoins(provider, builder, joins, withRelations, table, parentKey) {
1510
+ for (const [key, join] of Object.entries(withRelations)) {
1511
+ const from = provider.table(join.join);
1512
+ const on = isSQLWrapper$1(join.on) ? join.on : sql$1`${table[join.on[0]]} = ${from[join.on[1].name]}`;
1513
+ if (join.type === "right") builder.rightJoin(from, on);
1514
+ else if (join.type === "inner") builder.innerJoin(from, on);
1515
+ else builder.leftJoin(from, on);
1516
+ joins.push({
1517
+ key,
1518
+ table: getTableName(from),
1519
+ schema: join.join.schema,
1520
+ col: (name) => from[name],
1521
+ parent: parentKey
1522
+ });
1523
+ if (join.with) this.buildJoins(provider, builder, joins, join.with, from, parentKey ? `${parentKey}.${key}` : key);
1524
+ }
1477
1525
  }
1478
1526
  /**
1479
- * Build the table configuration function for any database.
1480
- * This includes indexes, foreign keys, constraints, and custom config.
1481
- *
1482
- * @param entity - The entity primitive
1483
- * @param builders - Database-specific builder functions
1484
- * @param tableResolver - Function to resolve entity references to table columns
1485
- * @param customConfigHandler - Optional handler for custom config
1527
+ * Map a row with its joined relations based on the joins definition
1486
1528
  */
1487
- buildTableConfig(entity, builders, tableResolver, customConfigHandler) {
1488
- if (!entity.options.indexes && !entity.options.foreignKeys && !entity.options.constraints && !entity.options.config) return;
1489
- return (self) => {
1490
- const configs = [];
1491
- if (entity.options.indexes) {
1492
- for (const indexDef of entity.options.indexes) if (typeof indexDef === "string") {
1493
- const columnName = this.toColumnName(indexDef);
1494
- const indexName = `${entity.name}_${columnName}_idx`;
1495
- if (self[indexDef]) configs.push(builders.index(indexName).on(self[indexDef]));
1496
- } else if (typeof indexDef === "object" && indexDef !== null) {
1497
- if ("column" in indexDef) {
1498
- const columnName = this.toColumnName(indexDef.column);
1499
- const indexName = indexDef.name || `${entity.name}_${columnName}_idx`;
1500
- if (self[indexDef.column]) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(self[indexDef.column]));
1501
- else configs.push(builders.index(indexName).on(self[indexDef.column]));
1502
- } else if ("columns" in indexDef) {
1503
- const columnNames = indexDef.columns.map((col) => this.toColumnName(col));
1504
- const indexName = indexDef.name || `${entity.name}_${columnNames.join("_")}_idx`;
1505
- const cols = indexDef.columns.map((col) => self[col]).filter(Boolean);
1506
- if (cols.length === indexDef.columns.length) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(...cols));
1507
- else configs.push(builders.index(indexName).on(...cols));
1529
+ mapRowWithJoins(record, row, schema$1, joins, parentKey) {
1530
+ for (const join of joins) if (join.parent === parentKey) {
1531
+ const joinedData = row[join.table];
1532
+ if (this.isAllNull(joinedData)) record[join.key] = void 0;
1533
+ else {
1534
+ record[join.key] = joinedData;
1535
+ this.mapRowWithJoins(record[join.key], row, schema$1, joins, parentKey ? `${parentKey}.${join.key}` : join.key);
1536
+ }
1537
+ }
1538
+ return record;
1539
+ }
1540
+ /**
1541
+ * Check if all values in an object are null (indicates a left join with no match)
1542
+ */
1543
+ isAllNull(obj) {
1544
+ if (obj === null || obj === void 0) return true;
1545
+ if (typeof obj !== "object") return false;
1546
+ return Object.values(obj).every((val) => val === null);
1547
+ }
1548
+ /**
1549
+ * Build a schema that includes all join properties recursively
1550
+ */
1551
+ buildSchemaWithJoins(baseSchema, joins, parentPath) {
1552
+ const schema$1 = Value.Clone(baseSchema);
1553
+ const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
1554
+ for (const join of joinsAtThisLevel) {
1555
+ const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
1556
+ const childJoins = joins.filter((j) => j.parent === joinPath);
1557
+ let joinSchema = join.schema;
1558
+ if (childJoins.length > 0) joinSchema = this.buildSchemaWithJoins(join.schema, joins, joinPath);
1559
+ schema$1.properties[join.key] = t.optional(joinSchema);
1560
+ }
1561
+ return schema$1;
1562
+ }
1563
+ };
1564
+
1565
+ //#endregion
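For context on what `mapRowWithJoins` produces: Drizzle returns one flat row keyed by table name per joined table, and the manager re-nests the joined columns under the join key, collapsing all-null left-join misses to `undefined`. A hypothetical before/after, not taken from the package:

```ts
// Hypothetical tables: "users" left-joined to "profiles" under the join key "profile".
const missed = {
  users: { id: "u1", name: "Ada" },
  profiles: { id: null, bio: null }, // left join with no match: every column is null
};
// mapRowWithJoins(...) -> { id: "u1", name: "Ada", profile: undefined }

const matched = {
  users: { id: "u2", name: "Lin" },
  profiles: { id: "p2", bio: "hello" },
};
// mapRowWithJoins(...) -> { id: "u2", name: "Lin", profile: { id: "p2", bio: "hello" } }
```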
1566
+ //#region ../../src/orm/services/QueryManager.ts
1567
+ var QueryManager = class {
1568
+ alepha = $inject(Alepha);
1569
+ /**
1570
+ * Convert a query object to a SQL query.
1571
+ */
1572
+ toSQL(query, options) {
1573
+ const { schema: schema$1, col, joins } = options;
1574
+ const conditions = [];
1575
+ if (isSQLWrapper(query)) conditions.push(query);
1576
+ else {
1577
+ const keys = Object.keys(query);
1578
+ for (const key of keys) {
1579
+ const operator = query[key];
1580
+ if (typeof query[key] === "object" && query[key] != null && !Array.isArray(query[key]) && joins?.length) {
1581
+ const matchingJoins = joins.filter((j) => j.key === key);
1582
+ if (matchingJoins.length > 0) {
1583
+ const join = matchingJoins[0];
1584
+ const joinPath = join.parent ? `${join.parent}.${key}` : key;
1585
+ const recursiveJoins = joins.filter((j) => {
1586
+ if (!j.parent) return false;
1587
+ return j.parent === joinPath || j.parent.startsWith(`${joinPath}.`);
1588
+ }).map((j) => {
1589
+ const newParent = j.parent === joinPath ? void 0 : j.parent.substring(joinPath.length + 1);
1590
+ return {
1591
+ ...j,
1592
+ parent: newParent
1593
+ };
1594
+ });
1595
+ const sql$2 = this.toSQL(query[key], {
1596
+ schema: join.schema,
1597
+ col: join.col,
1598
+ joins: recursiveJoins.length > 0 ? recursiveJoins : void 0,
1599
+ dialect: options.dialect
1600
+ });
1601
+ if (sql$2) conditions.push(sql$2);
1602
+ continue;
1508
1603
  }
1509
1604
  }
1510
- }
1511
- if (entity.options.foreignKeys) for (const fkDef of entity.options.foreignKeys) {
1512
- const columnNames = fkDef.columns.map((col) => this.toColumnName(col));
1513
- const cols = fkDef.columns.map((col) => self[col]).filter(Boolean);
1514
- if (cols.length === fkDef.columns.length) {
1515
- const fkName = fkDef.name || `${entity.name}_${columnNames.join("_")}_fk`;
1516
- const foreignColumns = fkDef.foreignColumns.map((colRef) => {
1517
- const entityCol = colRef();
1518
- if (!entityCol || !entityCol.entity || !entityCol.name) throw new Error(`Invalid foreign column reference in ${entity.name}`);
1519
- if (tableResolver) {
1520
- const foreignTable = tableResolver(entityCol.entity.name);
1521
- if (!foreignTable) throw new Error(`Foreign table ${entityCol.entity.name} not found for ${entity.name}`);
1522
- return foreignTable[entityCol.name];
1523
- }
1524
- return entityCol;
1525
- });
1526
- configs.push(builders.foreignKey({
1527
- name: fkName,
1528
- columns: cols,
1529
- foreignColumns
1530
- }));
1605
+ if (Array.isArray(operator)) {
1606
+ const operations = operator.map((it) => {
1607
+ if (isSQLWrapper(it)) return it;
1608
+ return this.toSQL(it, {
1609
+ schema: schema$1,
1610
+ col,
1611
+ joins,
1612
+ dialect: options.dialect
1613
+ });
1614
+ }).filter((it) => it != null);
1615
+ if (key === "and") return and(...operations);
1616
+ if (key === "or") return or(...operations);
1531
1617
  }
1532
- }
1533
- if (entity.options.constraints) for (const constraintDef of entity.options.constraints) {
1534
- const columnNames = constraintDef.columns.map((col) => this.toColumnName(col));
1535
- const cols = constraintDef.columns.map((col) => self[col]).filter(Boolean);
1536
- if (cols.length === constraintDef.columns.length) {
1537
- if (constraintDef.unique) {
1538
- const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_unique`;
1539
- configs.push(builders.unique(constraintName).on(...cols));
1540
- }
1541
- if (constraintDef.check) {
1542
- const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_check`;
1543
- configs.push(builders.check(constraintName, constraintDef.check));
1544
- }
1618
+ if (key === "not") {
1619
+ const where = this.toSQL(operator, {
1620
+ schema: schema$1,
1621
+ col,
1622
+ joins,
1623
+ dialect: options.dialect
1624
+ });
1625
+ if (where) return not(where);
1626
+ }
1627
+ if (operator) {
1628
+ const column = col(key);
1629
+ const sql$2 = this.mapOperatorToSql(operator, column, schema$1, key, options.dialect);
1630
+ if (sql$2) conditions.push(sql$2);
1545
1631
  }
1546
1632
  }
1547
- if (entity.options.config && customConfigHandler) configs.push(...customConfigHandler(entity.options.config, self));
1548
- else if (entity.options.config) {
1549
- const customConfigs = entity.options.config(self);
1550
- if (Array.isArray(customConfigs)) configs.push(...customConfigs);
1551
- }
1552
- return configs;
1553
- };
1554
- }
1555
- };
1556
-
1557
- //#endregion
1558
- //#region ../../src/orm/services/PostgresModelBuilder.ts
1559
- var PostgresModelBuilder = class extends ModelBuilder {
1560
- schemas = /* @__PURE__ */ new Map();
1561
- getPgSchema(name) {
1562
- if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
1563
- const nsp = name !== "public" ? this.schemas.get(name) : {
1564
- enum: pgEnum,
1565
- table: pgTable
1566
- };
1567
- if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
1568
- return nsp;
1569
- }
1570
- buildTable(entity, options) {
1571
- const tableName = entity.name;
1572
- if (options.tables.has(tableName)) return;
1573
- const nsp = this.getPgSchema(options.schema);
1574
- const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
1575
- const configFn = this.getTableConfig(entity, options.tables);
1576
- const table = nsp.table(tableName, columns, configFn);
1577
- options.tables.set(tableName, table);
1633
+ }
1634
+ if (conditions.length === 1) return conditions[0];
1635
+ return and(...conditions);
1578
1636
  }
1579
- buildSequence(sequence, options) {
1580
- const sequenceName = sequence.name;
1581
- if (options.sequences.has(sequenceName)) return;
1582
- const nsp = this.getPgSchema(options.schema);
1583
- options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
1637
+ /**
1638
+ * Check if an object has any filter operator properties.
1639
+ */
1640
+ hasFilterOperatorProperties(obj) {
1641
+ if (!obj || typeof obj !== "object") return false;
1642
+ return [
1643
+ "eq",
1644
+ "ne",
1645
+ "gt",
1646
+ "gte",
1647
+ "lt",
1648
+ "lte",
1649
+ "inArray",
1650
+ "notInArray",
1651
+ "isNull",
1652
+ "isNotNull",
1653
+ "like",
1654
+ "notLike",
1655
+ "ilike",
1656
+ "notIlike",
1657
+ "contains",
1658
+ "startsWith",
1659
+ "endsWith",
1660
+ "between",
1661
+ "notBetween",
1662
+ "arrayContains",
1663
+ "arrayContained",
1664
+ "arrayOverlaps"
1665
+ ].some((key) => key in obj);
1584
1666
  }
1585
1667
  /**
1586
- * Get PostgreSQL-specific config builder for the table.
1668
+ * Map a filter operator to a SQL query.
1587
1669
  */
1588
- getTableConfig(entity, tables) {
1589
- const pgBuilders = {
1590
- index,
1591
- uniqueIndex,
1592
- unique,
1593
- check,
1594
- foreignKey
1670
+ mapOperatorToSql(operator, column, columnSchema, columnName, dialect = "postgresql") {
1671
+ const encodeValue = (value) => {
1672
+ if (value == null) return value;
1673
+ if (columnSchema && columnName) try {
1674
+ const fieldSchema = columnSchema.properties[columnName];
1675
+ if (fieldSchema) return this.alepha.codec.encode(fieldSchema, value, { encoder: "drizzle" });
1676
+ } catch (error) {}
1677
+ return value;
1595
1678
  };
1596
- const tableResolver = (entityName) => {
1597
- return tables.get(entityName);
1679
+ const encodeArray = (values) => {
1680
+ return values.map((v) => encodeValue(v));
1598
1681
  };
1599
- return this.buildTableConfig(entity, pgBuilders, tableResolver);
1600
- }
1601
- schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
1602
- return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
1603
- let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
1604
- if ("default" in value && value.default != null) col = col.default(value.default);
1605
- if (PG_PRIMARY_KEY in value) col = col.primaryKey();
1606
- if (PG_REF in value) {
1607
- const config = value[PG_REF];
1608
- col = col.references(() => {
1609
- const ref = config.ref();
1610
- const table = tables.get(ref.entity.name);
1611
- if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
1612
- const target = table[ref.name];
1613
- if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
1614
- return target;
1615
- }, config.actions);
1616
- }
1617
- if (schema$1.required?.includes(key)) col = col.notNull();
1618
- return {
1619
- ...columns,
1620
- [key]: col
1621
- };
1622
- }, {});
1623
- };
1624
- mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
1625
- const key = this.toColumnName(fieldName);
1626
- if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
1627
- if (t.schema.isInteger(value)) {
1628
- if (PG_SERIAL in value) return pg$2.serial(key);
1629
- if (PG_IDENTITY in value) {
1630
- const options = value[PG_IDENTITY];
1631
- if (options.mode === "byDefault") return pg$2.integer().generatedByDefaultAsIdentity(options);
1632
- return pg$2.integer().generatedAlwaysAsIdentity(options);
1633
- }
1634
- return pg$2.integer(key);
1682
+ if (typeof operator !== "object" || operator == null || !this.hasFilterOperatorProperties(operator)) return eq(column, encodeValue(operator));
1683
+ const conditions = [];
1684
+ if (operator?.eq != null) conditions.push(eq(column, encodeValue(operator.eq)));
1685
+ if (operator?.ne != null) conditions.push(ne(column, encodeValue(operator.ne)));
1686
+ if (operator?.gt != null) conditions.push(gt(column, encodeValue(operator.gt)));
1687
+ if (operator?.gte != null) conditions.push(gte(column, encodeValue(operator.gte)));
1688
+ if (operator?.lt != null) conditions.push(lt(column, encodeValue(operator.lt)));
1689
+ if (operator?.lte != null) conditions.push(lte(column, encodeValue(operator.lte)));
1690
+ if (operator?.inArray != null) {
1691
+ if (!Array.isArray(operator.inArray) || operator.inArray.length === 0) throw new AlephaError("inArray operator requires at least one value");
1692
+ conditions.push(inArray(column, encodeArray(operator.inArray)));
1635
1693
  }
1636
- if (t.schema.isBigInt(value)) {
1637
- if (PG_IDENTITY in value) {
1638
- const options = value[PG_IDENTITY];
1639
- if (options.mode === "byDefault") return pg$2.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
1640
- return pg$2.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
1641
- }
1694
+ if (operator?.notInArray != null) {
1695
+ if (!Array.isArray(operator.notInArray) || operator.notInArray.length === 0) throw new AlephaError("notInArray operator requires at least one value");
1696
+ conditions.push(notInArray(column, encodeArray(operator.notInArray)));
1642
1697
  }
1643
- if (t.schema.isNumber(value)) {
1644
- if (PG_IDENTITY in value) {
1645
- const options = value[PG_IDENTITY];
1646
- if (options.mode === "byDefault") return pg$2.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
1647
- return pg$2.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
1648
- }
1649
- if (value.format === "int64") return pg$2.bigint(key, { mode: "number" });
1650
- return pg$2.numeric(key);
1698
+ if (operator?.isNull != null) conditions.push(isNull(column));
1699
+ if (operator?.isNotNull != null) conditions.push(isNotNull(column));
1700
+ if (operator?.like != null) conditions.push(like(column, encodeValue(operator.like)));
1701
+ if (operator?.notLike != null) conditions.push(notLike(column, encodeValue(operator.notLike)));
1702
+ if (operator?.ilike != null) conditions.push(ilike(column, encodeValue(operator.ilike)));
1703
+ if (operator?.notIlike != null) conditions.push(notIlike(column, encodeValue(operator.notIlike)));
1704
+ if (operator?.contains != null) {
1705
+ const escapedValue = String(operator.contains).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1706
+ if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}%`)})`);
1707
+ else conditions.push(ilike(column, encodeValue(`%${escapedValue}%`)));
1651
1708
  }
1652
- if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
1653
- if (t.schema.isBoolean(value)) return pg$2.boolean(key);
1654
- if (t.schema.isObject(value)) return schema(key, value);
1655
- if (t.schema.isRecord(value)) return schema(key, value);
1656
- const isTypeEnum = (value$1) => t.schema.isUnsafe(value$1) && "type" in value$1 && value$1.type === "string" && "enum" in value$1 && Array.isArray(value$1.enum);
1657
- if (t.schema.isArray(value)) {
1658
- if (t.schema.isObject(value.items)) return schema(key, value);
1659
- if (t.schema.isRecord(value.items)) return schema(key, value);
1660
- if (t.schema.isString(value.items)) return pg$2.text(key).array();
1661
- if (t.schema.isInteger(value.items)) return pg$2.integer(key).array();
1662
- if (t.schema.isNumber(value.items)) return pg$2.numeric(key).array();
1663
- if (t.schema.isBoolean(value.items)) return pg$2.boolean(key).array();
1664
- if (isTypeEnum(value.items)) return pg$2.text(key).array();
1709
+ if (operator?.startsWith != null) {
1710
+ const escapedValue = String(operator.startsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1711
+ if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`${escapedValue}%`)})`);
1712
+ else conditions.push(ilike(column, encodeValue(`${escapedValue}%`)));
1665
1713
  }
1666
- if (isTypeEnum(value)) {
1667
- if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
1668
- if (PG_ENUM in value && value[PG_ENUM]) {
1669
- const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
1670
- if (enums.has(enumName)) {
1671
- const values = enums.get(enumName).enumValues.join(",");
1672
- const newValues = value.enum.join(",");
1673
- if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
1674
- }
1675
- enums.set(enumName, nsp.enum(enumName, value.enum));
1676
- return enums.get(enumName)(key);
1677
- }
1678
- return this.mapStringToColumn(key, value);
1714
+ if (operator?.endsWith != null) {
1715
+ const escapedValue = String(operator.endsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1716
+ if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}`)})`);
1717
+ else conditions.push(ilike(column, encodeValue(`%${escapedValue}`)));
1679
1718
  }
1680
- throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
1681
- };
1719
+ if (operator?.between != null) {
1720
+ if (!Array.isArray(operator.between) || operator.between.length !== 2) throw new Error("between operator requires exactly 2 values [min, max]");
1721
+ conditions.push(between(column, encodeValue(operator.between[0]), encodeValue(operator.between[1])));
1722
+ }
1723
+ if (operator?.notBetween != null) {
1724
+ if (!Array.isArray(operator.notBetween) || operator.notBetween.length !== 2) throw new Error("notBetween operator requires exactly 2 values [min, max]");
1725
+ conditions.push(notBetween(column, encodeValue(operator.notBetween[0]), encodeValue(operator.notBetween[1])));
1726
+ }
1727
+ if (operator?.arrayContains != null) conditions.push(arrayContains(column, encodeValue(operator.arrayContains)));
1728
+ if (operator?.arrayContained != null) conditions.push(arrayContained(column, encodeValue(operator.arrayContained)));
1729
+ if (operator?.arrayOverlaps != null) conditions.push(arrayOverlaps(column, encodeValue(operator.arrayOverlaps)));
1730
+ if (conditions.length === 0) return;
1731
+ if (conditions.length === 1) return conditions[0];
1732
+ return and(...conditions);
1733
+ }
1734
+ /**
1735
+ * Parse pagination sort string to orderBy format.
1736
+ * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
1737
+ * - Columns separated by comma
1738
+ * - Prefix with '-' for DESC direction
1739
+ *
1740
+ * @param sort Pagination sort string
1741
+ * @returns OrderBy array or single object
1742
+ */
1743
+ parsePaginationSort(sort) {
1744
+ const orderByClauses = sort.split(",").map((field) => field.trim()).map((field) => {
1745
+ if (field.startsWith("-")) return {
1746
+ column: field.substring(1),
1747
+ direction: "desc"
1748
+ };
1749
+ return {
1750
+ column: field,
1751
+ direction: "asc"
1752
+ };
1753
+ });
1754
+ return orderByClauses.length === 1 ? orderByClauses[0] : orderByClauses;
1755
+ }
1756
+ /**
1757
+ * Normalize orderBy parameter to array format.
1758
+ * Supports 3 modes:
1759
+ * 1. String: "name" -> [{ column: "name", direction: "asc" }]
1760
+ * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
1761
+ * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
1762
+ *
1763
+ * @param orderBy The orderBy parameter
1764
+ * @returns Normalized array of order by clauses
1765
+ */
1766
+ normalizeOrderBy(orderBy) {
1767
+ if (typeof orderBy === "string") return [{
1768
+ column: orderBy,
1769
+ direction: "asc"
1770
+ }];
1771
+ if (!Array.isArray(orderBy) && typeof orderBy === "object") return [{
1772
+ column: orderBy.column,
1773
+ direction: orderBy.direction ?? "asc"
1774
+ }];
1775
+ if (Array.isArray(orderBy)) return orderBy.map((item) => ({
1776
+ column: item.column,
1777
+ direction: item.direction ?? "asc"
1778
+ }));
1779
+ return [];
1780
+ }
1682
1781
  /**
1683
- * Map a string to a PG column.
1782
+ * Create a pagination object.
1684
1783
  *
1685
- * @param key The key of the field.
1686
- * @param value The value of the field.
1784
+ * @deprecated Use `createPagination` from alepha instead.
1785
+ * This method now delegates to the framework-level helper.
1786
+ *
1787
+ * @param entities The entities to paginate.
1788
+ * @param limit The limit of the pagination.
1789
+ * @param offset The offset of the pagination.
1790
+ * @param sort Optional sort metadata to include in response.
1687
1791
  */
1688
- mapStringToColumn = (key, value) => {
1689
- if ("format" in value) {
1690
- if (value.format === "uuid") {
1691
- if (PG_PRIMARY_KEY in value) return pg$2.uuid(key).defaultRandom();
1692
- return pg$2.uuid(key);
1693
- }
1694
- if (value.format === "byte") return byte(key);
1695
- if (value.format === "date-time") {
1696
- if (PG_CREATED_AT in value) return pg$2.timestamp(key, {
1697
- mode: "string",
1698
- withTimezone: true
1699
- }).defaultNow();
1700
- if (PG_UPDATED_AT in value) return pg$2.timestamp(key, {
1701
- mode: "string",
1702
- withTimezone: true
1703
- }).defaultNow();
1704
- return pg$2.timestamp(key, {
1705
- mode: "string",
1706
- withTimezone: true
1707
- });
1708
- }
1709
- if (value.format === "date") return pg$2.date(key, { mode: "string" });
1710
- }
1711
- return pg$2.text(key);
1712
- };
1792
+ createPagination(entities, limit = 10, offset = 0, sort) {
1793
+ return createPagination(entities, limit, offset, sort);
1794
+ }
1713
1795
  };
1714
1796
 
1715
1797
  //#endregion
1716
- //#region ../../src/orm/providers/drivers/BunPostgresProvider.ts
1717
- const envSchema$4 = t.object({
1718
- DATABASE_URL: t.optional(t.text()),
1719
- POSTGRES_SCHEMA: t.optional(t.text())
1720
- });
1721
- /**
1722
- * Bun PostgreSQL provider using Drizzle ORM with Bun's native SQL client.
1723
- *
1724
- * This provider uses Bun's built-in SQL class for PostgreSQL connections,
1725
- * which provides excellent performance on the Bun runtime.
1726
- *
1727
- * @example
1728
- * ```ts
1729
- * // Set DATABASE_URL environment variable
1730
- * // DATABASE_URL=postgres://user:password@localhost:5432/database
1731
- *
1732
- * // Or configure programmatically
1733
- * alepha.with({
1734
- * provide: DatabaseProvider,
1735
- * use: BunPostgresProvider,
1736
- * });
1737
- * ```
1738
- */
1739
- var BunPostgresProvider = class extends DatabaseProvider {
1798
+ //#region ../../src/orm/services/Repository.ts
1799
+ var Repository = class {
1800
+ entity;
1801
+ provider;
1740
1802
  log = $logger();
1741
- env = $env(envSchema$4);
1742
- kit = $inject(DrizzleKitProvider);
1743
- builder = $inject(PostgresModelBuilder);
1744
- client;
1745
- bunDb;
1746
- dialect = "postgresql";
1747
- get name() {
1748
- return "bun-postgres";
1803
+ relationManager = $inject(PgRelationManager);
1804
+ queryManager = $inject(QueryManager);
1805
+ dateTimeProvider = $inject(DateTimeProvider);
1806
+ alepha = $inject(Alepha);
1807
+ constructor(entity, provider = DatabaseProvider) {
1808
+ this.entity = entity;
1809
+ this.provider = this.alepha.inject(provider);
1810
+ this.provider.registerEntity(entity);
1749
1811
  }
1750
1812
  /**
1751
- * In testing mode, the schema name will be generated and deleted after the test.
1813
+ * Represents the primary key of the table.
1814
+ * - Key is the name of the primary key column.
1815
+ * - Type is the type (TypeBox) of the primary key column.
1816
+ *
1817
+ * ID is mandatory. If the table does not have a primary key, it will throw an error.
1752
1818
  */
1753
- schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
1754
- get url() {
1755
- if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
1756
- return this.env.DATABASE_URL;
1819
+ get id() {
1820
+ return this.getPrimaryKey(this.entity.schema);
1757
1821
  }
1758
1822
  /**
1759
- * Execute a SQL statement.
1823
+ * Get Drizzle table object.
1760
1824
  */
1761
- execute(statement) {
1762
- try {
1763
- return this.db.execute(statement);
1764
- } catch (error) {
1765
- throw new DbError("Error executing statement", error);
1766
- }
1825
+ get table() {
1826
+ return this.provider.table(this.entity);
1767
1827
  }
1768
1828
  /**
1769
- * Get Postgres schema used by this provider.
1829
+ * Get SQL table name. (from Drizzle table object)
1770
1830
  */
1771
- get schema() {
1772
- if (this.schemaForTesting) return this.schemaForTesting;
1773
- if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
1774
- return "public";
1831
+ get tableName() {
1832
+ return this.entity.name;
1775
1833
  }
1776
1834
  /**
1777
- * Get the Drizzle Postgres database instance.
1835
+ * Getter for the database connection from the database provider.
1778
1836
  */
1779
1837
  get db() {
1780
- if (!this.bunDb) throw new AlephaError("Database not initialized");
1781
- return this.bunDb;
1838
+ return this.provider.db;
1782
1839
  }
1783
- async executeMigrations(migrationsFolder) {
1784
- const { migrate: migrate$3 } = await import("drizzle-orm/bun-sql/migrator");
1785
- await migrate$3(this.bunDb, { migrationsFolder });
1840
+ /**
1841
+ * Execute a SQL query.
1842
+ *
1843
+ * This method allows executing raw SQL queries against the database.
1844
+ * This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
1845
+ *
1846
+ * You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
1847
+ *
1848
+ * @example
1849
+ * ```ts
1850
+ * class App {
1851
+ * repository = $repository({ ... });
1852
+ * async getAdults() {
1853
+ * const users = repository.table; // Drizzle table object
1854
+ * await repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
1855
+ * // or better
1856
+ * await repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
1857
+ * }
1858
+ * }
1859
+ * ```
1860
+ */
1861
+ async query(query, schema$1) {
1862
+ const raw = typeof query === "function" ? query(this.table, this.db) : query;
1863
+ if (typeof raw === "string" && raw.includes("[object Object]")) throw new AlephaError("Invalid SQL query. Did you forget to call the 'sql' function?");
1864
+ return (await this.provider.execute(raw)).map((it) => {
1865
+ return this.clean(this.mapRawFieldsToEntity(it), schema$1 ?? this.entity.schema);
1866
+ });
1786
1867
  }
1787
- onStart = $hook({
1788
- on: "start",
1789
- handler: async () => {
1790
- await this.connect();
1791
- if (!this.alepha.isServerless()) try {
1792
- await this.migrateLock.run();
1793
- } catch (error) {
1794
- throw new DbMigrationError(error);
1795
- }
1796
- }
1797
- });
1798
- onStop = $hook({
1799
- on: "stop",
1800
- handler: async () => {
1801
- if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
1802
- if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
1803
- this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
1804
- await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
1805
- this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
1868
+ /**
1869
+ * Map raw database fields to entity fields. (handles column name differences)
1870
+ */
1871
+ mapRawFieldsToEntity(row) {
1872
+ const entity = {};
1873
+ for (const key of Object.keys(row)) {
1874
+ entity[key] = row[key];
1875
+ for (const colKey of Object.keys(this.table)) if (this.table[colKey].name === key) {
1876
+ entity[colKey] = row[key];
1877
+ break;
1806
1878
  }
1807
- await this.close();
1808
1879
  }
1809
- });
1810
- async connect() {
1811
- this.log.debug("Connect ..");
1812
- if (typeof Bun === "undefined") throw new AlephaError("BunPostgresProvider requires the Bun runtime. Use NodePostgresProvider for Node.js.");
1813
- const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sql");
1814
- const { SQL: SQL$1 } = await import("bun");
1815
- this.client = new SQL$1(this.url);
1816
- await this.client.unsafe("SELECT 1");
1817
- this.bunDb = drizzle$3({
1818
- client: this.client,
1819
- logger: { logQuery: (query, params) => {
1820
- this.log.trace(query, { params });
1821
- } }
1822
- });
1823
- this.log.info("Connection OK");
1880
+ return entity;
1824
1881
  }
1825
- async close() {
1826
- if (this.client) {
1827
- this.log.debug("Close...");
1828
- await this.client.close();
1829
- this.client = void 0;
1830
- this.bunDb = void 0;
1831
- this.log.info("Connection closed");
1882
+ /**
1883
+ * Get a Drizzle column from the table by his name.
1884
+ */
1885
+ col(name) {
1886
+ const column = this.table[name];
1887
+ if (!column) throw new AlephaError(`Invalid access. Column ${String(name)} not found in table ${this.tableName}`);
1888
+ return column;
1889
+ }
1890
+ /**
1891
+ * Run a transaction.
1892
+ */
1893
+ async transaction(transaction, config) {
1894
+ if (this.provider.driver === "pglite") {
1895
+ this.log.warn("Transactions are not supported with pglite driver");
1896
+ return await transaction(null);
1832
1897
  }
1898
+ this.log.debug(`Starting transaction on table ${this.tableName}`);
1899
+ return await this.db.transaction(transaction, config);
1833
1900
  }
1834
- migrateLock = $lock({ handler: async () => {
1835
- await this.migrate();
1836
- } });
1837
- };
1838
-
1839
- //#endregion
1840
- //#region ../../src/orm/services/SqliteModelBuilder.ts
1841
- var SqliteModelBuilder = class extends ModelBuilder {
1842
- buildTable(entity, options) {
1843
- const tableName = entity.name;
1844
- if (options.tables.has(tableName)) return;
1845
- const table = sqliteTable(tableName, this.schemaToSqliteColumns(tableName, entity.schema, options.enums, options.tables), this.getTableConfig(entity, options.tables));
1846
- options.tables.set(tableName, table);
1901
+ /**
1902
+ * Start a SELECT query on the table.
1903
+ */
1904
+ rawSelect(opts = {}) {
1905
+ return (opts.tx ?? this.db).select().from(this.table);
1906
+ }
1907
+ /**
1908
+ * Start a SELECT DISTINCT query on the table.
1909
+ */
1910
+ rawSelectDistinct(opts = {}, columns = []) {
1911
+ const db$1 = opts.tx ?? this.db;
1912
+ const table = this.table;
1913
+ const fields = {};
1914
+ for (const column of columns) if (typeof column === "string") fields[column] = this.col(column);
1915
+ return db$1.selectDistinct(fields).from(table);
1916
+ }
1917
+ /**
1918
+ * Start an INSERT query on the table.
1919
+ */
1920
+ rawInsert(opts = {}) {
1921
+ return (opts.tx ?? this.db).insert(this.table);
1922
+ }
1923
+ /**
1924
+ * Start an UPDATE query on the table.
1925
+ */
1926
+ rawUpdate(opts = {}) {
1927
+ return (opts.tx ?? this.db).update(this.table);
1847
1928
  }
1848
- buildSequence(sequence, options) {
1849
- throw new AlephaError("SQLite does not support sequences");
1929
+ /**
1930
+ * Start a DELETE query on the table.
1931
+ */
1932
+ rawDelete(opts = {}) {
1933
+ return (opts.tx ?? this.db).delete(this.table);
1850
1934
  }
1851
1935
  /**
1852
- * Get SQLite-specific config builder for the table.
1936
+ * Create a Drizzle `select` query based on a JSON query object.
1937
+ *
1938
+ * > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
1853
1939
  */
1854
- getTableConfig(entity, tables) {
1855
- const sqliteBuilders = {
1856
- index: index$1,
1857
- uniqueIndex: uniqueIndex$1,
1858
- unique: unique$1,
1859
- check: check$1,
1860
- foreignKey: foreignKey$1
1861
- };
1862
- const tableResolver = (entityName) => {
1863
- return tables.get(entityName);
1864
- };
1865
- return this.buildTableConfig(entity, sqliteBuilders, tableResolver, (config, self) => {
1866
- const customConfigs = config(self);
1867
- return Array.isArray(customConfigs) ? customConfigs : [];
1940
+ async findMany(query = {}, opts = {}) {
1941
+ await this.alepha.events.emit("repository:read:before", {
1942
+ tableName: this.tableName,
1943
+ query
1868
1944
  });
1869
- }
1870
- schemaToSqliteColumns = (tableName, schema$1, enums, tables) => {
1871
- return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
1872
- let col = this.mapFieldToSqliteColumn(tableName, key, value, enums);
1873
- if ("default" in value && value.default != null) col = col.default(value.default);
1874
- if (PG_PRIMARY_KEY in value) col = col.primaryKey();
1875
- if (PG_REF in value) {
1876
- const config = value[PG_REF];
1877
- col = col.references(() => {
1878
- const ref = config.ref();
1879
- const table = tables.get(ref.entity.name);
1880
- if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
1881
- const target = table[ref.name];
1882
- if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
1883
- return target;
1884
- }, config.actions);
1885
- }
1886
- if (schema$1.required?.includes(key)) col = col.notNull();
1887
- return {
1888
- ...columns,
1889
- [key]: col
1890
- };
1891
- }, {});
1892
- };
1893
- mapFieldToSqliteColumn = (tableName, fieldName, value, enums) => {
1894
- const key = this.toColumnName(fieldName);
1895
- if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
1896
- if (t.schema.isInteger(value)) {
1897
- if (PG_SERIAL in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
1898
- return pg$1.integer(key);
1899
- }
1900
- if (t.schema.isBigInt(value)) {
1901
- if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
1902
- return pg$1.integer(key, { mode: "number" });
1903
- }
1904
- if (t.schema.isNumber(value)) {
1905
- if (PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
1906
- return pg$1.numeric(key);
1907
- }
1908
- if (t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
1909
- if (t.schema.isBoolean(value)) return this.sqliteBool(key, value);
1910
- if (t.schema.isObject(value)) return this.sqliteJson(key, value);
1911
- if (t.schema.isRecord(value)) return this.sqliteJson(key, value);
1912
- if (t.schema.isAny(value)) return this.sqliteJson(key, value);
1913
- if (t.schema.isArray(value)) {
1914
- if (t.schema.isObject(value.items)) return this.sqliteJson(key, value);
1915
- if (t.schema.isRecord(value.items)) return this.sqliteJson(key, value);
1916
- if (t.schema.isAny(value.items)) return this.sqliteJson(key, value);
1917
- if (t.schema.isString(value.items)) return this.sqliteJson(key, value);
1918
- if (t.schema.isInteger(value.items)) return this.sqliteJson(key, value);
1919
- if (t.schema.isNumber(value.items)) return this.sqliteJson(key, value);
1920
- if (t.schema.isBoolean(value.items)) return this.sqliteJson(key, value);
1921
- }
1922
- if (t.schema.isUnsafe(value) && "type" in value && value.type === "string") return this.mapStringToSqliteColumn(key, value);
1923
- throw new Error(`Unsupported schema for field '${tableName}.${fieldName}' (schema: ${JSON.stringify(value)})`);
1924
- };
1925
- mapStringToSqliteColumn = (key, value) => {
1926
- if (value.format === "uuid") {
1927
- if (PG_PRIMARY_KEY in value) return pg$1.text(key).primaryKey().$defaultFn(() => randomUUID());
1928
- return pg$1.text(key);
1929
- }
1930
- if (value.format === "byte") return this.sqliteJson(key, value);
1931
- if (value.format === "date-time") {
1932
- if (PG_CREATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
1933
- if (PG_UPDATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
1934
- return this.sqliteDateTime(key, {});
1945
+ const columns = query.columns ?? query.distinct;
1946
+ const builder = query.distinct ? this.rawSelectDistinct(opts, query.distinct) : this.rawSelect(opts);
1947
+ const joins = [];
1948
+ if (query.with) this.relationManager.buildJoins(this.provider, builder, joins, query.with, this.table);
1949
+ const where = this.withDeletedAt(query.where ?? {}, opts);
1950
+ builder.where(() => this.toSQL(where, joins));
1951
+ if (query.offset) {
1952
+ builder.offset(query.offset);
1953
+ if (this.provider.dialect === "sqlite" && !query.limit) query.limit = 1e3;
1935
1954
  }
1936
- if (value.format === "date") return this.sqliteDate(key, {});
1937
- return pg$1.text(key);
1938
- };
1939
- sqliteJson = (name, document) => pg$1.customType({
1940
- dataType: () => "text",
1941
- toDriver: (value) => JSON.stringify(value),
1942
- fromDriver: (value) => {
1943
- return value && typeof value === "string" ? JSON.parse(value) : value;
1955
+ if (query.limit) builder.limit(query.limit);
1956
+ if (query.orderBy) {
1957
+ const orderByClauses = this.queryManager.normalizeOrderBy(query.orderBy);
1958
+ builder.orderBy(...orderByClauses.map((clause) => clause.direction === "desc" ? desc(this.col(clause.column)) : asc(this.col(clause.column))));
1944
1959
  }
1945
- })(name, { document }).$type();
1946
- sqliteDateTime = pg$1.customType({
1947
- dataType: () => "integer",
1948
- toDriver: (value) => new Date(value).getTime(),
1949
- fromDriver: (value) => {
1950
- return new Date(value).toISOString();
1960
+ if (query.groupBy) builder.groupBy(...query.groupBy.map((key) => this.col(key)));
1961
+ if (opts.for) {
1962
+ if (typeof opts.for === "string") builder.for(opts.for);
1963
+ else if (opts.for) builder.for(opts.for.strength, opts.for.config);
1951
1964
  }
1952
- });
1953
- sqliteBool = pg$1.customType({
1954
- dataType: () => "integer",
1955
- toDriver: (value) => value ? 1 : 0,
1956
- fromDriver: (value) => value === 1
1957
- });
1958
- sqliteDate = pg$1.customType({
1959
- dataType: () => "integer",
1960
- toDriver: (value) => new Date(value).getTime(),
1961
- fromDriver: (value) => {
1962
- return new Date(value).toISOString().split("T")[0];
1965
+ try {
1966
+ let rows = await builder.execute();
1967
+ let schema$1 = this.entity.schema;
1968
+ if (columns) schema$1 = t.pick(schema$1, columns);
1969
+ if (joins.length) rows = rows.map((row) => {
1970
+ const rowSchema = {
1971
+ ...schema$1,
1972
+ properties: { ...schema$1.properties }
1973
+ };
1974
+ return this.relationManager.mapRowWithJoins(row[this.tableName], row, rowSchema, joins);
1975
+ });
1976
+ rows = rows.map((row) => {
1977
+ if (joins.length) {
1978
+ const joinedSchema = this.relationManager.buildSchemaWithJoins(schema$1, joins);
1979
+ return this.cleanWithJoins(row, joinedSchema, joins);
1980
+ }
1981
+ return this.clean(row, schema$1);
1982
+ });
1983
+ await this.alepha.events.emit("repository:read:after", {
1984
+ tableName: this.tableName,
1985
+ query,
1986
+ entities: rows
1987
+ });
1988
+ return rows;
1989
+ } catch (error) {
1990
+ throw new DbError("Query select has failed", error);
1963
1991
  }
1964
- });
1965
- };
1966
-
1967
- //#endregion
1968
- //#region ../../src/orm/providers/drivers/BunSqliteProvider.ts
1969
- const envSchema$3 = t.object({ DATABASE_URL: t.optional(t.text()) });
1970
- /**
1971
- * Configuration options for the Bun SQLite database provider.
1972
- */
1973
- const bunSqliteOptions = $atom({
1974
- name: "alepha.postgres.bun-sqlite.options",
1975
- schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
1976
- default: {}
1977
- });
1978
- /**
1979
- * Bun SQLite provider using Drizzle ORM with Bun's native SQLite client.
1980
- *
1981
- * This provider uses Bun's built-in `bun:sqlite` for SQLite connections,
1982
- * which provides excellent performance on the Bun runtime.
1983
- *
1984
- * @example
1985
- * ```ts
1986
- * // Set DATABASE_URL environment variable
1987
- * // DATABASE_URL=sqlite://./my-database.db
1988
- *
1989
- * // Or configure programmatically
1990
- * alepha.with({
1991
- * provide: DatabaseProvider,
1992
- * use: BunSqliteProvider,
1993
- * });
1994
- *
1995
- * // Or use options atom
1996
- * alepha.store.mut(bunSqliteOptions, (old) => ({
1997
- * ...old,
1998
- * path: ":memory:",
1999
- * }));
2000
- * ```
2001
- */
2002
- var BunSqliteProvider = class extends DatabaseProvider {
2003
- kit = $inject(DrizzleKitProvider);
2004
- log = $logger();
2005
- env = $env(envSchema$3);
2006
- builder = $inject(SqliteModelBuilder);
2007
- options = $use(bunSqliteOptions);
2008
- sqlite;
2009
- bunDb;
2010
- get name() {
2011
- return "bun-sqlite";
2012
1992
  }
2013
- dialect = "sqlite";
2014
- get url() {
2015
- const path = this.options.path ?? this.env.DATABASE_URL;
2016
- if (path) {
2017
- if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
2018
- return path;
1993
+ /**
1994
+ * Find a single entity.
1995
+ */
1996
+ async findOne(query, opts = {}) {
1997
+ const [entity] = await this.findMany({
1998
+ limit: 1,
1999
+ ...query
2000
+ }, opts);
2001
+ if (!entity) throw new DbEntityNotFoundError(this.tableName);
2002
+ return entity;
2003
+ }
2004
+ /**
2005
+ * Find entities with pagination.
2006
+ *
2007
+ * It uses the same parameters as `find()`, but adds pagination metadata to the response.
2008
+ *
2009
+ * > Pagination CAN also do a count query to get the total number of elements.
2010
+ */
2011
+ async paginate(pagination = {}, query = {}, opts = {}) {
2012
+ const limit = query.limit ?? pagination.size ?? 10;
2013
+ const page = pagination.page ?? 0;
2014
+ const offset = query.offset ?? page * limit;
2015
+ let orderBy = query.orderBy;
2016
+ if (!query.orderBy && pagination.sort) orderBy = this.queryManager.parsePaginationSort(pagination.sort);
2017
+ const now = Date.now();
2018
+ const timers = {
2019
+ query: now,
2020
+ count: now
2021
+ };
2022
+ const tasks = [];
2023
+ tasks.push(this.findMany({
2024
+ offset,
2025
+ limit: limit + 1,
2026
+ orderBy,
2027
+ ...query
2028
+ }, opts).then((it) => {
2029
+ timers.query = Date.now() - timers.query;
2030
+ return it;
2031
+ }));
2032
+ if (opts.count) {
2033
+ const where = isSQLWrapper(query.where) ? query.where : query.where ? this.toSQL(query.where) : void 0;
2034
+ tasks.push(this.db.$count(this.table, where).then((it) => {
2035
+ timers.count = Date.now() - timers.count;
2036
+ return it;
2037
+ }));
2019
2038
  }
2020
- if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
2021
- else return "node_modules/.alepha/bun-sqlite.db";
2039
+ const [entities, countResult] = await Promise.all(tasks);
2040
+ let sortMetadata;
2041
+ if (orderBy) sortMetadata = this.queryManager.normalizeOrderBy(orderBy);
2042
+ const response = this.queryManager.createPagination(entities, limit, offset, sortMetadata);
2043
+ response.page.totalElements = countResult;
2044
+ if (countResult != null) response.page.totalPages = Math.ceil(countResult / limit);
2045
+ return response;
2022
2046
  }
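Putting `paginate` together with the sort-string parsing from `QueryManager`, a hedged usage sketch; the repository, columns, and values are hypothetical:

```ts
// Hypothetical usage; `users` is a repository instance.
const result = await users.paginate(
  { page: 0, size: 20, sort: "lastName,-createdAt" }, // "-" prefix means DESC, as parsed above
  { where: { status: { eq: "active" } } },
  { count: true },                                     // also run a COUNT to fill the totals
);

// With `count` enabled, result.page.totalElements and result.page.totalPages are populated.
```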
2023
- get db() {
2024
- if (!this.bunDb) throw new AlephaError("Database not initialized");
2025
- return this.bunDb;
2047
+ /**
2048
+ * Find an entity by ID.
2049
+ *
2050
+ * This is a convenience method for `findOne` with a where clause on the primary key.
2051
+ * If you need more complex queries, use `findOne` instead.
2052
+ */
2053
+ async findById(id, opts = {}) {
2054
+ return await this.findOne({ where: this.getWhereId(id) }, opts);
2026
2055
  }
2027
- async execute(query) {
2028
- return this.bunDb.all(query);
2056
+ /**
2057
+ * Helper to create a type-safe query object.
2058
+ */
2059
+ createQuery() {
2060
+ return {};
2029
2061
  }
2030
- onStart = $hook({
2031
- on: "start",
2032
- handler: async () => {
2033
- if (typeof Bun === "undefined") throw new AlephaError("BunSqliteProvider requires the Bun runtime. Use NodeSqliteProvider for Node.js.");
2034
- const { Database } = await import("bun:sqlite");
2035
- const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sqlite");
2036
- const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
2037
- if (filepath !== ":memory:" && filepath !== "") {
2038
- const dirname = filepath.split("/").slice(0, -1).join("/");
2039
- if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
2040
- }
2041
- this.sqlite = new Database(filepath);
2042
- this.bunDb = drizzle$3({
2043
- client: this.sqlite,
2044
- logger: { logQuery: (query, params) => {
2045
- this.log.trace(query, { params });
2046
- } }
2062
+ /**
2063
+ * Helper to create a type-safe where clause.
2064
+ */
2065
+ createQueryWhere() {
2066
+ return {};
2067
+ }
2068
+ /**
2069
+ * Create an entity.
2070
+ *
2071
+ * @param data The entity to create.
2072
+ * @param opts The options for creating the entity.
2073
+ * @returns The ID of the created entity.
2074
+ */
2075
+ async create(data, opts = {}) {
2076
+ await this.alepha.events.emit("repository:create:before", {
2077
+ tableName: this.tableName,
2078
+ data
2079
+ });
2080
+ try {
2081
+ const entity = await this.rawInsert(opts).values(this.cast(data ?? {}, true)).returning(this.table).then(([it]) => this.clean(it, this.entity.schema));
2082
+ await this.alepha.events.emit("repository:create:after", {
2083
+ tableName: this.tableName,
2084
+ data,
2085
+ entity
2047
2086
  });
2048
- await this.migrate();
2049
- this.log.info(`Using Bun SQLite database at ${filepath}`);
2087
+ return entity;
2088
+ } catch (error) {
2089
+ throw this.handleError(error, "Insert query has failed");
2050
2090
  }
2051
- });
2052
- onStop = $hook({
2053
- on: "stop",
2054
- handler: async () => {
2055
- if (this.sqlite) {
2056
- this.log.debug("Closing Bun SQLite connection...");
2057
- this.sqlite.close();
2058
- this.sqlite = void 0;
2059
- this.bunDb = void 0;
2060
- this.log.info("Bun SQLite connection closed");
2091
+ }
2092
+ /**
2093
+ * Create many entities.
2094
+ *
2095
+ * Inserts are batched in chunks of 1000 to avoid hitting database limits.
2096
+ *
2097
+ * @param values The entities to create.
2098
+ * @param opts The statement options.
2099
+ * @returns The created entities.
2100
+ */
2101
+ async createMany(values, opts = {}) {
2102
+ if (values.length === 0) return [];
2103
+ await this.alepha.events.emit("repository:create:before", {
2104
+ tableName: this.tableName,
2105
+ data: values
2106
+ });
2107
+ const batchSize = opts.batchSize ?? 1e3;
2108
+ const allEntities = [];
2109
+ try {
2110
+ for (let i = 0; i < values.length; i += batchSize) {
2111
+ const batch = values.slice(i, i + batchSize);
2112
+ const entities = await this.rawInsert(opts).values(batch.map((data) => this.cast(data, true))).returning(this.table).then((rows) => rows.map((it) => this.clean(it, this.entity.schema)));
2113
+ allEntities.push(...entities);
2061
2114
  }
2115
+ await this.alepha.events.emit("repository:create:after", {
2116
+ tableName: this.tableName,
2117
+ data: values,
2118
+ entity: allEntities
2119
+ });
2120
+ return allEntities;
2121
+ } catch (error) {
2122
+ throw this.handleError(error, "Insert query has failed");
2062
2123
  }
2063
- });
2064
- async executeMigrations(migrationsFolder) {
2065
- const { migrate: migrate$3 } = await import("drizzle-orm/bun-sqlite/migrator");
2066
- await migrate$3(this.bunDb, { migrationsFolder });
2067
2124
  }
2068
- };
2069
-
2070
- //#endregion
2071
- //#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
2072
- /**
2073
- * Cloudflare D1 SQLite provider using Drizzle ORM.
2074
- *
2075
- * This provider requires a D1 binding to be set via `cloudflareD1Options` before starting.
2076
- * The binding is typically obtained from the Cloudflare Workers environment.
2077
- *
2078
- * @example
2079
- * ```ts
2080
- * // In your Cloudflare Worker
2081
- * alepha.set(cloudflareD1Options, { binding: env.DB });
2082
- * ```
2083
- */
2084
- var CloudflareD1Provider = class extends DatabaseProvider {
2085
- kit = $inject(DrizzleKitProvider);
2086
- log = $logger();
2087
- builder = $inject(SqliteModelBuilder);
2088
- env = $env(t.object({ DATABASE_URL: t.string({ description: "Expect to be 'cloudflare-d1://name:id'" }) }));
2089
- d1;
2090
- drizzleDb;
2091
- get name() {
2092
- return "d1";
2093
- }
2094
- dialect = "sqlite";
2095
- get url() {
2096
- return this.env.DATABASE_URL;
2097
- }
2098
- get db() {
2099
- if (!this.drizzleDb) throw new AlephaError("D1 database not initialized");
2100
- return this.drizzleDb;
2101
- }
2102
- async execute(query) {
2103
- const { rows } = await this.db.run(query);
2104
- return rows;
2125
+ /**
2126
+ * Find an entity and update it.
2127
+ */
2128
+ async updateOne(where, data, opts = {}) {
2129
+ await this.alepha.events.emit("repository:update:before", {
2130
+ tableName: this.tableName,
2131
+ where,
2132
+ data
2133
+ });
2134
+ let row = data;
2135
+ const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
2136
+ if (updatedAtField) row[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
2137
+ where = this.withDeletedAt(where, opts);
2138
+ row = this.cast(row, false);
2139
+ delete row[this.id.key];
2140
+ const response = await this.rawUpdate(opts).set(row).where(this.toSQL(where)).returning(this.table).catch((error) => {
2141
+ throw this.handleError(error, "Update query has failed");
2142
+ });
2143
+ if (!response[0]) throw new DbEntityNotFoundError(this.tableName);
2144
+ try {
2145
+ const entity = this.clean(response[0], this.entity.schema);
2146
+ await this.alepha.events.emit("repository:update:after", {
2147
+ tableName: this.tableName,
2148
+ where,
2149
+ data,
2150
+ entities: [entity]
2151
+ });
2152
+ return entity;
2153
+ } catch (error) {
2154
+ throw this.handleError(error, "Update query has failed");
2155
+ }
2105
2156
  }
- onStart = $hook({
- on: "start",
- handler: async () => {
- const [bindingName] = this.env.DATABASE_URL.replace("cloudflare-d1://", "").split(":");
- const cloudflareEnv = this.alepha.store.get("cloudflare.env");
- if (!cloudflareEnv) throw new AlephaError("Cloudflare Workers environment not found in Alepha store under 'cloudflare.env'.");
- const binding = cloudflareEnv[bindingName];
- if (!binding) throw new AlephaError(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
- this.d1 = binding;
- const { drizzle: drizzle$3 } = await import("drizzle-orm/d1");
- this.drizzleDb = drizzle$3(this.d1);
- await this.migrate();
- this.log.info("Using Cloudflare D1 database");
+ /**
+ * Save a given entity.
+ *
+ * @example
+ * ```ts
+ * const entity = await repository.findById(1);
+ * entity.name = "New Name"; // update a field
+ * delete entity.description; // delete a field
+ * await repository.save(entity);
+ * ```
+ *
+ * Difference with `updateById/updateOne`:
+ *
+ * - requires the entity to be fetched first (whole object is expected)
+ * - check pg.version() if present -> optimistic locking
+ * - validate entity against schema
+ * - undefined values will be set to null, not ignored!
+ *
+ * @see {@link DbVersionMismatchError}
+ */
+ async save(entity, opts = {}) {
+ const row = entity;
+ const id = row[this.id.key];
+ if (id == null) throw new AlephaError("Cannot save entity without ID - missing primary key in value");
+ for (const key of Object.keys(this.entity.schema.properties)) if (row[key] === void 0) row[key] = null;
+ let where = this.createQueryWhere();
+ where.id = { eq: id };
+ const versionField = getAttrFields(this.entity.schema, PG_VERSION)?.[0];
+ if (versionField && typeof row[versionField.key] === "number") {
+ where = { and: [where, { [versionField.key]: { eq: row[versionField.key] } }] };
+ row[versionField.key] += 1;
+ }
+ try {
+ const newValue = await this.updateOne(where, row, opts);
+ for (const key of Object.keys(this.entity.schema.properties)) row[key] = void 0;
+ Object.assign(row, newValue);
+ } catch (error) {
+ if (error instanceof DbEntityNotFoundError && versionField) try {
+ await this.findById(id);
+ throw new DbVersionMismatchError(this.tableName, id);
+ } catch (lookupError) {
+ if (lookupError instanceof DbEntityNotFoundError) throw error;
+ if (lookupError instanceof DbVersionMismatchError) throw lookupError;
+ throw lookupError;
+ }
+ throw error;
  }
- });
- async executeMigrations(migrationsFolder) {
- const { migrate: migrate$3 } = await import("drizzle-orm/d1/migrator");
- await migrate$3(this.db, { migrationsFolder });
  }
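`save` layers optimistic locking on top of `updateOne`: when the schema declares a `PG_VERSION` field, the current version becomes part of the `where` clause and is incremented before the write, so a stale entity surfaces as `DbVersionMismatchError` instead of silently overwriting. A sketch of handling that conflict, assuming a repository whose entity has such a version column (names are illustrative):

```ts
import { DbVersionMismatchError } from "alepha/postgres";

async function renameWithRetry(repo: any, id: number, name: string): Promise<void> {
  const entity = await repo.findById(id);
  entity.name = name;
  try {
    // save() adds { version: { eq: n } } to the where clause and writes n + 1.
    await repo.save(entity);
  } catch (error) {
    if (error instanceof DbVersionMismatchError) {
      // Another writer bumped the version first: re-read and try again
      // (or wrap the whole operation in $transaction, which retries this error).
      return renameWithRetry(repo, id, name);
    }
    throw error;
  }
}
```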
  /**
- * Override development migration to skip sync (not supported on D1).
- * D1 requires proper migrations to be applied.
+ * Find an entity by ID and update it.
  */
- async runDevelopmentMigration(migrationsFolder) {
- await this.executeMigrations(migrationsFolder);
+ async updateById(id, data, opts = {}) {
+ return await this.updateOne(this.getWhereId(id), data, opts);
  }
  /**
- * Override test migration to run migrations instead of sync.
- * D1 doesn't support schema synchronization.
+ * Find many entities and update all of them.
  */
- async runTestMigration() {
- const migrationsFolder = this.getMigrationsFolder();
+ async updateMany(where, data, opts = {}) {
+ await this.alepha.events.emit("repository:update:before", {
+ tableName: this.tableName,
+ where,
+ data
+ });
+ const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
+ if (updatedAtField) data[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
+ where = this.withDeletedAt(where, opts);
+ data = this.cast(data, false);
  try {
- await this.executeMigrations(migrationsFolder);
- } catch {
- this.log.warn("D1 migrations failed in test environment - ensure migrations exist");
+ const entities = await this.rawUpdate(opts).set(data).where(this.toSQL(where)).returning();
+ await this.alepha.events.emit("repository:update:after", {
+ tableName: this.tableName,
+ where,
+ data,
+ entities
+ });
+ return entities.map((it) => it[this.id.key]);
+ } catch (error) {
+ throw this.handleError(error, "Update query has failed");
  }
  }
- };
-
- //#endregion
- //#region ../../src/orm/providers/drivers/NodePostgresProvider.ts
- const envSchema$2 = t.object({
- DATABASE_URL: t.optional(t.text()),
- POSTGRES_SCHEMA: t.optional(t.text())
- });
- var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
- static SSL_MODES = [
- "require",
- "allow",
- "prefer",
- "verify-full"
- ];
- log = $logger();
- env = $env(envSchema$2);
- kit = $inject(DrizzleKitProvider);
- builder = $inject(PostgresModelBuilder);
- client;
- pg;
- dialect = "postgresql";
- get name() {
- return "postgres";
+ /**
+ * Find many and delete all of them.
+ * @returns Array of deleted entity IDs
+ */
+ async deleteMany(where = {}, opts = {}) {
+ const deletedAt = this.deletedAt();
+ if (deletedAt && !opts.force) return await this.updateMany(where, { [deletedAt.key]: opts.now ?? this.dateTimeProvider.nowISOString() }, opts);
+ await this.alepha.events.emit("repository:delete:before", {
+ tableName: this.tableName,
+ where
+ });
+ try {
+ const ids = (await this.rawDelete(opts).where(this.toSQL(where)).returning({ id: this.table[this.id.key] })).map((row) => row.id);
+ await this.alepha.events.emit("repository:delete:after", {
+ tableName: this.tableName,
+ where,
+ ids
+ });
+ return ids;
+ } catch (error) {
+ throw new DbError("Delete query has failed", error);
+ }
  }
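`deleteMany` is soft-delete aware: when the entity declares a `PG_DELETED_AT` field it rewrites the delete into an `updateMany` that stamps the timestamp, and only issues a real `DELETE` when `force: true` is passed. A sketch under those assumptions (the `articles` repository and `authorId` field are illustrative):

```ts
async function removeArticlesByAuthor(articles: any, authorId: number) {
  // Soft delete: sets the PG_DELETED_AT column and returns the affected IDs;
  // later reads filter these rows out via withDeletedAt().
  const softDeleted = await articles.deleteMany({ authorId: { eq: authorId } });

  // Hard delete: `force: true` bypasses the soft-delete path and removes the rows.
  const purged = await articles.deleteMany({ authorId: { eq: authorId } }, { force: true });

  return { softDeleted, purged };
}
```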
  /**
- * In testing mode, the schema name will be generated and deleted after the test.
+ * Delete all entities.
+ * @returns Array of deleted entity IDs
  */
- schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
- get url() {
- if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
- return this.env.DATABASE_URL;
+ clear(opts = {}) {
+ return this.deleteMany({}, opts);
  }
  /**
- * Execute a SQL statement.
+ * Delete the given entity.
+ *
+ * You must fetch the entity first in order to delete it.
+ * @returns Array containing the deleted entity ID
  */
- execute(statement) {
- try {
- return this.db.execute(statement);
- } catch (error) {
- throw new DbError("Error executing statement", error);
+ async destroy(entity, opts = {}) {
+ const id = entity[this.id.key];
+ if (id == null) throw new AlephaError("Cannot destroy entity without ID");
+ const deletedAt = this.deletedAt();
+ if (deletedAt && !opts.force) {
+ opts.now ??= this.dateTimeProvider.nowISOString();
+ entity[deletedAt.key] = opts.now;
  }
+ return await this.deleteById(id, opts);
  }
  /**
- * Get Postgres schema used by this provider.
+ * Find an entity and delete it.
+ * @returns Array of deleted entity IDs (should contain at most one ID)
  */
- get schema() {
- if (this.schemaForTesting) return this.schemaForTesting;
- if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
- return "public";
+ async deleteOne(where = {}, opts = {}) {
+ return await this.deleteMany(where, opts);
  }
  /**
- * Get the Drizzle Postgres database instance.
+ * Find an entity by ID and delete it.
+ * @returns Array containing the deleted entity ID
+ * @throws DbEntityNotFoundError if the entity is not found
  */
- get db() {
- if (!this.pg) throw new AlephaError("Database not initialized");
- return this.pg;
+ async deleteById(id, opts = {}) {
+ const result = await this.deleteMany(this.getWhereId(id), opts);
+ if (result.length === 0) throw new DbEntityNotFoundError(`Entity with ID ${id} not found in ${this.tableName}`);
+ return result;
  }
- async executeMigrations(migrationsFolder) {
- await migrate(this.db, { migrationsFolder });
+ /**
+ * Count entities.
+ */
+ async count(where = {}, opts = {}) {
+ where = this.withDeletedAt(where, opts);
+ return (opts.tx ?? this.db).$count(this.table, this.toSQL(where));
  }
- onStart = $hook({
- on: "start",
- handler: async () => {
- await this.connect();
- if (!this.alepha.isServerless()) try {
- await this.migrateLock.run();
- } catch (error) {
- throw new DbMigrationError(error);
- }
- }
- });
- onStop = $hook({
- on: "stop",
- handler: async () => {
- if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
- if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
- this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
- await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
- this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
- }
- await this.close();
- }
- });
- async connect() {
- this.log.debug("Connect ..");
- const client = postgres(this.getClientOptions());
- await client`SELECT 1`;
- this.client = client;
- this.pg = drizzle$1(client, { logger: { logQuery: (query, params) => {
- this.log.trace(query, { params });
- } } });
- this.log.info("Connection OK");
+ conflictMessagePattern = "duplicate key value violates unique constraint";
+ handleError(error, message) {
+ if (!(error instanceof Error)) return new DbError(message);
+ if (error.cause?.message.includes(this.conflictMessagePattern) || error.message.includes(this.conflictMessagePattern)) return new DbConflictError(message, error);
+ return new DbError(message, error);
  }
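`handleError` checks both `error.message` and `error.cause?.message` for the Postgres unique-violation text and converts the failure into a `DbConflictError`, while everything else becomes a plain `DbError`. A sketch of relying on that mapping (the repository and its insert helper are hypothetical; only the tail of the bulk-insert path is visible in this diff):

```ts
import { DbConflictError } from "alepha/postgres";

async function registerUser(users: any, email: string) {
  try {
    // hypothetical insert call, assuming a unique index on `email`
    return await users.create({ email });
  } catch (error) {
    if (error instanceof DbConflictError) {
      // "duplicate key value violates unique constraint" → user-facing conflict
      throw new Error(`Email already registered: ${email}`);
    }
    throw error; // any other DbError stays fatal
  }
}
```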
- async close() {
- if (this.client) {
- this.log.debug("Close...");
- await this.client.end();
- this.client = void 0;
- this.pg = void 0;
- this.log.info("Connection closed");
- }
+ withDeletedAt(where, opts = {}) {
+ if (opts.force) return where;
+ const deletedAt = this.deletedAt();
+ if (!deletedAt) return where;
+ return { and: [where, { [deletedAt.key]: { isNull: true } }] };
+ }
+ deletedAt() {
+ const deletedAtFields = getAttrFields(this.entity.schema, PG_DELETED_AT);
+ if (deletedAtFields.length > 0) return deletedAtFields[0];
  }
- migrateLock = $lock({ handler: async () => {
- await this.migrate();
- } });
  /**
- * Map the DATABASE_URL to postgres client options.
+ * Convert something to valid Pg Insert Value.
  */
- getClientOptions() {
- const url = new URL(this.url);
- return {
- host: url.hostname,
- user: decodeURIComponent(url.username),
- database: decodeURIComponent(url.pathname.replace("/", "")),
- password: decodeURIComponent(url.password),
- port: Number(url.port || 5432),
- ssl: this.ssl(url),
- onnotice: () => {}
- };
- }
- ssl(url) {
- const mode = url.searchParams.get("sslmode");
- for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
- }
- };
-
- //#endregion
- //#region ../../src/orm/providers/drivers/NodeSqliteProvider.ts
- const envSchema$1 = t.object({ DATABASE_URL: t.optional(t.text()) });
- /**
- * Configuration options for the Node.js SQLite database provider.
- */
- const nodeSqliteOptions = $atom({
- name: "alepha.postgres.node-sqlite.options",
- schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
- default: {}
- });
- /**
- * Add a fake support for SQLite in Node.js based on Postgres interfaces.
- *
- * This is NOT a real SQLite provider, it's a workaround to use SQLite with Drizzle ORM.
- * This is NOT recommended for production use.
- */
- var NodeSqliteProvider = class extends DatabaseProvider {
- kit = $inject(DrizzleKitProvider);
- log = $logger();
- env = $env(envSchema$1);
- builder = $inject(SqliteModelBuilder);
- options = $use(nodeSqliteOptions);
- sqlite;
- get name() {
- return "sqlite";
+ cast(data, insert) {
+ const schema$1 = insert ? this.entity.insertSchema : t.partial(this.entity.updateSchema);
+ return this.alepha.codec.encode(schema$1, data);
  }
- dialect = "sqlite";
- get url() {
- const path = this.options.path ?? this.env.DATABASE_URL;
- if (path) {
- if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
- return path;
+ /**
+ * Transform a row from the database into a clean entity.
+ */
+ clean(row, schema$1) {
+ for (const key of Object.keys(schema$1.properties)) {
+ const value = schema$1.properties[key];
+ if (typeof row[key] === "string") {
+ if (t.schema.isDateTime(value)) row[key] = this.dateTimeProvider.of(row[key]).toISOString();
+ else if (t.schema.isDate(value)) row[key] = this.dateTimeProvider.of(`${row[key]}T00:00:00Z`).toISOString().split("T")[0];
+ }
+ if (typeof row[key] === "bigint" && t.schema.isBigInt(value)) row[key] = row[key].toString();
  }
- if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
- else return "node_modules/.alepha/sqlite.db";
+ return this.alepha.codec.decode(schema$1, row);
  }
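`clean` normalizes driver output before schema decoding: `datetime` strings go through the `dateTimeProvider` to ISO-8601, `date` columns stay date-only (parsed at UTC midnight), and `bigint` values are stringified. Roughly, for a row read back through `findById` (field names and exact formatting are illustrative; the precise output depends on the configured `dateTimeProvider`):

```ts
async function inspectRow(users: any) {
  const row = await users.findById(1); // `users` stands for a repository instance
  console.log(typeof row.views); // "string"  – bigint columns come back stringified
  console.log(row.birthday);     // "1990-05-01" – date columns stay date-only
  console.log(row.createdAt);    // e.g. "2024-01-02T03:04:05.000Z" – datetime → ISO string
}
```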
- async execute(query) {
- const { sql: sql$2, params, method } = this.db.all(query).getQuery();
- this.log.trace(`${sql$2}`, params);
- const statement = this.sqlite.prepare(sql$2);
- if (method === "run") {
- statement.run(...params);
- return [];
+ /**
+ * Clean a row with joins recursively
+ */
+ cleanWithJoins(row, schema$1, joins, parentPath) {
+ const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
+ const cleanRow = { ...row };
+ const joinedData = {};
+ for (const join of joinsAtThisLevel) {
+ joinedData[join.key] = cleanRow[join.key];
+ delete cleanRow[join.key];
  }
- if (method === "get") {
- const data = statement.get(...params);
- return data ? [{ ...data }] : [];
+ const entity = this.clean(cleanRow, schema$1);
+ for (const join of joinsAtThisLevel) {
+ const joinedValue = joinedData[join.key];
+ if (joinedValue != null) {
+ const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
+ if (joins.filter((j) => j.parent === joinPath).length > 0) entity[join.key] = this.cleanWithJoins(joinedValue, join.schema, joins, joinPath);
+ else entity[join.key] = this.clean(joinedValue, join.schema);
+ } else entity[join.key] = void 0;
  }
- return statement.all(...params);
+ return entity;
  }
- db = drizzle$2(async (sql$2, params, method) => {
- const statement = this.sqlite.prepare(sql$2);
- this.log.trace(`${sql$2}`, { params });
- if (method === "get") {
- const data = statement.get(...params);
- return { rows: data ? [{ ...data }] : [] };
- }
- if (method === "run") {
- statement.run(...params);
- return { rows: [] };
- }
- if (method === "all") return { rows: statement.all(...params).map((row) => Object.values(row)) };
- if (method === "values") return { rows: statement.all(...params).map((row) => Object.values(row)) };
- throw new AlephaError(`Unsupported method: ${method}`);
- });
- onStart = $hook({
- on: "start",
- handler: async () => {
- const { DatabaseSync } = await import("node:sqlite");
- const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
- if (filepath !== ":memory:" && filepath !== "") {
- const dirname = filepath.split("/").slice(0, -1).join("/");
- if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
- }
- this.sqlite = new DatabaseSync(filepath);
- await this.migrate();
- this.log.info(`Using SQLite database at ${filepath}`);
- }
- });
- async executeMigrations(migrationsFolder) {
- await migrate$1(this.db, async (migrationQueries) => {
- this.log.debug("Executing migration queries", { migrationQueries });
- for (const query of migrationQueries) this.sqlite.prepare(query).run();
- }, { migrationsFolder });
+ /**
+ * Convert a where clause to SQL.
+ */
+ toSQL(where, joins) {
+ return this.queryManager.toSQL(where, {
+ schema: this.entity.schema,
+ col: (name) => {
+ return this.col(name);
+ },
+ joins,
+ dialect: this.provider.dialect
+ });
+ }
+ /**
+ * Get the where clause for an ID.
+ *
+ * @param id The ID to get the where clause for.
+ * @returns The where clause for the ID.
+ */
+ getWhereId(id) {
+ return { [this.id.key]: { eq: t.schema.isString(this.id.type) ? String(id) : Number(id) } };
+ }
+ /**
+ * Find a primary key in the schema.
+ */
+ getPrimaryKey(schema$1) {
+ const primaryKeys = getAttrFields(schema$1, PG_PRIMARY_KEY);
+ if (primaryKeys.length === 0) throw new AlephaError("Primary key not found in schema");
+ if (primaryKeys.length > 1) throw new AlephaError(`Multiple primary keys (${primaryKeys.length}) are not supported`);
+ return {
+ key: primaryKeys[0].key,
+ col: this.col(primaryKeys[0].key),
+ type: primaryKeys[0].type
+ };
  }
  };

  //#endregion
- //#region ../../src/orm/providers/drivers/PglitePostgresProvider.ts
- const envSchema = t.object({ DATABASE_URL: t.optional(t.text()) });
- var PglitePostgresProvider = class PglitePostgresProvider extends DatabaseProvider {
- static importPglite() {
- try {
- return createRequire(import.meta.url)("@electric-sql/pglite");
- } catch {}
- }
- env = $env(envSchema);
- log = $logger();
- kit = $inject(DrizzleKitProvider);
- builder = $inject(PostgresModelBuilder);
- client;
- pglite;
- get name() {
- return "pglite";
- }
- dialect = "postgresql";
- get url() {
- let path = this.env.DATABASE_URL;
- if (!path) if (this.alepha.isTest()) path = ":memory:";
- else path = "node_modules/.alepha/pglite";
- else if (path.includes(":memory:")) path = ":memory:";
- else if (path.startsWith("file://")) path = path.replace("file://", "");
- return path;
- }
- get db() {
- if (!this.pglite) throw new AlephaError("Database not initialized");
- return this.pglite;
+ //#region ../../src/orm/providers/RepositoryProvider.ts
+ var RepositoryProvider = class {
+ alepha = $inject(Alepha);
+ registry = /* @__PURE__ */ new Map();
+ getRepositories(provider) {
+ const repositories = this.alepha.services(Repository);
+ if (provider) return repositories.filter((it) => it.provider === provider);
+ return repositories;
  }
- async execute(statement) {
- const { rows } = await this.db.execute(statement);
- return rows;
+ getRepository(entity) {
+ const RepositoryClass = this.createClassRepository(entity);
+ return this.alepha.inject(RepositoryClass);
  }
- onStart = $hook({
- on: "start",
- handler: async () => {
- if (Object.keys(this.kit.getModels(this)).length === 0) return;
- const module = PglitePostgresProvider.importPglite();
- if (!module) throw new AlephaError("@electric-sql/pglite is not installed. Please install it to use the pglite driver.");
- const { drizzle: drizzle$3 } = createRequire(import.meta.url)("drizzle-orm/pglite");
- const path = this.url;
- if (path !== ":memory:") {
- await mkdir(path, { recursive: true }).catch(() => null);
- this.client = new module.PGlite(path);
- } else this.client = new module.PGlite();
- this.pglite = drizzle$3({ client: this.client });
- await this.migrate();
- this.log.info(`Using PGlite database at ${path}`);
- }
- });
- onStop = $hook({
- on: "stop",
- handler: async () => {
- if (this.client) {
- this.log.debug("Closing PGlite connection...");
- await this.client.close();
- this.client = void 0;
- this.pglite = void 0;
- this.log.info("PGlite connection closed");
+ createClassRepository(entity) {
+ let name = entity.name.charAt(0).toUpperCase() + entity.name.slice(1);
+ if (name.endsWith("s")) name = name.slice(0, -1);
+ name = `${name}Repository`;
+ if (this.registry.has(entity)) return this.registry.get(entity);
+ class GenericRepository extends Repository {
+ constructor() {
+ super(entity);
  }
  }
- });
- async executeMigrations(migrationsFolder) {
- await migrate$2(this.db, { migrationsFolder });
+ Object.defineProperty(GenericRepository, "name", { value: name });
+ this.registry.set(entity, GenericRepository);
+ return GenericRepository;
  }
  };

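`RepositoryProvider.createClassRepository` memoizes one generated `Repository` subclass per entity and names it by capitalizing and singularizing the entity name, so repeated lookups reuse the same class and, through injection, the same instance. Roughly (the `users` entity and the surrounding `alepha` instance are assumed to exist):

```ts
import { RepositoryProvider } from "alepha/postgres";

function demo(alepha: any, users: any) {
  const provider = alepha.inject(RepositoryProvider);
  const UserRepository = provider.createClassRepository(users); // users: a $entity definition
  console.log(UserRepository.name);                             // "UserRepository" ("users" → "User" + "Repository")
  console.log(provider.createClassRepository(users) === UserRepository); // true – memoized in the registry Map
  return provider.getRepository(users);                         // resolves an injected instance of that class
}
```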
@@ -2734,32 +2739,6 @@ function buildQueryString(where) {
  return parts.join("&");
  }

- //#endregion
- //#region ../../src/orm/primitives/$transaction.ts
- /**
- * Creates a transaction primitive for database operations requiring atomicity and consistency.
- *
- * This primitive provides a convenient way to wrap database operations in PostgreSQL
- * transactions, ensuring ACID properties and automatic retry logic for version conflicts.
- * It integrates seamlessly with the repository pattern and provides built-in handling
- * for optimistic locking scenarios with automatic retry on version mismatches.
- *
- * **Important Notes**:
- * - All operations within the transaction handler are atomic
- * - Automatic retry on `PgVersionMismatchError` for optimistic locking
- * - Pass `{ tx }` option to all repository operations within the transaction
- * - Transactions are automatically rolled back on any unhandled error
- * - Use appropriate isolation levels based on your consistency requirements
- */
- const $transaction = (opts) => {
- const { alepha } = $context();
- const provider = alepha.inject(DatabaseProvider);
- return $retry({
- when: (err) => err instanceof DbVersionMismatchError,
- handler: (...args) => provider.db.transaction(async (tx) => opts.handler(tx, ...args), opts.config)
- });
- };
-
  //#endregion
  //#region ../../src/orm/providers/DatabaseTypeProvider.ts
  var DatabaseTypeProvider = class {
@@ -2881,11 +2860,101 @@ const pg = db;
  */
  const legacyIdSchema = pgAttr(pgAttr(pgAttr(t.integer(), PG_PRIMARY_KEY), PG_SERIAL), PG_DEFAULT);

+ //#endregion
+ //#region ../../src/orm/primitives/$repository.ts
+ /**
+ * Get the repository for the given entity.
+ */
+ const $repository = (entity) => {
+ const { alepha } = $context();
+ return $inject(alepha.inject(RepositoryProvider).createClassRepository(entity));
+ };
+
+ //#endregion
+ //#region ../../src/orm/primitives/$transaction.ts
+ /**
+ * Creates a transaction primitive for database operations requiring atomicity and consistency.
+ *
+ * This primitive provides a convenient way to wrap database operations in PostgreSQL
+ * transactions, ensuring ACID properties and automatic retry logic for version conflicts.
+ * It integrates seamlessly with the repository pattern and provides built-in handling
+ * for optimistic locking scenarios with automatic retry on version mismatches.
+ *
+ * **Important Notes**:
+ * - All operations within the transaction handler are atomic
+ * - Automatic retry on `PgVersionMismatchError` for optimistic locking
+ * - Pass `{ tx }` option to all repository operations within the transaction
+ * - Transactions are automatically rolled back on any unhandled error
+ * - Use appropriate isolation levels based on your consistency requirements
+ */
+ const $transaction = (opts) => {
+ const { alepha } = $context();
+ const provider = alepha.inject(DatabaseProvider);
+ return $retry({
+ when: (err) => err instanceof DbVersionMismatchError,
+ handler: (...args) => provider.db.transaction(async (tx) => opts.handler(tx, ...args), opts.config)
+ });
+ };
+
  //#endregion
  //#region ../../src/orm/index.ts
+ var orm_exports = /* @__PURE__ */ __exportAll({
+ $entity: () => $entity,
+ $repository: () => $repository,
+ $sequence: () => $sequence,
+ $transaction: () => $transaction,
+ AlephaPostgres: () => AlephaPostgres,
+ BunPostgresProvider: () => BunPostgresProvider,
+ BunSqliteProvider: () => BunSqliteProvider,
+ CloudflareD1Provider: () => CloudflareD1Provider,
+ DatabaseProvider: () => DatabaseProvider,
+ DatabaseTypeProvider: () => DatabaseTypeProvider,
+ DbConflictError: () => DbConflictError,
+ DbEntityNotFoundError: () => DbEntityNotFoundError,
+ DbError: () => DbError,
+ DbMigrationError: () => DbMigrationError,
+ DbVersionMismatchError: () => DbVersionMismatchError,
+ DrizzleKitProvider: () => DrizzleKitProvider,
+ EntityPrimitive: () => EntityPrimitive,
+ NodePostgresProvider: () => NodePostgresProvider,
+ NodeSqliteProvider: () => NodeSqliteProvider,
+ PG_CREATED_AT: () => PG_CREATED_AT,
+ PG_DEFAULT: () => PG_DEFAULT,
+ PG_DELETED_AT: () => PG_DELETED_AT,
+ PG_ENUM: () => PG_ENUM,
+ PG_IDENTITY: () => PG_IDENTITY,
+ PG_PRIMARY_KEY: () => PG_PRIMARY_KEY,
+ PG_REF: () => PG_REF,
+ PG_SERIAL: () => PG_SERIAL,
+ PG_UPDATED_AT: () => PG_UPDATED_AT,
+ PG_VERSION: () => PG_VERSION,
+ Repository: () => Repository,
+ RepositoryProvider: () => RepositoryProvider,
+ SequencePrimitive: () => SequencePrimitive,
+ buildQueryString: () => buildQueryString,
+ bunSqliteOptions: () => bunSqliteOptions,
+ db: () => db,
+ drizzle: () => drizzle,
+ getAttrFields: () => getAttrFields,
+ insertSchema: () => insertSchema,
+ legacyIdSchema: () => legacyIdSchema,
+ nodeSqliteOptions: () => nodeSqliteOptions,
+ pageQuerySchema: () => pageQuerySchema,
+ pageSchema: () => pageSchema,
+ parseQueryString: () => parseQueryString,
+ pg: () => pg,
+ pgAttr: () => pgAttr,
+ schema: () => schema,
+ sql: () => sql,
+ updateSchema: () => updateSchema
+ });
  /**
  * Postgres client based on Drizzle ORM, Alepha type-safe friendly.
  *
+ * Automatically selects the appropriate provider based on runtime:
+ * - Bun: Uses `BunPostgresProvider` or `BunSqliteProvider`
+ * - Node.js: Uses `NodePostgresProvider` or `NodeSqliteProvider`
+ *
  * ```ts
  * import { t } from "alepha";
  * import { $entity, $repository, db } from "alepha/postgres";
@@ -2924,6 +2993,10 @@ const legacyIdSchema = pgAttr(pgAttr(pgAttr(t.integer(), PG_PRIMARY_KEY), PG_SERIAL), PG_DEFAULT);
  * @see {@link $sequence}
  * @see {@link $repository}
  * @see {@link $transaction}
+ * @see {@link NodePostgresProvider} - Node.js Postgres implementation
+ * @see {@link NodeSqliteProvider} - Node.js SQLite implementation
+ * @see {@link BunPostgresProvider} - Bun Postgres implementation
+ * @see {@link BunSqliteProvider} - Bun SQLite implementation
  * @module alepha.postgres
  */
  const AlephaPostgres = $module({
@@ -2933,10 +3006,10 @@ const AlephaPostgres = $module({
  AlephaDateTime,
  DatabaseProvider,
  NodePostgresProvider,
- PglitePostgresProvider,
  NodeSqliteProvider,
  BunPostgresProvider,
  BunSqliteProvider,
+ PglitePostgresProvider,
  CloudflareD1Provider,
  SqliteModelBuilder,
  PostgresModelBuilder,
@@ -2956,6 +3029,7 @@ const AlephaPostgres = $module({
  const isSqlite = url?.startsWith("sqlite:");
  const isMemory = url?.includes(":memory:");
  const isFile = !!url && !isPostgres && !isMemory;
+ const isBun = alepha.isBun();
  if (url?.startsWith("cloudflare-d1:")) {
  alepha.with({
  optional: true,
@@ -2976,14 +3050,14 @@ const AlephaPostgres = $module({
  alepha.with({
  optional: true,
  provide: DatabaseProvider,
- use: alepha.isBun() ? BunPostgresProvider : NodePostgresProvider
+ use: isBun ? BunPostgresProvider : NodePostgresProvider
  });
  return;
  }
  alepha.with({
  optional: true,
  provide: DatabaseProvider,
- use: alepha.isBun() ? BunSqliteProvider : NodeSqliteProvider
+ use: isBun ? BunSqliteProvider : NodeSqliteProvider
  });
  }
  });
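The module hook above wires the `DatabaseProvider` implementation from `DATABASE_URL` and the runtime: a `cloudflare-d1:` URL binds `CloudflareD1Provider`, a Postgres URL picks the Bun or Node Postgres provider via `isBun`, and the final fallback is the Bun or Node SQLite provider. The branches between those hunks (for example the PGlite path) are not visible in this diff. A configuration sketch of the visible cases (values are illustrative):

```ts
// → CloudflareD1Provider (the URL is expected to look like cloudflare-d1://name:id)
process.env.DATABASE_URL = "cloudflare-d1://DB:1234";

// → isBun ? BunPostgresProvider : NodePostgresProvider
process.env.DATABASE_URL = "postgres://app:secret@localhost:5432/app";

// → final fallback: isBun ? BunSqliteProvider : NodeSqliteProvider
process.env.DATABASE_URL = "sqlite://node_modules/.alepha/sqlite.db";
```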