alepha 0.14.4 → 0.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (322) hide show
  1. package/README.md +44 -102
  2. package/dist/api/audits/index.d.ts +331 -443
  3. package/dist/api/audits/index.d.ts.map +1 -1
  4. package/dist/api/audits/index.js +2 -2
  5. package/dist/api/audits/index.js.map +1 -1
  6. package/dist/api/files/index.d.ts +0 -113
  7. package/dist/api/files/index.d.ts.map +1 -1
  8. package/dist/api/files/index.js +2 -3
  9. package/dist/api/files/index.js.map +1 -1
  10. package/dist/api/jobs/index.d.ts +151 -262
  11. package/dist/api/jobs/index.d.ts.map +1 -1
  12. package/dist/api/notifications/index.browser.js +4 -4
  13. package/dist/api/notifications/index.browser.js.map +1 -1
  14. package/dist/api/notifications/index.d.ts +164 -276
  15. package/dist/api/notifications/index.d.ts.map +1 -1
  16. package/dist/api/notifications/index.js +4 -4
  17. package/dist/api/notifications/index.js.map +1 -1
  18. package/dist/api/parameters/index.d.ts +265 -377
  19. package/dist/api/parameters/index.d.ts.map +1 -1
  20. package/dist/api/users/index.browser.js +1 -2
  21. package/dist/api/users/index.browser.js.map +1 -1
  22. package/dist/api/users/index.d.ts +195 -301
  23. package/dist/api/users/index.d.ts.map +1 -1
  24. package/dist/api/users/index.js +203 -184
  25. package/dist/api/users/index.js.map +1 -1
  26. package/dist/api/verifications/index.d.ts.map +1 -1
  27. package/dist/batch/index.d.ts.map +1 -1
  28. package/dist/batch/index.js +1 -2
  29. package/dist/batch/index.js.map +1 -1
  30. package/dist/bucket/index.d.ts.map +1 -1
  31. package/dist/cache/core/index.d.ts.map +1 -1
  32. package/dist/cache/redis/index.d.ts.map +1 -1
  33. package/dist/cache/redis/index.js +2 -2
  34. package/dist/cache/redis/index.js.map +1 -1
  35. package/dist/cli/index.d.ts +5900 -165
  36. package/dist/cli/index.d.ts.map +1 -1
  37. package/dist/cli/index.js +1481 -639
  38. package/dist/cli/index.js.map +1 -1
  39. package/dist/command/index.d.ts +8 -4
  40. package/dist/command/index.d.ts.map +1 -1
  41. package/dist/command/index.js +29 -25
  42. package/dist/command/index.js.map +1 -1
  43. package/dist/core/index.browser.js +563 -54
  44. package/dist/core/index.browser.js.map +1 -1
  45. package/dist/core/index.d.ts +175 -8
  46. package/dist/core/index.d.ts.map +1 -1
  47. package/dist/core/index.js +564 -54
  48. package/dist/core/index.js.map +1 -1
  49. package/dist/core/index.native.js +563 -54
  50. package/dist/core/index.native.js.map +1 -1
  51. package/dist/datetime/index.d.ts.map +1 -1
  52. package/dist/datetime/index.js +4 -4
  53. package/dist/datetime/index.js.map +1 -1
  54. package/dist/email/index.d.ts +89 -42
  55. package/dist/email/index.d.ts.map +1 -1
  56. package/dist/email/index.js +129 -33
  57. package/dist/email/index.js.map +1 -1
  58. package/dist/fake/index.d.ts +7969 -2
  59. package/dist/fake/index.d.ts.map +1 -1
  60. package/dist/fake/index.js +22 -22
  61. package/dist/fake/index.js.map +1 -1
  62. package/dist/file/index.d.ts +134 -1
  63. package/dist/file/index.d.ts.map +1 -1
  64. package/dist/file/index.js +253 -1
  65. package/dist/file/index.js.map +1 -1
  66. package/dist/lock/core/index.d.ts.map +1 -1
  67. package/dist/lock/redis/index.d.ts.map +1 -1
  68. package/dist/logger/index.d.ts +1 -2
  69. package/dist/logger/index.d.ts.map +1 -1
  70. package/dist/logger/index.js +1 -5
  71. package/dist/logger/index.js.map +1 -1
  72. package/dist/mcp/index.d.ts +19 -1
  73. package/dist/mcp/index.d.ts.map +1 -1
  74. package/dist/mcp/index.js +28 -4
  75. package/dist/mcp/index.js.map +1 -1
  76. package/dist/orm/chunk-DH6iiROE.js +38 -0
  77. package/dist/orm/index.browser.js +9 -9
  78. package/dist/orm/index.browser.js.map +1 -1
  79. package/dist/orm/index.bun.js +2821 -0
  80. package/dist/orm/index.bun.js.map +1 -0
  81. package/dist/orm/index.d.ts +318 -169
  82. package/dist/orm/index.d.ts.map +1 -1
  83. package/dist/orm/index.js +2086 -1776
  84. package/dist/orm/index.js.map +1 -1
  85. package/dist/queue/core/index.d.ts +4 -4
  86. package/dist/queue/core/index.d.ts.map +1 -1
  87. package/dist/queue/redis/index.d.ts.map +1 -1
  88. package/dist/redis/index.bun.js +285 -0
  89. package/dist/redis/index.bun.js.map +1 -0
  90. package/dist/redis/index.d.ts +13 -31
  91. package/dist/redis/index.d.ts.map +1 -1
  92. package/dist/redis/index.js +18 -38
  93. package/dist/redis/index.js.map +1 -1
  94. package/dist/retry/index.d.ts.map +1 -1
  95. package/dist/router/index.d.ts.map +1 -1
  96. package/dist/scheduler/index.d.ts +83 -1
  97. package/dist/scheduler/index.d.ts.map +1 -1
  98. package/dist/scheduler/index.js +393 -1
  99. package/dist/scheduler/index.js.map +1 -1
  100. package/dist/security/index.browser.js +5 -1
  101. package/dist/security/index.browser.js.map +1 -1
  102. package/dist/security/index.d.ts +598 -112
  103. package/dist/security/index.d.ts.map +1 -1
  104. package/dist/security/index.js +1808 -97
  105. package/dist/security/index.js.map +1 -1
  106. package/dist/server/auth/index.d.ts +1200 -175
  107. package/dist/server/auth/index.d.ts.map +1 -1
  108. package/dist/server/auth/index.js +1268 -37
  109. package/dist/server/auth/index.js.map +1 -1
  110. package/dist/server/cache/index.d.ts +6 -3
  111. package/dist/server/cache/index.d.ts.map +1 -1
  112. package/dist/server/cache/index.js +1 -1
  113. package/dist/server/cache/index.js.map +1 -1
  114. package/dist/server/compress/index.d.ts.map +1 -1
  115. package/dist/server/cookies/index.d.ts.map +1 -1
  116. package/dist/server/cookies/index.js +3 -3
  117. package/dist/server/cookies/index.js.map +1 -1
  118. package/dist/server/core/index.d.ts +115 -13
  119. package/dist/server/core/index.d.ts.map +1 -1
  120. package/dist/server/core/index.js +321 -139
  121. package/dist/server/core/index.js.map +1 -1
  122. package/dist/server/cors/index.d.ts +0 -1
  123. package/dist/server/cors/index.d.ts.map +1 -1
  124. package/dist/server/health/index.d.ts +0 -1
  125. package/dist/server/health/index.d.ts.map +1 -1
  126. package/dist/server/helmet/index.d.ts.map +1 -1
  127. package/dist/server/links/index.browser.js +9 -1
  128. package/dist/server/links/index.browser.js.map +1 -1
  129. package/dist/server/links/index.d.ts +1 -2
  130. package/dist/server/links/index.d.ts.map +1 -1
  131. package/dist/server/links/index.js +14 -7
  132. package/dist/server/links/index.js.map +1 -1
  133. package/dist/server/metrics/index.d.ts +514 -1
  134. package/dist/server/metrics/index.d.ts.map +1 -1
  135. package/dist/server/metrics/index.js +4462 -4
  136. package/dist/server/metrics/index.js.map +1 -1
  137. package/dist/server/multipart/index.d.ts.map +1 -1
  138. package/dist/server/proxy/index.d.ts +0 -1
  139. package/dist/server/proxy/index.d.ts.map +1 -1
  140. package/dist/server/rate-limit/index.d.ts.map +1 -1
  141. package/dist/server/static/index.d.ts.map +1 -1
  142. package/dist/server/swagger/index.d.ts +1 -2
  143. package/dist/server/swagger/index.d.ts.map +1 -1
  144. package/dist/server/swagger/index.js +1 -2
  145. package/dist/server/swagger/index.js.map +1 -1
  146. package/dist/sms/index.d.ts +3 -1
  147. package/dist/sms/index.d.ts.map +1 -1
  148. package/dist/sms/index.js +10 -10
  149. package/dist/sms/index.js.map +1 -1
  150. package/dist/thread/index.d.ts +0 -1
  151. package/dist/thread/index.d.ts.map +1 -1
  152. package/dist/thread/index.js +2 -2
  153. package/dist/thread/index.js.map +1 -1
  154. package/dist/topic/core/index.d.ts.map +1 -1
  155. package/dist/topic/redis/index.d.ts.map +1 -1
  156. package/dist/vite/index.d.ts +6315 -149
  157. package/dist/vite/index.d.ts.map +1 -1
  158. package/dist/vite/index.js +140 -469
  159. package/dist/vite/index.js.map +1 -1
  160. package/dist/websocket/index.browser.js +9 -9
  161. package/dist/websocket/index.browser.js.map +1 -1
  162. package/dist/websocket/index.d.ts +28 -28
  163. package/dist/websocket/index.d.ts.map +1 -1
  164. package/dist/websocket/index.js +9 -9
  165. package/dist/websocket/index.js.map +1 -1
  166. package/package.json +13 -18
  167. package/src/api/files/controllers/AdminFileStatsController.ts +0 -1
  168. package/src/api/users/atoms/realmAuthSettingsAtom.ts +5 -0
  169. package/src/api/users/controllers/{UserRealmController.ts → RealmController.ts} +11 -11
  170. package/src/api/users/entities/users.ts +1 -1
  171. package/src/api/users/index.ts +8 -8
  172. package/src/api/users/primitives/{$userRealm.ts → $realm.ts} +17 -19
  173. package/src/api/users/providers/{UserRealmProvider.ts → RealmProvider.ts} +26 -30
  174. package/src/api/users/schemas/{userRealmConfigSchema.ts → realmConfigSchema.ts} +2 -2
  175. package/src/api/users/services/CredentialService.ts +7 -7
  176. package/src/api/users/services/IdentityService.ts +4 -4
  177. package/src/api/users/services/RegistrationService.spec.ts +25 -27
  178. package/src/api/users/services/RegistrationService.ts +38 -27
  179. package/src/api/users/services/SessionCrudService.ts +3 -3
  180. package/src/api/users/services/SessionService.spec.ts +3 -3
  181. package/src/api/users/services/SessionService.ts +27 -18
  182. package/src/api/users/services/UserService.ts +7 -7
  183. package/src/batch/providers/BatchProvider.ts +1 -2
  184. package/src/cli/apps/AlephaCli.ts +2 -2
  185. package/src/cli/apps/AlephaPackageBuilderCli.ts +47 -20
  186. package/src/cli/assets/apiHelloControllerTs.ts +19 -0
  187. package/src/cli/assets/apiIndexTs.ts +16 -0
  188. package/src/cli/assets/biomeJson.ts +2 -1
  189. package/src/cli/assets/claudeMd.ts +308 -0
  190. package/src/cli/assets/dummySpecTs.ts +2 -1
  191. package/src/cli/assets/editorconfig.ts +2 -1
  192. package/src/cli/assets/mainBrowserTs.ts +4 -3
  193. package/src/cli/assets/mainCss.ts +24 -0
  194. package/src/cli/assets/mainServerTs.ts +24 -0
  195. package/src/cli/assets/tsconfigJson.ts +2 -1
  196. package/src/cli/assets/webAppRouterTs.ts +16 -0
  197. package/src/cli/assets/webHelloComponentTsx.ts +20 -0
  198. package/src/cli/assets/webIndexTs.ts +16 -0
  199. package/src/cli/atoms/appEntryOptions.ts +13 -0
  200. package/src/cli/atoms/buildOptions.ts +1 -1
  201. package/src/cli/atoms/changelogOptions.ts +1 -1
  202. package/src/cli/commands/build.ts +97 -61
  203. package/src/cli/commands/db.ts +21 -18
  204. package/src/cli/commands/deploy.ts +17 -5
  205. package/src/cli/commands/dev.ts +26 -47
  206. package/src/cli/commands/gen/env.ts +1 -1
  207. package/src/cli/commands/init.ts +79 -25
  208. package/src/cli/commands/lint.ts +9 -3
  209. package/src/cli/commands/test.ts +8 -2
  210. package/src/cli/commands/typecheck.ts +5 -1
  211. package/src/cli/commands/verify.ts +4 -2
  212. package/src/cli/defineConfig.ts +9 -0
  213. package/src/cli/index.ts +2 -1
  214. package/src/cli/providers/AppEntryProvider.ts +131 -0
  215. package/src/cli/providers/ViteBuildProvider.ts +82 -0
  216. package/src/cli/providers/ViteDevServerProvider.ts +350 -0
  217. package/src/cli/providers/ViteTemplateProvider.ts +27 -0
  218. package/src/cli/services/AlephaCliUtils.ts +72 -602
  219. package/src/cli/services/PackageManagerUtils.ts +308 -0
  220. package/src/cli/services/ProjectScaffolder.ts +329 -0
  221. package/src/command/helpers/Runner.ts +15 -3
  222. package/src/core/Alepha.ts +2 -8
  223. package/src/core/__tests__/Alepha-graph.spec.ts +4 -0
  224. package/src/core/index.shared.ts +1 -0
  225. package/src/core/index.ts +2 -0
  226. package/src/core/primitives/$hook.ts +6 -2
  227. package/src/core/primitives/$module.spec.ts +4 -0
  228. package/src/core/primitives/$module.ts +12 -0
  229. package/src/core/providers/AlsProvider.ts +1 -1
  230. package/src/core/providers/CodecManager.spec.ts +12 -6
  231. package/src/core/providers/CodecManager.ts +26 -6
  232. package/src/core/providers/EventManager.ts +169 -13
  233. package/src/core/providers/KeylessJsonSchemaCodec.spec.ts +878 -0
  234. package/src/core/providers/KeylessJsonSchemaCodec.ts +789 -0
  235. package/src/core/providers/SchemaValidator.spec.ts +236 -0
  236. package/src/core/providers/StateManager.spec.ts +27 -16
  237. package/src/email/providers/LocalEmailProvider.spec.ts +111 -87
  238. package/src/email/providers/LocalEmailProvider.ts +52 -15
  239. package/src/email/providers/NodemailerEmailProvider.ts +167 -56
  240. package/src/file/errors/FileError.ts +7 -0
  241. package/src/file/index.ts +9 -1
  242. package/src/file/providers/MemoryFileSystemProvider.ts +393 -0
  243. package/src/logger/providers/PrettyFormatterProvider.ts +0 -9
  244. package/src/mcp/errors/McpError.ts +30 -0
  245. package/src/mcp/index.ts +3 -0
  246. package/src/mcp/transports/SseMcpTransport.ts +16 -6
  247. package/src/orm/index.browser.ts +1 -19
  248. package/src/orm/index.bun.ts +77 -0
  249. package/src/orm/index.shared-server.ts +22 -0
  250. package/src/orm/index.shared.ts +15 -0
  251. package/src/orm/index.ts +19 -39
  252. package/src/orm/providers/DrizzleKitProvider.ts +3 -5
  253. package/src/orm/providers/drivers/BunPostgresProvider.ts +3 -5
  254. package/src/orm/providers/drivers/BunSqliteProvider.ts +1 -1
  255. package/src/orm/providers/drivers/CloudflareD1Provider.ts +4 -0
  256. package/src/orm/providers/drivers/DatabaseProvider.ts +4 -0
  257. package/src/orm/providers/drivers/PglitePostgresProvider.ts +4 -0
  258. package/src/orm/services/Repository.ts +19 -0
  259. package/src/redis/index.bun.ts +35 -0
  260. package/src/redis/providers/BunRedisProvider.ts +12 -43
  261. package/src/redis/providers/BunRedisSubscriberProvider.ts +2 -3
  262. package/src/redis/providers/NodeRedisProvider.ts +16 -34
  263. package/src/{server/security → security}/__tests__/BasicAuth.spec.ts +11 -11
  264. package/src/{server/security → security}/__tests__/ServerSecurityProvider-realm.spec.ts +21 -16
  265. package/src/{server/security/providers → security/__tests__}/ServerSecurityProvider.spec.ts +5 -5
  266. package/src/security/index.browser.ts +5 -0
  267. package/src/security/index.ts +90 -7
  268. package/src/security/primitives/{$realm.spec.ts → $issuer.spec.ts} +11 -11
  269. package/src/security/primitives/{$realm.ts → $issuer.ts} +20 -17
  270. package/src/security/primitives/$role.ts +5 -5
  271. package/src/security/primitives/$serviceAccount.spec.ts +5 -5
  272. package/src/security/primitives/$serviceAccount.ts +3 -3
  273. package/src/{server/security → security}/providers/ServerSecurityProvider.ts +5 -7
  274. package/src/server/auth/primitives/$auth.ts +10 -10
  275. package/src/server/auth/primitives/$authCredentials.ts +3 -3
  276. package/src/server/auth/primitives/$authGithub.ts +3 -3
  277. package/src/server/auth/primitives/$authGoogle.ts +3 -3
  278. package/src/server/auth/providers/ServerAuthProvider.ts +13 -13
  279. package/src/server/cache/providers/ServerCacheProvider.ts +1 -1
  280. package/src/server/cookies/providers/ServerCookiesProvider.ts +3 -3
  281. package/src/server/core/index.ts +1 -1
  282. package/src/server/core/providers/BunHttpServerProvider.ts +1 -1
  283. package/src/server/core/providers/NodeHttpServerProvider.spec.ts +125 -0
  284. package/src/server/core/providers/NodeHttpServerProvider.ts +92 -24
  285. package/src/server/core/providers/ServerBodyParserProvider.ts +19 -23
  286. package/src/server/core/providers/ServerLoggerProvider.ts +23 -19
  287. package/src/server/core/providers/ServerProvider.ts +144 -24
  288. package/src/server/core/providers/ServerRouterProvider.ts +259 -115
  289. package/src/server/core/providers/ServerTimingProvider.ts +2 -2
  290. package/src/server/links/atoms/apiLinksAtom.ts +7 -0
  291. package/src/server/links/index.browser.ts +2 -0
  292. package/src/server/links/index.ts +3 -1
  293. package/src/server/links/providers/LinkProvider.ts +1 -1
  294. package/src/server/swagger/index.ts +1 -1
  295. package/src/sms/providers/LocalSmsProvider.spec.ts +153 -111
  296. package/src/sms/providers/LocalSmsProvider.ts +8 -7
  297. package/src/vite/index.ts +3 -2
  298. package/src/vite/tasks/buildClient.ts +0 -1
  299. package/src/vite/tasks/buildServer.ts +80 -22
  300. package/src/vite/tasks/copyAssets.ts +5 -4
  301. package/src/vite/tasks/generateCloudflare.ts +7 -0
  302. package/src/vite/tasks/generateSitemap.ts +64 -23
  303. package/src/vite/tasks/index.ts +0 -2
  304. package/src/vite/tasks/prerenderPages.ts +49 -24
  305. package/dist/server/security/index.browser.js +0 -13
  306. package/dist/server/security/index.browser.js.map +0 -1
  307. package/dist/server/security/index.d.ts +0 -173
  308. package/dist/server/security/index.d.ts.map +0 -1
  309. package/dist/server/security/index.js +0 -311
  310. package/dist/server/security/index.js.map +0 -1
  311. package/src/cli/assets/appRouterTs.ts +0 -9
  312. package/src/cli/assets/indexHtml.ts +0 -15
  313. package/src/cli/assets/mainTs.ts +0 -13
  314. package/src/cli/commands/format.ts +0 -17
  315. package/src/server/security/index.browser.ts +0 -10
  316. package/src/server/security/index.ts +0 -94
  317. package/src/vite/helpers/boot.ts +0 -106
  318. package/src/vite/plugins/viteAlephaDev.ts +0 -177
  319. package/src/vite/tasks/devServer.ts +0 -69
  320. package/src/vite/tasks/runAlepha.ts +0 -270
  321. /package/src/{server/security → security}/primitives/$basicAuth.ts +0 -0
  322. /package/src/{server/security → security}/providers/ServerBasicAuthProvider.ts +0 -0
package/dist/orm/index.js CHANGED
@@ -1,23 +1,24 @@
1
+ import { n as __reExport, t as __exportAll } from "./chunk-DH6iiROE.js";
1
2
  import { createRequire } from "node:module";
2
3
  import { $atom, $context, $env, $hook, $inject, $module, $use, Alepha, AlephaError, KIND, Primitive, Value, createPagination, createPrimitive, pageQuerySchema, pageSchema, pageSchema as pageSchema$1, t } from "alepha";
3
4
  import { AlephaDateTime, DateTimeProvider } from "alepha/datetime";
5
+ import * as pg$2 from "drizzle-orm/pg-core";
6
+ import { alias, check, customType, foreignKey, index, pgEnum, pgSchema, pgTable, unique, uniqueIndex } from "drizzle-orm/pg-core";
4
7
  import * as drizzle from "drizzle-orm";
5
8
  import { and, arrayContained, arrayContains, arrayOverlaps, asc, between, desc, eq, getTableName, gt, gte, ilike, inArray, isNotNull, isNull, isSQLWrapper, like, lt, lte, ne, not, notBetween, notIlike, notInArray, notLike, or, sql, sql as sql$1 } from "drizzle-orm";
6
- import * as pg$1 from "drizzle-orm/pg-core";
7
- import { alias, check, customType, foreignKey, index, pgEnum, pgSchema, pgTable, unique, uniqueIndex } from "drizzle-orm/pg-core";
8
9
  import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
9
10
  import { $logger } from "alepha/logger";
10
- import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
11
+ import { $lock } from "alepha/lock";
11
12
  import { randomUUID } from "node:crypto";
12
- import * as pg$2 from "drizzle-orm/sqlite-core";
13
+ import * as pg$1 from "drizzle-orm/sqlite-core";
13
14
  import { check as check$1, foreignKey as foreignKey$1, index as index$1, sqliteTable, unique as unique$1, uniqueIndex as uniqueIndex$1 } from "drizzle-orm/sqlite-core";
14
- import { $lock } from "alepha/lock";
15
15
  import { drizzle as drizzle$1 } from "drizzle-orm/postgres-js";
16
16
  import { migrate } from "drizzle-orm/postgres-js/migrator";
17
17
  import postgres from "postgres";
18
18
  import { drizzle as drizzle$2 } from "drizzle-orm/sqlite-proxy";
19
19
  import { migrate as migrate$1 } from "drizzle-orm/sqlite-proxy/migrator";
20
20
  import { migrate as migrate$2 } from "drizzle-orm/pglite/migrator";
21
+ import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
21
22
  import { $retry } from "alepha/retry";
22
23
 
23
24
  export * from "drizzle-orm/pg-core"
@@ -62,14 +63,14 @@ const insertSchema = (obj) => {
62
63
 
63
64
  //#endregion
64
65
  //#region ../../src/orm/schemas/updateSchema.ts
65
- const updateSchema = (schema$1) => {
66
+ const updateSchema = (schema) => {
66
67
  const newProperties = {};
67
- for (const key in schema$1.properties) {
68
- const prop = schema$1.properties[key];
68
+ for (const key in schema.properties) {
69
+ const prop = schema.properties[key];
69
70
  if (t.schema.isOptional(prop)) newProperties[key] = t.optional(t.union([prop, t.raw.Null()]));
70
71
  else newProperties[key] = prop;
71
72
  }
72
- return t.object(newProperties, "options" in schema$1 && typeof schema$1.options === "object" ? { ...schema$1.options } : {});
73
+ return t.object(newProperties, "options" in schema && typeof schema.options === "object" ? { ...schema.options } : {});
73
74
  };
74
75
 
75
76
  //#endregion
@@ -100,10 +101,10 @@ var EntityPrimitive = class EntityPrimitive {
100
101
  constructor(options) {
101
102
  this.options = options;
102
103
  }
103
- alias(alias$1) {
104
+ alias(alias) {
104
105
  const aliased = new EntityPrimitive(this.options);
105
106
  return new Proxy(aliased, { get(target, prop, receiver) {
106
- if (prop === "$alias") return alias$1;
107
+ if (prop === "$alias") return alias;
107
108
  return Reflect.get(target, prop, receiver);
108
109
  } });
109
110
  }
@@ -139,74 +140,6 @@ var DbError = class extends AlephaError {
139
140
  }
140
141
  };
141
142
 
142
- //#endregion
143
- //#region ../../src/orm/errors/DbConflictError.ts
144
- var DbConflictError = class extends DbError {
145
- name = "DbConflictError";
146
- status = 409;
147
- };
148
-
149
- //#endregion
150
- //#region ../../src/orm/errors/DbEntityNotFoundError.ts
151
- var DbEntityNotFoundError = class extends DbError {
152
- name = "DbEntityNotFoundError";
153
- status = 404;
154
- constructor(entityName) {
155
- super(`Entity from '${entityName}' was not found`);
156
- }
157
- };
158
-
159
- //#endregion
160
- //#region ../../src/orm/errors/DbVersionMismatchError.ts
161
- /**
162
- * Error thrown when there is a version mismatch.
163
- * It's thrown by {@link Repository#save} when the updated entity version does not match the one in the database.
164
- * This is used for optimistic concurrency control.
165
- */
166
- var DbVersionMismatchError = class extends DbError {
167
- name = "DbVersionMismatchError";
168
- constructor(table, id) {
169
- super(`Version mismatch for table '${table}' and id '${id}'`);
170
- }
171
- };
172
-
173
- //#endregion
174
- //#region ../../src/orm/helpers/pgAttr.ts
175
- /**
176
- * Decorates a typebox schema with a Postgres attribute.
177
- *
178
- * > It's just a fancy way to add Symbols to a field.
179
- *
180
- * @example
181
- * ```ts
182
- * import { t } from "alepha";
183
- * import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
184
- *
185
- * export const updatedAtSchema = pgAttr(
186
- * t.datetime(), PG_UPDATED_AT,
187
- * );
188
- * ```
189
- */
190
- const pgAttr = (type, attr, value) => {
191
- Object.assign(type, { [attr]: value ?? {} });
192
- return type;
193
- };
194
- /**
195
- * Retrieves the fields of a schema that have a specific attribute.
196
- */
197
- const getAttrFields = (schema$1, name) => {
198
- const fields = [];
199
- for (const key of Object.keys(schema$1.properties)) {
200
- const value = schema$1.properties[key];
201
- if (name in value) fields.push({
202
- type: value,
203
- key,
204
- data: value[name]
205
- });
206
- }
207
- return fields;
208
- };
209
-
210
143
  //#endregion
211
144
  //#region ../../src/orm/providers/drivers/DatabaseProvider.ts
212
145
  var DatabaseProvider = class {
@@ -218,6 +151,9 @@ var DatabaseProvider = class {
218
151
  get name() {
219
152
  return "default";
220
153
  }
154
+ get driver() {
155
+ return this.dialect;
156
+ }
221
157
  get schema() {
222
158
  return "public";
223
159
  }
@@ -234,8 +170,8 @@ var DatabaseProvider = class {
234
170
  registerSequence(sequence) {
235
171
  this.builder.buildSequence(sequence, this);
236
172
  }
237
- async run(statement, schema$1) {
238
- return (await this.execute(statement)).map((row) => this.alepha.codec.decode(schema$1, row));
173
+ async run(statement, schema) {
174
+ return (await this.execute(statement)).map((row) => this.alepha.codec.decode(schema, row));
239
175
  }
240
176
  /**
241
177
  * Get migrations folder path - can be overridden
@@ -302,1899 +238,2202 @@ var DatabaseProvider = class {
302
238
  };
303
239
 
304
240
  //#endregion
305
- //#region ../../src/orm/services/PgRelationManager.ts
306
- var PgRelationManager = class {
241
+ //#region ../../src/orm/primitives/$sequence.ts
242
+ /**
243
+ * Creates a PostgreSQL sequence primitive for generating unique numeric values.
244
+ */
245
+ const $sequence = (options = {}) => {
246
+ return createPrimitive(SequencePrimitive, options);
247
+ };
248
+ var SequencePrimitive = class extends Primitive {
249
+ provider = this.$provider();
250
+ onInit() {
251
+ this.provider.registerSequence(this);
252
+ }
253
+ get name() {
254
+ return this.options.name ?? this.config.propertyKey;
255
+ }
256
+ async next() {
257
+ return this.provider.execute(sql$1`SELECT nextval('${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"')`).then((rows) => Number(rows[0]?.nextval));
258
+ }
259
+ async current() {
260
+ return this.provider.execute(sql$1`SELECT last_value FROM ${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"`).then((rows) => Number(rows[0]?.last_value));
261
+ }
262
+ $provider() {
263
+ return this.options.provider ?? this.alepha.inject(DatabaseProvider);
264
+ }
265
+ };
266
+ $sequence[KIND] = SequencePrimitive;
267
+
268
+ //#endregion
269
+ //#region ../../src/orm/providers/DrizzleKitProvider.ts
270
+ var DrizzleKitProvider = class {
271
+ log = $logger();
272
+ alepha = $inject(Alepha);
307
273
  /**
308
- * Recursively build joins for the query builder based on the relations map
274
+ * Synchronize database with current schema definitions.
275
+ *
276
+ * In development mode, it will generate and execute migrations based on the current state.
277
+ * In testing mode, it will generate migrations from scratch without applying them.
278
+ *
279
+ * Does nothing in production mode, you must handle migrations manually.
309
280
  */
310
- buildJoins(provider, builder, joins, withRelations, table, parentKey) {
311
- for (const [key, join] of Object.entries(withRelations)) {
312
- const from = provider.table(join.join);
313
- const on = isSQLWrapper$1(join.on) ? join.on : sql$1`${table[join.on[0]]} = ${from[join.on[1].name]}`;
314
- if (join.type === "right") builder.rightJoin(from, on);
315
- else if (join.type === "inner") builder.innerJoin(from, on);
316
- else builder.leftJoin(from, on);
317
- joins.push({
318
- key,
319
- table: getTableName(from),
320
- schema: join.join.schema,
321
- col: (name) => from[name],
322
- parent: parentKey
323
- });
324
- if (join.with) this.buildJoins(provider, builder, joins, join.with, from, parentKey ? `${parentKey}.${key}` : key);
281
+ async synchronize(provider) {
282
+ if (this.alepha.isProduction()) {
283
+ this.log.warn("Synchronization skipped in production mode.");
284
+ return;
285
+ }
286
+ if (provider.schema !== "public") await this.createSchemaIfNotExists(provider, provider.schema);
287
+ const now = Date.now();
288
+ if (this.alepha.isTest()) {
289
+ const { statements } = await this.generateMigration(provider);
290
+ await this.executeStatements(statements, provider);
291
+ } else {
292
+ const entry = await this.loadDevMigrations(provider);
293
+ const { statements, snapshot } = await this.generateMigration(provider, entry?.snapshot ? JSON.parse(entry.snapshot) : void 0);
294
+ await this.executeStatements(statements, provider, true);
295
+ await this.saveDevMigrations(provider, snapshot, entry);
325
296
  }
297
+ this.log.info(`Sync with '${provider.name}' OK [${Date.now() - now}ms]`);
326
298
  }
327
299
  /**
328
- * Map a row with its joined relations based on the joins definition
300
+ * Mostly used for testing purposes. You can generate SQL migration statements without executing them.
329
301
  */
330
- mapRowWithJoins(record, row, schema$1, joins, parentKey) {
331
- for (const join of joins) if (join.parent === parentKey) {
332
- const joinedData = row[join.table];
333
- if (this.isAllNull(joinedData)) record[join.key] = void 0;
334
- else {
335
- record[join.key] = joinedData;
336
- this.mapRowWithJoins(record[join.key], row, schema$1, joins, parentKey ? `${parentKey}.${join.key}` : join.key);
302
+ async generateMigration(provider, prevSnapshot) {
303
+ const kit = this.importDrizzleKit();
304
+ const models = this.getModels(provider);
305
+ if (Object.keys(models).length > 0) {
306
+ if (provider.dialect === "sqlite") {
307
+ const prev = prevSnapshot ?? await kit.generateSQLiteDrizzleJson({});
308
+ const curr = await kit.generateSQLiteDrizzleJson(models);
309
+ return {
310
+ models,
311
+ statements: await kit.generateSQLiteMigration(prev, curr),
312
+ snapshot: curr
313
+ };
337
314
  }
315
+ const prev = prevSnapshot ?? kit.generateDrizzleJson({});
316
+ const curr = kit.generateDrizzleJson(models);
317
+ return {
318
+ models,
319
+ statements: await kit.generateMigration(prev, curr),
320
+ snapshot: curr
321
+ };
338
322
  }
339
- return record;
323
+ return {
324
+ models,
325
+ statements: [],
326
+ snapshot: {}
327
+ };
340
328
  }
341
329
  /**
342
- * Check if all values in an object are null (indicates a left join with no match)
330
+ * Load all tables, enums, sequences, etc. from the provider's repositories.
343
331
  */
344
- isAllNull(obj) {
345
- if (obj === null || obj === void 0) return true;
346
- if (typeof obj !== "object") return false;
347
- return Object.values(obj).every((val) => val === null);
332
+ getModels(provider) {
333
+ const models = {};
334
+ for (const [key, value] of provider.tables.entries()) {
335
+ if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
336
+ models[key] = value;
337
+ }
338
+ for (const [key, value] of provider.enums.entries()) {
339
+ if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
340
+ models[key] = value;
341
+ }
342
+ for (const [key, value] of provider.sequences.entries()) {
343
+ if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
344
+ models[key] = value;
345
+ }
346
+ return models;
348
347
  }
349
348
  /**
350
- * Build a schema that includes all join properties recursively
349
+ * Load the migration snapshot from the database.
351
350
  */
352
- buildSchemaWithJoins(baseSchema, joins, parentPath) {
353
- const schema$1 = Value.Clone(baseSchema);
354
- const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
355
- for (const join of joinsAtThisLevel) {
356
- const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
357
- const childJoins = joins.filter((j) => j.parent === joinPath);
358
- let joinSchema = join.schema;
359
- if (childJoins.length > 0) joinSchema = this.buildSchemaWithJoins(join.schema, joins, joinPath);
360
- schema$1.properties[join.key] = t.optional(joinSchema);
351
+ async loadDevMigrations(provider) {
352
+ const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
353
+ if (provider.url.includes(":memory:")) {
354
+ this.log.trace(`In-memory database detected for '${name}', skipping migration snapshot load.`);
355
+ return;
356
+ }
357
+ if (provider.dialect === "sqlite") {
358
+ try {
359
+ const text = await readFile(`node_modules/.alepha/sqlite-${name}.json`, "utf-8");
360
+ return this.alepha.codec.decode(devMigrationsSchema, text);
361
+ } catch (e) {
362
+ this.log.trace(`No existing migration snapshot for '${name}'`, e);
363
+ }
364
+ return;
365
+ }
366
+ await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS "drizzle";`);
367
+ await provider.execute(sql$1`
368
+ CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_dev_migrations" (
369
+ "id" SERIAL PRIMARY KEY,
370
+ "name" TEXT NOT NULL,
371
+ "created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
372
+ "snapshot" TEXT NOT NULL
373
+ );
374
+ `);
375
+ const rows = await provider.run(sql$1`SELECT * FROM "drizzle"."__drizzle_dev_migrations" WHERE "name" = ${name} LIMIT 1`, devMigrationsSchema);
376
+ if (rows.length === 0) {
377
+ this.log.trace(`No existing migration snapshot for '${name}'`);
378
+ return;
361
379
  }
362
- return schema$1;
380
+ return this.alepha.codec.decode(devMigrationsSchema, rows[0]);
363
381
  }
364
- };
365
-
366
- //#endregion
367
- //#region ../../src/orm/services/QueryManager.ts
368
- var QueryManager = class {
369
- alepha = $inject(Alepha);
370
- /**
371
- * Convert a query object to a SQL query.
372
- */
373
- toSQL(query, options) {
374
- const { schema: schema$1, col, joins } = options;
375
- const conditions = [];
376
- if (isSQLWrapper(query)) conditions.push(query);
377
- else {
378
- const keys = Object.keys(query);
379
- for (const key of keys) {
380
- const operator = query[key];
381
- if (typeof query[key] === "object" && query[key] != null && !Array.isArray(query[key]) && joins?.length) {
382
- const matchingJoins = joins.filter((j) => j.key === key);
383
- if (matchingJoins.length > 0) {
384
- const join = matchingJoins[0];
385
- const joinPath = join.parent ? `${join.parent}.${key}` : key;
386
- const recursiveJoins = joins.filter((j) => {
387
- if (!j.parent) return false;
388
- return j.parent === joinPath || j.parent.startsWith(`${joinPath}.`);
389
- }).map((j) => {
390
- const newParent = j.parent === joinPath ? void 0 : j.parent.substring(joinPath.length + 1);
391
- return {
392
- ...j,
393
- parent: newParent
394
- };
395
- });
396
- const sql$2 = this.toSQL(query[key], {
397
- schema: join.schema,
398
- col: join.col,
399
- joins: recursiveJoins.length > 0 ? recursiveJoins : void 0,
400
- dialect: options.dialect
401
- });
402
- if (sql$2) conditions.push(sql$2);
403
- continue;
404
- }
405
- }
406
- if (Array.isArray(operator)) {
407
- const operations = operator.map((it) => {
408
- if (isSQLWrapper(it)) return it;
409
- return this.toSQL(it, {
410
- schema: schema$1,
411
- col,
412
- joins,
413
- dialect: options.dialect
414
- });
415
- }).filter((it) => it != null);
416
- if (key === "and") return and(...operations);
417
- if (key === "or") return or(...operations);
418
- }
419
- if (key === "not") {
420
- const where = this.toSQL(operator, {
421
- schema: schema$1,
422
- col,
423
- joins,
424
- dialect: options.dialect
425
- });
426
- if (where) return not(where);
427
- }
428
- if (operator) {
429
- const column = col(key);
430
- const sql$2 = this.mapOperatorToSql(operator, column, schema$1, key, options.dialect);
431
- if (sql$2) conditions.push(sql$2);
432
- }
382
+ async saveDevMigrations(provider, curr, devMigrations) {
383
+ if (provider.url.includes(":memory:")) {
384
+ this.log.trace(`In-memory database detected for '${provider.constructor.name}', skipping migration snapshot save.`);
385
+ return;
386
+ }
387
+ const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
388
+ if (provider.dialect === "sqlite") {
389
+ const filePath = `node_modules/.alepha/sqlite-${name}.json`;
390
+ await mkdir("node_modules/.alepha", { recursive: true }).catch(() => null);
391
+ await writeFile(filePath, JSON.stringify({
392
+ id: devMigrations?.id ?? 1,
393
+ name,
394
+ created_at: /* @__PURE__ */ new Date(),
395
+ snapshot: JSON.stringify(curr)
396
+ }, null, 2));
397
+ this.log.debug(`Saved migration snapshot to '${filePath}'`);
398
+ return;
399
+ }
400
+ if (!devMigrations) await provider.execute(sql$1`INSERT INTO "drizzle"."__drizzle_dev_migrations" ("name", "snapshot") VALUES (${name}, ${JSON.stringify(curr)})`);
401
+ else {
402
+ const newSnapshot = JSON.stringify(curr);
403
+ if (devMigrations.snapshot !== newSnapshot) await provider.execute(sql$1`UPDATE "drizzle"."__drizzle_dev_migrations" SET "snapshot" = ${newSnapshot} WHERE "id" = ${devMigrations.id}`);
404
+ }
405
+ }
406
+ async executeStatements(statements, provider, catchErrors = false) {
407
+ let nErrors = 0;
408
+ for (const statement of statements) {
409
+ if (statement.startsWith("DROP SCHEMA")) continue;
410
+ try {
411
+ await provider.execute(sql$1.raw(statement));
412
+ } catch (error) {
413
+ const errorMessage = `Error executing statement: ${statement}`;
414
+ if (catchErrors) {
415
+ nErrors++;
416
+ this.log.warn(errorMessage, { context: [error] });
417
+ } else throw error;
433
418
  }
434
419
  }
435
- if (conditions.length === 1) return conditions[0];
436
- return and(...conditions);
420
+ if (nErrors > 0) this.log.warn(`Executed ${statements.length} statements with ${nErrors} errors.`);
437
421
  }
438
- /**
439
- * Check if an object has any filter operator properties.
440
- */
441
- hasFilterOperatorProperties(obj) {
442
- if (!obj || typeof obj !== "object") return false;
443
- return [
444
- "eq",
445
- "ne",
446
- "gt",
447
- "gte",
448
- "lt",
449
- "lte",
450
- "inArray",
451
- "notInArray",
452
- "isNull",
453
- "isNotNull",
454
- "like",
455
- "notLike",
456
- "ilike",
457
- "notIlike",
458
- "contains",
459
- "startsWith",
460
- "endsWith",
461
- "between",
462
- "notBetween",
463
- "arrayContains",
464
- "arrayContained",
465
- "arrayOverlaps"
466
- ].some((key) => key in obj);
422
+ async createSchemaIfNotExists(provider, schemaName) {
423
+ if (!/^[a-z0-9_]+$/i.test(schemaName)) throw new Error(`Invalid schema name: ${schemaName}. Must only contain alphanumeric characters and underscores.`);
424
+ const sqlSchema = sql$1.raw(schemaName);
425
+ if (schemaName.startsWith("test_")) {
426
+ this.log.info(`Drop test schema '${schemaName}' ...`, schemaName);
427
+ await provider.execute(sql$1`DROP SCHEMA IF EXISTS ${sqlSchema} CASCADE`);
428
+ }
429
+ this.log.debug(`Ensuring schema '${schemaName}' exists`);
430
+ await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS ${sqlSchema}`);
467
431
  }
468
432
  /**
469
- * Map a filter operator to a SQL query.
433
+ * Try to load the official Drizzle Kit API.
434
+ * If not available, fallback to the local kit import.
470
435
  */
471
- mapOperatorToSql(operator, column, columnSchema, columnName, dialect = "postgresql") {
472
- const encodeValue = (value) => {
473
- if (value == null) return value;
474
- if (columnSchema && columnName) try {
475
- const fieldSchema = columnSchema.properties[columnName];
476
- if (fieldSchema) return this.alepha.codec.encode(fieldSchema, value, { encoder: "drizzle" });
477
- } catch (error) {}
478
- return value;
479
- };
480
- const encodeArray = (values) => {
481
- return values.map((v) => encodeValue(v));
482
- };
483
- if (typeof operator !== "object" || operator == null || !this.hasFilterOperatorProperties(operator)) return eq(column, encodeValue(operator));
484
- const conditions = [];
485
- if (operator?.eq != null) conditions.push(eq(column, encodeValue(operator.eq)));
486
- if (operator?.ne != null) conditions.push(ne(column, encodeValue(operator.ne)));
487
- if (operator?.gt != null) conditions.push(gt(column, encodeValue(operator.gt)));
488
- if (operator?.gte != null) conditions.push(gte(column, encodeValue(operator.gte)));
489
- if (operator?.lt != null) conditions.push(lt(column, encodeValue(operator.lt)));
490
- if (operator?.lte != null) conditions.push(lte(column, encodeValue(operator.lte)));
491
- if (operator?.inArray != null) {
492
- if (!Array.isArray(operator.inArray) || operator.inArray.length === 0) throw new AlephaError("inArray operator requires at least one value");
493
- conditions.push(inArray(column, encodeArray(operator.inArray)));
494
- }
495
- if (operator?.notInArray != null) {
496
- if (!Array.isArray(operator.notInArray) || operator.notInArray.length === 0) throw new AlephaError("notInArray operator requires at least one value");
497
- conditions.push(notInArray(column, encodeArray(operator.notInArray)));
498
- }
499
- if (operator?.isNull != null) conditions.push(isNull(column));
500
- if (operator?.isNotNull != null) conditions.push(isNotNull(column));
501
- if (operator?.like != null) conditions.push(like(column, encodeValue(operator.like)));
502
- if (operator?.notLike != null) conditions.push(notLike(column, encodeValue(operator.notLike)));
503
- if (operator?.ilike != null) conditions.push(ilike(column, encodeValue(operator.ilike)));
504
- if (operator?.notIlike != null) conditions.push(notIlike(column, encodeValue(operator.notIlike)));
505
- if (operator?.contains != null) {
506
- const escapedValue = String(operator.contains).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
507
- if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}%`)})`);
508
- else conditions.push(ilike(column, encodeValue(`%${escapedValue}%`)));
509
- }
510
- if (operator?.startsWith != null) {
511
- const escapedValue = String(operator.startsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
512
- if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`${escapedValue}%`)})`);
513
- else conditions.push(ilike(column, encodeValue(`${escapedValue}%`)));
514
- }
515
- if (operator?.endsWith != null) {
516
- const escapedValue = String(operator.endsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
517
- if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}`)})`);
518
- else conditions.push(ilike(column, encodeValue(`%${escapedValue}`)));
519
- }
520
- if (operator?.between != null) {
521
- if (!Array.isArray(operator.between) || operator.between.length !== 2) throw new Error("between operator requires exactly 2 values [min, max]");
522
- conditions.push(between(column, encodeValue(operator.between[0]), encodeValue(operator.between[1])));
523
- }
524
- if (operator?.notBetween != null) {
525
- if (!Array.isArray(operator.notBetween) || operator.notBetween.length !== 2) throw new Error("notBetween operator requires exactly 2 values [min, max]");
526
- conditions.push(notBetween(column, encodeValue(operator.notBetween[0]), encodeValue(operator.notBetween[1])));
436
+ importDrizzleKit() {
437
+ try {
438
+ return createRequire(import.meta.url)("drizzle-kit/api");
439
+ } catch (_) {
440
+ throw new Error("Drizzle Kit is not installed. Please install it with `npm install -D drizzle-kit`.");
527
441
  }
528
- if (operator?.arrayContains != null) conditions.push(arrayContains(column, encodeValue(operator.arrayContains)));
529
- if (operator?.arrayContained != null) conditions.push(arrayContained(column, encodeValue(operator.arrayContained)));
530
- if (operator?.arrayOverlaps != null) conditions.push(arrayOverlaps(column, encodeValue(operator.arrayOverlaps)));
531
- if (conditions.length === 0) return;
532
- if (conditions.length === 1) return conditions[0];
533
- return and(...conditions);
534
442
  }
443
+ };
444
+ const devMigrationsSchema = t.object({
445
+ id: t.number(),
446
+ name: t.text(),
447
+ snapshot: t.string(),
448
+ created_at: t.string()
449
+ });
450
+
451
+ //#endregion
452
+ //#region ../../src/orm/errors/DbMigrationError.ts
453
+ var DbMigrationError = class extends DbError {
454
+ name = "DbMigrationError";
455
+ constructor(cause) {
456
+ super("Failed to migrate database", cause);
457
+ }
458
+ };
459
+
460
+ //#endregion
461
+ //#region ../../src/orm/types/byte.ts
462
+ /**
463
+ * Postgres bytea type.
464
+ */
465
+ const byte = customType({ dataType: () => "bytea" });
466
+
467
+ //#endregion
468
+ //#region ../../src/orm/services/ModelBuilder.ts
469
+ /**
470
+ * Abstract base class for transforming Alepha Primitives (Entity, Sequence, etc...)
471
+ * into drizzle models (tables, enums, sequences, etc...).
472
+ */
473
+ var ModelBuilder = class {
535
474
  /**
536
- * Parse pagination sort string to orderBy format.
537
- * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
538
- * - Columns separated by comma
539
- * - Prefix with '-' for DESC direction
540
- *
541
- * @param sort Pagination sort string
542
- * @returns OrderBy array or single object
475
+ * Convert camelCase to snake_case for column names.
543
476
  */
544
- parsePaginationSort(sort) {
545
- const orderByClauses = sort.split(",").map((field) => field.trim()).map((field) => {
546
- if (field.startsWith("-")) return {
547
- column: field.substring(1),
548
- direction: "desc"
549
- };
550
- return {
551
- column: field,
552
- direction: "asc"
553
- };
554
- });
555
- return orderByClauses.length === 1 ? orderByClauses[0] : orderByClauses;
477
+ toColumnName(str) {
478
+ return str[0].toLowerCase() + str.slice(1).replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
556
479
  }
557
480
  /**
558
- * Normalize orderBy parameter to array format.
559
- * Supports 3 modes:
560
- * 1. String: "name" -> [{ column: "name", direction: "asc" }]
561
- * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
562
- * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
481
+ * Build the table configuration function for any database.
482
+ * This includes indexes, foreign keys, constraints, and custom config.
563
483
  *
564
- * @param orderBy The orderBy parameter
565
- * @returns Normalized array of order by clauses
484
+ * @param entity - The entity primitive
485
+ * @param builders - Database-specific builder functions
486
+ * @param tableResolver - Function to resolve entity references to table columns
487
+ * @param customConfigHandler - Optional handler for custom config
566
488
  */
567
- normalizeOrderBy(orderBy) {
568
- if (typeof orderBy === "string") return [{
569
- column: orderBy,
570
- direction: "asc"
571
- }];
572
- if (!Array.isArray(orderBy) && typeof orderBy === "object") return [{
573
- column: orderBy.column,
574
- direction: orderBy.direction ?? "asc"
575
- }];
576
- if (Array.isArray(orderBy)) return orderBy.map((item) => ({
577
- column: item.column,
578
- direction: item.direction ?? "asc"
579
- }));
580
- return [];
581
- }
582
- /**
583
- * Create a pagination object.
584
- *
585
- * @deprecated Use `createPagination` from alepha instead.
586
- * This method now delegates to the framework-level helper.
587
- *
588
- * @param entities The entities to paginate.
589
- * @param limit The limit of the pagination.
590
- * @param offset The offset of the pagination.
591
- * @param sort Optional sort metadata to include in response.
592
- */
593
- createPagination(entities, limit = 10, offset = 0, sort) {
594
- return createPagination(entities, limit, offset, sort);
489
+ buildTableConfig(entity, builders, tableResolver, customConfigHandler) {
490
+ if (!entity.options.indexes && !entity.options.foreignKeys && !entity.options.constraints && !entity.options.config) return;
491
+ return (self) => {
492
+ const configs = [];
493
+ if (entity.options.indexes) {
494
+ for (const indexDef of entity.options.indexes) if (typeof indexDef === "string") {
495
+ const columnName = this.toColumnName(indexDef);
496
+ const indexName = `${entity.name}_${columnName}_idx`;
497
+ if (self[indexDef]) configs.push(builders.index(indexName).on(self[indexDef]));
498
+ } else if (typeof indexDef === "object" && indexDef !== null) {
499
+ if ("column" in indexDef) {
500
+ const columnName = this.toColumnName(indexDef.column);
501
+ const indexName = indexDef.name || `${entity.name}_${columnName}_idx`;
502
+ if (self[indexDef.column]) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(self[indexDef.column]));
503
+ else configs.push(builders.index(indexName).on(self[indexDef.column]));
504
+ } else if ("columns" in indexDef) {
505
+ const columnNames = indexDef.columns.map((col) => this.toColumnName(col));
506
+ const indexName = indexDef.name || `${entity.name}_${columnNames.join("_")}_idx`;
507
+ const cols = indexDef.columns.map((col) => self[col]).filter(Boolean);
508
+ if (cols.length === indexDef.columns.length) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(...cols));
509
+ else configs.push(builders.index(indexName).on(...cols));
510
+ }
511
+ }
512
+ }
513
+ if (entity.options.foreignKeys) for (const fkDef of entity.options.foreignKeys) {
514
+ const columnNames = fkDef.columns.map((col) => this.toColumnName(col));
515
+ const cols = fkDef.columns.map((col) => self[col]).filter(Boolean);
516
+ if (cols.length === fkDef.columns.length) {
517
+ const fkName = fkDef.name || `${entity.name}_${columnNames.join("_")}_fk`;
518
+ const foreignColumns = fkDef.foreignColumns.map((colRef) => {
519
+ const entityCol = colRef();
520
+ if (!entityCol || !entityCol.entity || !entityCol.name) throw new Error(`Invalid foreign column reference in ${entity.name}`);
521
+ if (tableResolver) {
522
+ const foreignTable = tableResolver(entityCol.entity.name);
523
+ if (!foreignTable) throw new Error(`Foreign table ${entityCol.entity.name} not found for ${entity.name}`);
524
+ return foreignTable[entityCol.name];
525
+ }
526
+ return entityCol;
527
+ });
528
+ configs.push(builders.foreignKey({
529
+ name: fkName,
530
+ columns: cols,
531
+ foreignColumns
532
+ }));
533
+ }
534
+ }
535
+ if (entity.options.constraints) for (const constraintDef of entity.options.constraints) {
536
+ const columnNames = constraintDef.columns.map((col) => this.toColumnName(col));
537
+ const cols = constraintDef.columns.map((col) => self[col]).filter(Boolean);
538
+ if (cols.length === constraintDef.columns.length) {
539
+ if (constraintDef.unique) {
540
+ const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_unique`;
541
+ configs.push(builders.unique(constraintName).on(...cols));
542
+ }
543
+ if (constraintDef.check) {
544
+ const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_check`;
545
+ configs.push(builders.check(constraintName, constraintDef.check));
546
+ }
547
+ }
548
+ }
549
+ if (entity.options.config && customConfigHandler) configs.push(...customConfigHandler(entity.options.config, self));
550
+ else if (entity.options.config) {
551
+ const customConfigs = entity.options.config(self);
552
+ if (Array.isArray(customConfigs)) configs.push(...customConfigs);
553
+ }
554
+ return configs;
555
+ };
595
556
  }
596
557
  };
597
558
 
598
559
  //#endregion
599
- //#region ../../src/orm/services/Repository.ts
600
- var Repository = class {
601
- entity;
602
- provider;
603
- relationManager = $inject(PgRelationManager);
604
- queryManager = $inject(QueryManager);
605
- dateTimeProvider = $inject(DateTimeProvider);
606
- alepha = $inject(Alepha);
607
- constructor(entity, provider = DatabaseProvider) {
608
- this.entity = entity;
609
- this.provider = this.alepha.inject(provider);
610
- this.provider.registerEntity(entity);
611
- }
612
- /**
613
- * Represents the primary key of the table.
614
- * - Key is the name of the primary key column.
615
- * - Type is the type (TypeBox) of the primary key column.
616
- *
617
- * ID is mandatory. If the table does not have a primary key, it will throw an error.
618
- */
619
- get id() {
620
- return this.getPrimaryKey(this.entity.schema);
560
+ //#region ../../src/orm/services/PostgresModelBuilder.ts
561
+ var PostgresModelBuilder = class extends ModelBuilder {
562
+ schemas = /* @__PURE__ */ new Map();
563
+ getPgSchema(name) {
564
+ if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
565
+ const nsp = name !== "public" ? this.schemas.get(name) : {
566
+ enum: pgEnum,
567
+ table: pgTable
568
+ };
569
+ if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
570
+ return nsp;
621
571
  }
622
- /**
623
- * Get Drizzle table object.
624
- */
625
- get table() {
626
- return this.provider.table(this.entity);
572
+ buildTable(entity, options) {
573
+ const tableName = entity.name;
574
+ if (options.tables.has(tableName)) return;
575
+ const nsp = this.getPgSchema(options.schema);
576
+ const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
577
+ const configFn = this.getTableConfig(entity, options.tables);
578
+ const table = nsp.table(tableName, columns, configFn);
579
+ options.tables.set(tableName, table);
627
580
  }
628
- /**
629
- * Get SQL table name. (from Drizzle table object)
630
- */
631
- get tableName() {
632
- return this.entity.name;
581
+ buildSequence(sequence, options) {
582
+ const sequenceName = sequence.name;
583
+ if (options.sequences.has(sequenceName)) return;
584
+ const nsp = this.getPgSchema(options.schema);
585
+ options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
633
586
  }
634
587
  /**
635
- * Getter for the database connection from the database provider.
588
+ * Get PostgreSQL-specific config builder for the table.
636
589
  */
637
- get db() {
638
- return this.provider.db;
590
+ getTableConfig(entity, tables) {
591
+ const pgBuilders = {
592
+ index,
593
+ uniqueIndex,
594
+ unique,
595
+ check,
596
+ foreignKey
597
+ };
598
+ const tableResolver = (entityName) => {
599
+ return tables.get(entityName);
600
+ };
601
+ return this.buildTableConfig(entity, pgBuilders, tableResolver);
639
602
  }
603
+ schemaToPgColumns = (tableName, schema, nsp, enums, tables) => {
604
+ return Object.entries(schema.properties).reduce((columns, [key, value]) => {
605
+ let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
606
+ if ("default" in value && value.default != null) col = col.default(value.default);
607
+ if (PG_PRIMARY_KEY in value) col = col.primaryKey();
608
+ if (PG_REF in value) {
609
+ const config = value[PG_REF];
610
+ col = col.references(() => {
611
+ const ref = config.ref();
612
+ const table = tables.get(ref.entity.name);
613
+ if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
614
+ const target = table[ref.name];
615
+ if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
616
+ return target;
617
+ }, config.actions);
618
+ }
619
+ if (schema.required?.includes(key)) col = col.notNull();
620
+ return {
621
+ ...columns,
622
+ [key]: col
623
+ };
624
+ }, {});
625
+ };
626
+ mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
627
+ const key = this.toColumnName(fieldName);
628
+ if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
629
+ if (t.schema.isInteger(value)) {
630
+ if (PG_SERIAL in value) return pg$2.serial(key);
631
+ if (PG_IDENTITY in value) {
632
+ const options = value[PG_IDENTITY];
633
+ if (options.mode === "byDefault") return pg$2.integer().generatedByDefaultAsIdentity(options);
634
+ return pg$2.integer().generatedAlwaysAsIdentity(options);
635
+ }
636
+ return pg$2.integer(key);
637
+ }
638
+ if (t.schema.isBigInt(value)) {
639
+ if (PG_IDENTITY in value) {
640
+ const options = value[PG_IDENTITY];
641
+ if (options.mode === "byDefault") return pg$2.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
642
+ return pg$2.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
643
+ }
644
+ }
645
+ if (t.schema.isNumber(value)) {
646
+ if (PG_IDENTITY in value) {
647
+ const options = value[PG_IDENTITY];
648
+ if (options.mode === "byDefault") return pg$2.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
649
+ return pg$2.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
650
+ }
651
+ if (value.format === "int64") return pg$2.bigint(key, { mode: "number" });
652
+ return pg$2.numeric(key);
653
+ }
654
+ if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
655
+ if (t.schema.isBoolean(value)) return pg$2.boolean(key);
656
+ if (t.schema.isObject(value)) return schema(key, value);
657
+ if (t.schema.isRecord(value)) return schema(key, value);
658
+ const isTypeEnum = (value) => t.schema.isUnsafe(value) && "type" in value && value.type === "string" && "enum" in value && Array.isArray(value.enum);
659
+ if (t.schema.isArray(value)) {
660
+ if (t.schema.isObject(value.items)) return schema(key, value);
661
+ if (t.schema.isRecord(value.items)) return schema(key, value);
662
+ if (t.schema.isString(value.items)) return pg$2.text(key).array();
663
+ if (t.schema.isInteger(value.items)) return pg$2.integer(key).array();
664
+ if (t.schema.isNumber(value.items)) return pg$2.numeric(key).array();
665
+ if (t.schema.isBoolean(value.items)) return pg$2.boolean(key).array();
666
+ if (isTypeEnum(value.items)) return pg$2.text(key).array();
667
+ }
668
+ if (isTypeEnum(value)) {
669
+ if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
670
+ if (PG_ENUM in value && value[PG_ENUM]) {
671
+ const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
672
+ if (enums.has(enumName)) {
673
+ const values = enums.get(enumName).enumValues.join(",");
674
+ const newValues = value.enum.join(",");
675
+ if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
676
+ }
677
+ enums.set(enumName, nsp.enum(enumName, value.enum));
678
+ return enums.get(enumName)(key);
679
+ }
680
+ return this.mapStringToColumn(key, value);
681
+ }
682
+ throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
683
+ };
640
684
  /**
641
- * Execute a SQL query.
642
- *
643
- * This method allows executing raw SQL queries against the database.
644
- * This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
645
- *
646
- * You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
685
+ * Map a string to a PG column.
647
686
  *
648
- * @example
649
- * ```ts
650
- * class App {
651
- * repository = $repository({ ... });
652
- * async getAdults() {
653
- * const users = repository.table; // Drizzle table object
654
- * await repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
655
- * // or better
656
- * await repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
657
- * }
658
- * }
659
- * ```
660
- */
661
- async query(query, schema$1) {
662
- const raw = typeof query === "function" ? query(this.table, this.db) : query;
663
- if (typeof raw === "string" && raw.includes("[object Object]")) throw new AlephaError("Invalid SQL query. Did you forget to call the 'sql' function?");
664
- return (await this.provider.execute(raw)).map((it) => {
665
- return this.clean(this.mapRawFieldsToEntity(it), schema$1 ?? this.entity.schema);
666
- });
667
- }
668
- /**
669
- * Map raw database fields to entity fields. (handles column name differences)
687
+ * @param key The column name derived from the field name.
688
+ * @param value The field's schema.
670
689
  */
671
- mapRawFieldsToEntity(row) {
672
- const entity = {};
673
- for (const key of Object.keys(row)) {
674
- entity[key] = row[key];
675
- for (const colKey of Object.keys(this.table)) if (this.table[colKey].name === key) {
676
- entity[colKey] = row[key];
677
- break;
690
+ mapStringToColumn = (key, value) => {
691
+ if ("format" in value) {
692
+ if (value.format === "uuid") {
693
+ if (PG_PRIMARY_KEY in value) return pg$2.uuid(key).defaultRandom();
694
+ return pg$2.uuid(key);
695
+ }
696
+ if (value.format === "byte") return byte(key);
697
+ if (value.format === "date-time") {
698
+ if (PG_CREATED_AT in value) return pg$2.timestamp(key, {
699
+ mode: "string",
700
+ withTimezone: true
701
+ }).defaultNow();
702
+ if (PG_UPDATED_AT in value) return pg$2.timestamp(key, {
703
+ mode: "string",
704
+ withTimezone: true
705
+ }).defaultNow();
706
+ return pg$2.timestamp(key, {
707
+ mode: "string",
708
+ withTimezone: true
709
+ });
678
710
  }
711
+ if (value.format === "date") return pg$2.date(key, { mode: "string" });
679
712
  }
680
- return entity;
681
- }
682
- /**
683
- * Get a Drizzle column from the table by its name.
684
- */
685
- col(name) {
686
- const column = this.table[name];
687
- if (!column) throw new AlephaError(`Invalid access. Column ${String(name)} not found in table ${this.tableName}`);
688
- return column;
689
- }
690
- /**
691
- * Run a transaction.
692
- */
693
- async transaction(transaction, config) {
694
- return await this.db.transaction(transaction, config);
713
+ return pg$2.text(key);
714
+ };
715
+ };
716
+
717
+ //#endregion
718
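
The string-format branch above is the core of the schema-to-column mapping. As a minimal sketch, assuming plain `drizzle-orm/pg-core` and a hypothetical `stringColumn` helper (not part of the package), the same decisions look like this:

```ts
// Sketch only: mirrors mapStringToColumn for the common JSON-schema string formats.
import { date, text, timestamp, uuid } from "drizzle-orm/pg-core";

// Hypothetical helper; `isPrimaryKey` stands in for the PG_PRIMARY_KEY symbol check.
const stringColumn = (key: string, format?: string, isPrimaryKey = false) => {
  if (format === "uuid") {
    // Primary-key UUIDs get a random default, as in the builder above.
    return isPrimaryKey ? uuid(key).defaultRandom() : uuid(key);
  }
  if (format === "date-time") {
    // Stored as timestamptz, exposed as an ISO string.
    return timestamp(key, { mode: "string", withTimezone: true });
  }
  if (format === "date") return date(key, { mode: "string" });
  // Everything else falls back to text, like the builder's default branch.
  return text(key);
};

// e.g. stringColumn("id", "uuid", true), stringColumn("created_at", "date-time")
```
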
+ //#region ../../src/orm/providers/drivers/BunPostgresProvider.ts
719
+ const envSchema$4 = t.object({
720
+ DATABASE_URL: t.optional(t.text()),
721
+ POSTGRES_SCHEMA: t.optional(t.text())
722
+ });
723
+ /**
724
+ * Bun PostgreSQL provider using Drizzle ORM with Bun's native SQL client.
725
+ *
726
+ * This provider uses Bun's built-in SQL class for PostgreSQL connections,
727
+ * which provides excellent performance on the Bun runtime.
728
+ *
729
+ * @example
730
+ * ```ts
731
+ * // Set DATABASE_URL environment variable
732
+ * // DATABASE_URL=postgres://user:password@localhost:5432/database
733
+ *
734
+ * // Or configure programmatically
735
+ * alepha.with({
736
+ * provide: DatabaseProvider,
737
+ * use: BunPostgresProvider,
738
+ * });
739
+ * ```
740
+ */
741
+ var BunPostgresProvider = class extends DatabaseProvider {
742
+ log = $logger();
743
+ env = $env(envSchema$4);
744
+ kit = $inject(DrizzleKitProvider);
745
+ builder = $inject(PostgresModelBuilder);
746
+ client;
747
+ bunDb;
748
+ dialect = "postgresql";
749
+ get name() {
750
+ return "postgres";
695
751
  }
696
752
  /**
697
- * Start a SELECT query on the table.
753
+ * In testing mode, the schema name will be generated and deleted after the test.
698
754
  */
699
- rawSelect(opts = {}) {
700
- return (opts.tx ?? this.db).select().from(this.table);
755
+ schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
756
+ get url() {
757
+ if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
758
+ return this.env.DATABASE_URL;
701
759
  }
702
760
  /**
703
- * Start a SELECT DISTINCT query on the table.
761
+ * Execute a SQL statement.
704
762
  */
705
- rawSelectDistinct(opts = {}, columns = []) {
706
- const db$1 = opts.tx ?? this.db;
707
- const table = this.table;
708
- const fields = {};
709
- for (const column of columns) if (typeof column === "string") fields[column] = this.col(column);
710
- return db$1.selectDistinct(fields).from(table);
763
+ execute(statement) {
764
+ try {
765
+ return this.db.execute(statement);
766
+ } catch (error) {
767
+ throw new DbError("Error executing statement", error);
768
+ }
711
769
  }
712
770
  /**
713
- * Start an INSERT query on the table.
771
+ * Get Postgres schema used by this provider.
714
772
  */
715
- rawInsert(opts = {}) {
716
- return (opts.tx ?? this.db).insert(this.table);
773
+ get schema() {
774
+ if (this.schemaForTesting) return this.schemaForTesting;
775
+ if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
776
+ return "public";
717
777
  }
718
778
  /**
719
- * Start an UPDATE query on the table.
779
+ * Get the Drizzle Postgres database instance.
720
780
  */
721
- rawUpdate(opts = {}) {
722
- return (opts.tx ?? this.db).update(this.table);
781
+ get db() {
782
+ if (!this.bunDb) throw new AlephaError("Database not initialized");
783
+ return this.bunDb;
723
784
  }
724
- /**
725
- * Start a DELETE query on the table.
726
- */
727
- rawDelete(opts = {}) {
728
- return (opts.tx ?? this.db).delete(this.table);
785
+ async executeMigrations(migrationsFolder) {
786
+ const { migrate } = await import("drizzle-orm/bun-sql/migrator");
787
+ await migrate(this.bunDb, { migrationsFolder });
729
788
  }
730
- /**
731
- * Create a Drizzle `select` query based on a JSON query object.
732
- *
733
- * > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
734
- */
735
- async findMany(query = {}, opts = {}) {
736
- await this.alepha.events.emit("repository:read:before", {
737
- tableName: this.tableName,
738
- query
739
- });
740
- const columns = query.columns ?? query.distinct;
741
- const builder = query.distinct ? this.rawSelectDistinct(opts, query.distinct) : this.rawSelect(opts);
742
- const joins = [];
743
- if (query.with) this.relationManager.buildJoins(this.provider, builder, joins, query.with, this.table);
744
- const where = this.withDeletedAt(query.where ?? {}, opts);
745
- builder.where(() => this.toSQL(where, joins));
746
- if (query.offset) {
747
- builder.offset(query.offset);
748
- if (this.provider.dialect === "sqlite" && !query.limit) query.limit = 1e3;
749
- }
750
- if (query.limit) builder.limit(query.limit);
751
- if (query.orderBy) {
752
- const orderByClauses = this.queryManager.normalizeOrderBy(query.orderBy);
753
- builder.orderBy(...orderByClauses.map((clause) => clause.direction === "desc" ? desc(this.col(clause.column)) : asc(this.col(clause.column))));
754
- }
755
- if (query.groupBy) builder.groupBy(...query.groupBy.map((key) => this.col(key)));
756
- if (opts.for) {
757
- if (typeof opts.for === "string") builder.for(opts.for);
758
- else if (opts.for) builder.for(opts.for.strength, opts.for.config);
789
+ onStart = $hook({
790
+ on: "start",
791
+ handler: async () => {
792
+ await this.connect();
793
+ if (!this.alepha.isServerless()) try {
794
+ await this.migrateLock.run();
795
+ } catch (error) {
796
+ throw new DbMigrationError(error);
797
+ }
759
798
  }
760
- try {
761
- let rows = await builder.execute();
762
- let schema$1 = this.entity.schema;
763
- if (columns) schema$1 = t.pick(schema$1, columns);
764
- if (joins.length) rows = rows.map((row) => {
765
- const rowSchema = {
766
- ...schema$1,
767
- properties: { ...schema$1.properties }
768
- };
769
- return this.relationManager.mapRowWithJoins(row[this.tableName], row, rowSchema, joins);
770
- });
771
- rows = rows.map((row) => {
772
- if (joins.length) {
773
- const joinedSchema = this.relationManager.buildSchemaWithJoins(schema$1, joins);
774
- return this.cleanWithJoins(row, joinedSchema, joins);
775
- }
776
- return this.clean(row, schema$1);
777
- });
778
- await this.alepha.events.emit("repository:read:after", {
779
- tableName: this.tableName,
780
- query,
781
- entities: rows
782
- });
783
- return rows;
784
- } catch (error) {
785
- throw new DbError("Query select has failed", error);
799
+ });
800
+ onStop = $hook({
801
+ on: "stop",
802
+ handler: async () => {
803
+ if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
804
+ if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
805
+ this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
806
+ await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
807
+ this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
808
+ }
809
+ await this.close();
786
810
  }
811
+ });
812
+ async connect() {
813
+ this.log.debug("Connect ..");
814
+ if (typeof Bun === "undefined") throw new AlephaError("BunPostgresProvider requires the Bun runtime. Use NodePostgresProvider for Node.js.");
815
+ const { drizzle } = await import("drizzle-orm/bun-sql");
816
+ this.client = new Bun.SQL(this.url);
817
+ await this.client.unsafe("SELECT 1");
818
+ this.bunDb = drizzle({
819
+ client: this.client,
820
+ logger: { logQuery: (query, params) => {
821
+ this.log.trace(query, { params });
822
+ } }
823
+ });
824
+ this.log.info("Connection OK");
787
825
  }
788
- /**
789
- * Find a single entity.
790
- */
791
- async findOne(query, opts = {}) {
792
- const [entity] = await this.findMany({
793
- limit: 1,
794
- ...query
795
- }, opts);
796
- if (!entity) throw new DbEntityNotFoundError(this.tableName);
797
- return entity;
798
- }
799
- /**
800
- * Find entities with pagination.
801
- *
802
- * It uses the same parameters as `find()`, but adds pagination metadata to the response.
803
- *
804
- * > Pagination CAN also do a count query to get the total number of elements.
805
- */
806
- async paginate(pagination = {}, query = {}, opts = {}) {
807
- const limit = query.limit ?? pagination.size ?? 10;
808
- const page = pagination.page ?? 0;
809
- const offset = query.offset ?? page * limit;
810
- let orderBy = query.orderBy;
811
- if (!query.orderBy && pagination.sort) orderBy = this.queryManager.parsePaginationSort(pagination.sort);
812
- const now = Date.now();
813
- const timers = {
814
- query: now,
815
- count: now
816
- };
817
- const tasks = [];
818
- tasks.push(this.findMany({
819
- offset,
820
- limit: limit + 1,
821
- orderBy,
822
- ...query
823
- }, opts).then((it) => {
824
- timers.query = Date.now() - timers.query;
825
- return it;
826
- }));
827
- if (opts.count) {
828
- const where = isSQLWrapper(query.where) ? query.where : query.where ? this.toSQL(query.where) : void 0;
829
- tasks.push(this.db.$count(this.table, where).then((it) => {
830
- timers.count = Date.now() - timers.count;
831
- return it;
832
- }));
826
+ async close() {
827
+ if (this.client) {
828
+ this.log.debug("Close...");
829
+ await this.client.close();
830
+ this.client = void 0;
831
+ this.bunDb = void 0;
832
+ this.log.info("Connection closed");
833
833
  }
834
- const [entities, countResult] = await Promise.all(tasks);
835
- let sortMetadata;
836
- if (orderBy) sortMetadata = this.queryManager.normalizeOrderBy(orderBy);
837
- const response = this.queryManager.createPagination(entities, limit, offset, sortMetadata);
838
- response.page.totalElements = countResult;
839
- if (countResult != null) response.page.totalPages = Math.ceil(countResult / limit);
840
- return response;
841
834
  }
842
- /**
843
- * Find an entity by ID.
844
- *
845
- * This is a convenience method for `findOne` with a where clause on the primary key.
846
- * If you need more complex queries, use `findOne` instead.
847
- */
848
- async findById(id, opts = {}) {
849
- return await this.findOne({ where: this.getWhereId(id) }, opts);
835
+ migrateLock = $lock({ handler: async () => {
836
+ await this.migrate();
837
+ } });
838
+ };
839
+
840
+ //#endregion
841
+ //#region ../../src/orm/services/SqliteModelBuilder.ts
842
+ var SqliteModelBuilder = class extends ModelBuilder {
843
+ buildTable(entity, options) {
844
+ const tableName = entity.name;
845
+ if (options.tables.has(tableName)) return;
846
+ const table = sqliteTable(tableName, this.schemaToSqliteColumns(tableName, entity.schema, options.enums, options.tables), this.getTableConfig(entity, options.tables));
847
+ options.tables.set(tableName, table);
850
848
  }
851
- /**
852
- * Helper to create a type-safe query object.
853
- */
854
- createQuery() {
855
- return {};
849
+ buildSequence(sequence, options) {
850
+ throw new AlephaError("SQLite does not support sequences");
856
851
  }
857
852
  /**
858
- * Helper to create a type-safe where clause.
853
+ * Get SQLite-specific config builder for the table.
859
854
  */
860
- createQueryWhere() {
861
- return {};
855
+ getTableConfig(entity, tables) {
856
+ const sqliteBuilders = {
857
+ index: index$1,
858
+ uniqueIndex: uniqueIndex$1,
859
+ unique: unique$1,
860
+ check: check$1,
861
+ foreignKey: foreignKey$1
862
+ };
863
+ const tableResolver = (entityName) => {
864
+ return tables.get(entityName);
865
+ };
866
+ return this.buildTableConfig(entity, sqliteBuilders, tableResolver, (config, self) => {
867
+ const customConfigs = config(self);
868
+ return Array.isArray(customConfigs) ? customConfigs : [];
869
+ });
870
+ }
871
+ schemaToSqliteColumns = (tableName, schema, enums, tables) => {
872
+ return Object.entries(schema.properties).reduce((columns, [key, value]) => {
873
+ let col = this.mapFieldToSqliteColumn(tableName, key, value, enums);
874
+ if ("default" in value && value.default != null) col = col.default(value.default);
875
+ if (PG_PRIMARY_KEY in value) col = col.primaryKey();
876
+ if (PG_REF in value) {
877
+ const config = value[PG_REF];
878
+ col = col.references(() => {
879
+ const ref = config.ref();
880
+ const table = tables.get(ref.entity.name);
881
+ if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
882
+ const target = table[ref.name];
883
+ if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
884
+ return target;
885
+ }, config.actions);
886
+ }
887
+ if (schema.required?.includes(key)) col = col.notNull();
888
+ return {
889
+ ...columns,
890
+ [key]: col
891
+ };
892
+ }, {});
893
+ };
894
+ mapFieldToSqliteColumn = (tableName, fieldName, value, enums) => {
895
+ const key = this.toColumnName(fieldName);
896
+ if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
897
+ if (t.schema.isInteger(value)) {
898
+ if (PG_SERIAL in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
899
+ return pg$1.integer(key);
900
+ }
901
+ if (t.schema.isBigInt(value)) {
902
+ if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
903
+ return pg$1.integer(key, { mode: "number" });
904
+ }
905
+ if (t.schema.isNumber(value)) {
906
+ if (PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
907
+ return pg$1.numeric(key);
908
+ }
909
+ if (t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
910
+ if (t.schema.isBoolean(value)) return this.sqliteBool(key, value);
911
+ if (t.schema.isObject(value)) return this.sqliteJson(key, value);
912
+ if (t.schema.isRecord(value)) return this.sqliteJson(key, value);
913
+ if (t.schema.isAny(value)) return this.sqliteJson(key, value);
914
+ if (t.schema.isArray(value)) {
915
+ if (t.schema.isObject(value.items)) return this.sqliteJson(key, value);
916
+ if (t.schema.isRecord(value.items)) return this.sqliteJson(key, value);
917
+ if (t.schema.isAny(value.items)) return this.sqliteJson(key, value);
918
+ if (t.schema.isString(value.items)) return this.sqliteJson(key, value);
919
+ if (t.schema.isInteger(value.items)) return this.sqliteJson(key, value);
920
+ if (t.schema.isNumber(value.items)) return this.sqliteJson(key, value);
921
+ if (t.schema.isBoolean(value.items)) return this.sqliteJson(key, value);
922
+ }
923
+ if (t.schema.isUnsafe(value) && "type" in value && value.type === "string") return this.mapStringToSqliteColumn(key, value);
924
+ throw new Error(`Unsupported schema for field '${tableName}.${fieldName}' (schema: ${JSON.stringify(value)})`);
925
+ };
926
+ mapStringToSqliteColumn = (key, value) => {
927
+ if (value.format === "uuid") {
928
+ if (PG_PRIMARY_KEY in value) return pg$1.text(key).primaryKey().$defaultFn(() => randomUUID());
929
+ return pg$1.text(key);
930
+ }
931
+ if (value.format === "byte") return this.sqliteJson(key, value);
932
+ if (value.format === "date-time") {
933
+ if (PG_CREATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
934
+ if (PG_UPDATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
935
+ return this.sqliteDateTime(key, {});
936
+ }
937
+ if (value.format === "date") return this.sqliteDate(key, {});
938
+ return pg$1.text(key);
939
+ };
940
+ sqliteJson = (name, document) => pg$1.customType({
941
+ dataType: () => "text",
942
+ toDriver: (value) => JSON.stringify(value),
943
+ fromDriver: (value) => {
944
+ return value && typeof value === "string" ? JSON.parse(value) : value;
945
+ }
946
+ })(name, { document }).$type();
947
+ sqliteDateTime = pg$1.customType({
948
+ dataType: () => "integer",
949
+ toDriver: (value) => new Date(value).getTime(),
950
+ fromDriver: (value) => {
951
+ return new Date(value).toISOString();
952
+ }
953
+ });
954
+ sqliteBool = pg$1.customType({
955
+ dataType: () => "integer",
956
+ toDriver: (value) => value ? 1 : 0,
957
+ fromDriver: (value) => value === 1
958
+ });
959
+ sqliteDate = pg$1.customType({
960
+ dataType: () => "integer",
961
+ toDriver: (value) => new Date(value).getTime(),
962
+ fromDriver: (value) => {
963
+ return new Date(value).toISOString().split("T")[0];
964
+ }
965
+ });
966
+ };
967
+
968
+ //#endregion
969
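
The builder above relies on Drizzle custom types to store booleans and ISO date-times as SQLite integers. A standalone sketch of the same pattern, assuming only `drizzle-orm/sqlite-core` and no Alepha wiring:

```ts
import { customType } from "drizzle-orm/sqlite-core";

// Booleans stored as 0/1 integers.
const sqliteBool = customType<{ data: boolean; driverData: number }>({
  dataType: () => "integer",
  toDriver: (value) => (value ? 1 : 0),
  fromDriver: (value) => value === 1,
});

// ISO date-time strings stored as epoch milliseconds.
const sqliteDateTime = customType<{ data: string; driverData: number }>({
  dataType: () => "integer",
  toDriver: (value) => new Date(value).getTime(),
  fromDriver: (value) => new Date(value).toISOString(),
});

// Usage in a table definition: sqliteBool("is_active"), sqliteDateTime("created_at")
```
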
+ //#region ../../src/orm/providers/drivers/BunSqliteProvider.ts
970
+ const envSchema$3 = t.object({ DATABASE_URL: t.optional(t.text()) });
971
+ /**
972
+ * Configuration options for the Bun SQLite database provider.
973
+ */
974
+ const bunSqliteOptions = $atom({
975
+ name: "alepha.postgres.bun-sqlite.options",
976
+ schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
977
+ default: {}
978
+ });
979
+ /**
980
+ * Bun SQLite provider using Drizzle ORM with Bun's native SQLite client.
981
+ *
982
+ * This provider uses Bun's built-in `bun:sqlite` for SQLite connections,
983
+ * which provides excellent performance on the Bun runtime.
984
+ *
985
+ * @example
986
+ * ```ts
987
+ * // Set DATABASE_URL environment variable
988
+ * // DATABASE_URL=sqlite://./my-database.db
989
+ *
990
+ * // Or configure programmatically
991
+ * alepha.with({
992
+ * provide: DatabaseProvider,
993
+ * use: BunSqliteProvider,
994
+ * });
995
+ *
996
+ * // Or use options atom
997
+ * alepha.store.mut(bunSqliteOptions, (old) => ({
998
+ * ...old,
999
+ * path: ":memory:",
1000
+ * }));
1001
+ * ```
1002
+ */
1003
+ var BunSqliteProvider = class extends DatabaseProvider {
1004
+ kit = $inject(DrizzleKitProvider);
1005
+ log = $logger();
1006
+ env = $env(envSchema$3);
1007
+ builder = $inject(SqliteModelBuilder);
1008
+ options = $use(bunSqliteOptions);
1009
+ sqlite;
1010
+ bunDb;
1011
+ get name() {
1012
+ return "sqlite";
1013
+ }
1014
+ dialect = "sqlite";
1015
+ get url() {
1016
+ const path = this.options.path ?? this.env.DATABASE_URL;
1017
+ if (path) {
1018
+ if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
1019
+ return path;
1020
+ }
1021
+ if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
1022
+ else return "node_modules/.alepha/bun-sqlite.db";
1023
+ }
1024
+ get db() {
1025
+ if (!this.bunDb) throw new AlephaError("Database not initialized");
1026
+ return this.bunDb;
1027
+ }
1028
+ async execute(query) {
1029
+ return this.bunDb.all(query);
1030
+ }
1031
+ onStart = $hook({
1032
+ on: "start",
1033
+ handler: async () => {
1034
+ if (typeof Bun === "undefined") throw new AlephaError("BunSqliteProvider requires the Bun runtime. Use NodeSqliteProvider for Node.js.");
1035
+ const { Database } = await import("bun:sqlite");
1036
+ const { drizzle } = await import("drizzle-orm/bun-sqlite");
1037
+ const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
1038
+ if (filepath !== ":memory:" && filepath !== "") {
1039
+ const dirname = filepath.split("/").slice(0, -1).join("/");
1040
+ if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
1041
+ }
1042
+ this.sqlite = new Database(filepath);
1043
+ this.bunDb = drizzle({
1044
+ client: this.sqlite,
1045
+ logger: { logQuery: (query, params) => {
1046
+ this.log.trace(query, { params });
1047
+ } }
1048
+ });
1049
+ await this.migrate();
1050
+ this.log.info(`Using Bun SQLite database at ${filepath}`);
1051
+ }
1052
+ });
1053
+ onStop = $hook({
1054
+ on: "stop",
1055
+ handler: async () => {
1056
+ if (this.sqlite) {
1057
+ this.log.debug("Closing Bun SQLite connection...");
1058
+ this.sqlite.close();
1059
+ this.sqlite = void 0;
1060
+ this.bunDb = void 0;
1061
+ this.log.info("Bun SQLite connection closed");
1062
+ }
1063
+ }
1064
+ });
1065
+ async executeMigrations(migrationsFolder) {
1066
+ const { migrate } = await import("drizzle-orm/bun-sqlite/migrator");
1067
+ await migrate(this.bunDb, { migrationsFolder });
1068
+ }
1069
+ };
1070
+
1071
+ //#endregion
1072
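
How the SQLite providers resolve their database path is split between `get url` and the start hook above. A combined sketch with illustrative names, assuming the same precedence (options atom, then DATABASE_URL, then a test/serverless fallback):

```ts
// Sketch only: folds the prefix stripping done in onStart into the url getter's defaults.
const resolveSqlitePath = (opts: {
  path?: string;             // from the options atom, e.g. ":memory:"
  DATABASE_URL?: string;     // e.g. "sqlite://./my-database.db"
  isTestOrServerless?: boolean;
}): string => {
  const raw = opts.path ?? opts.DATABASE_URL;
  if (raw) {
    if (raw.startsWith("postgres://")) throw new Error("Postgres URL is not supported for SQLite provider.");
    return raw.replace("sqlite://", "").replace("sqlite:", "");
  }
  return opts.isTestOrServerless ? ":memory:" : "node_modules/.alepha/bun-sqlite.db";
};

// resolveSqlitePath({ DATABASE_URL: "sqlite://./my-database.db" }) === "./my-database.db"
```
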
+ //#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
1073
+ /**
1074
+ * Cloudflare D1 SQLite provider using Drizzle ORM.
1075
+ *
1076
+ * This provider requires a D1 binding to be set via `cloudflareD1Options` before starting.
1077
+ * The binding is typically obtained from the Cloudflare Workers environment.
1078
+ *
1079
+ * @example
1080
+ * ```ts
1081
+ * // In your Cloudflare Worker
1082
+ * alepha.set(cloudflareD1Options, { binding: env.DB });
1083
+ * ```
1084
+ */
1085
+ var CloudflareD1Provider = class extends DatabaseProvider {
1086
+ kit = $inject(DrizzleKitProvider);
1087
+ log = $logger();
1088
+ builder = $inject(SqliteModelBuilder);
1089
+ env = $env(t.object({ DATABASE_URL: t.string({ description: "Expect to be 'cloudflare-d1://name:id'" }) }));
1090
+ d1;
1091
+ drizzleDb;
1092
+ get name() {
1093
+ return "sqlite";
1094
+ }
1095
+ get driver() {
1096
+ return "d1";
1097
+ }
1098
+ dialect = "sqlite";
1099
+ get url() {
1100
+ return this.env.DATABASE_URL;
1101
+ }
1102
+ get db() {
1103
+ if (!this.drizzleDb) throw new AlephaError("D1 database not initialized");
1104
+ return this.drizzleDb;
1105
+ }
1106
+ async execute(query) {
1107
+ const { rows } = await this.db.run(query);
1108
+ return rows;
1109
+ }
1110
+ onStart = $hook({
1111
+ on: "start",
1112
+ handler: async () => {
1113
+ const [bindingName] = this.env.DATABASE_URL.replace("cloudflare-d1://", "").split(":");
1114
+ const cloudflareEnv = this.alepha.store.get("cloudflare.env");
1115
+ if (!cloudflareEnv) throw new AlephaError("Cloudflare Workers environment not found in Alepha store under 'cloudflare.env'.");
1116
+ const binding = cloudflareEnv[bindingName];
1117
+ if (!binding) throw new AlephaError(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
1118
+ this.d1 = binding;
1119
+ const { drizzle } = await import("drizzle-orm/d1");
1120
+ this.drizzleDb = drizzle(this.d1);
1121
+ await this.migrate();
1122
+ this.log.info("Using Cloudflare D1 database");
1123
+ }
1124
+ });
1125
+ async executeMigrations(migrationsFolder) {
1126
+ const { migrate } = await import("drizzle-orm/d1/migrator");
1127
+ await migrate(this.db, { migrationsFolder });
862
1128
  }
863
1129
  /**
864
- * Create an entity.
865
- *
866
- * @param data The entity to create.
867
- * @param opts The options for creating the entity.
868
- * @returns The ID of the created entity.
1130
+ * Override development migration to skip sync (not supported on D1).
1131
+ * D1 requires proper migrations to be applied.
869
1132
  */
870
- async create(data, opts = {}) {
871
- await this.alepha.events.emit("repository:create:before", {
872
- tableName: this.tableName,
873
- data
874
- });
1133
+ async runDevelopmentMigration(migrationsFolder) {
1134
+ await this.executeMigrations(migrationsFolder);
1135
+ }
1136
+ /**
1137
+ * Override test migration to run migrations instead of sync.
1138
+ * D1 doesn't support schema synchronization.
1139
+ */
1140
+ async runTestMigration() {
1141
+ const migrationsFolder = this.getMigrationsFolder();
875
1142
  try {
876
- const entity = await this.rawInsert(opts).values(this.cast(data ?? {}, true)).returning(this.table).then(([it]) => this.clean(it, this.entity.schema));
877
- await this.alepha.events.emit("repository:create:after", {
878
- tableName: this.tableName,
879
- data,
880
- entity
881
- });
882
- return entity;
883
- } catch (error) {
884
- throw this.handleError(error, "Insert query has failed");
1143
+ await this.executeMigrations(migrationsFolder);
1144
+ } catch {
1145
+ this.log.warn("D1 migrations failed in test environment - ensure migrations exist");
885
1146
  }
886
1147
  }
1148
+ };
1149
+
1150
+ //#endregion
1151
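
The D1 start hook above derives the binding name from DATABASE_URL and then looks it up in the Workers environment stored under `cloudflare.env`. A small sketch of just that lookup, with an illustrative `DB` binding:

```ts
// Sketch only: DATABASE_URL is expected to look like "cloudflare-d1://BINDING_NAME:database-id".
const resolveD1Binding = (databaseUrl: string, cloudflareEnv: Record<string, unknown>) => {
  const [bindingName] = databaseUrl.replace("cloudflare-d1://", "").split(":");
  const binding = cloudflareEnv[bindingName];
  if (!binding) throw new Error(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
  return binding;
};

// In a Worker: resolveD1Binding("cloudflare-d1://DB:xxxx-xxxx-xxxx", env) returns env.DB
```
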
+ //#region ../../src/orm/providers/drivers/NodePostgresProvider.ts
1152
+ const envSchema$2 = t.object({
1153
+ DATABASE_URL: t.optional(t.text()),
1154
+ POSTGRES_SCHEMA: t.optional(t.text())
1155
+ });
1156
+ var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
1157
+ static SSL_MODES = [
1158
+ "require",
1159
+ "allow",
1160
+ "prefer",
1161
+ "verify-full"
1162
+ ];
1163
+ log = $logger();
1164
+ env = $env(envSchema$2);
1165
+ kit = $inject(DrizzleKitProvider);
1166
+ builder = $inject(PostgresModelBuilder);
1167
+ client;
1168
+ pg;
1169
+ dialect = "postgresql";
1170
+ get name() {
1171
+ return "postgres";
1172
+ }
887
1173
  /**
888
- * Create many entities.
889
- *
890
- * Inserts are batched in chunks of 1000 to avoid hitting database limits.
891
- *
892
- * @param values The entities to create.
893
- * @param opts The statement options.
894
- * @returns The created entities.
1174
+ * In testing mode, the schema name will be generated and deleted after the test.
895
1175
  */
896
- async createMany(values, opts = {}) {
897
- if (values.length === 0) return [];
898
- await this.alepha.events.emit("repository:create:before", {
899
- tableName: this.tableName,
900
- data: values
901
- });
902
- const batchSize = opts.batchSize ?? 1e3;
903
- const allEntities = [];
1176
+ schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
1177
+ get url() {
1178
+ if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
1179
+ return this.env.DATABASE_URL;
1180
+ }
1181
+ /**
1182
+ * Execute a SQL statement.
1183
+ */
1184
+ execute(statement) {
904
1185
  try {
905
- for (let i = 0; i < values.length; i += batchSize) {
906
- const batch = values.slice(i, i + batchSize);
907
- const entities = await this.rawInsert(opts).values(batch.map((data) => this.cast(data, true))).returning(this.table).then((rows) => rows.map((it) => this.clean(it, this.entity.schema)));
908
- allEntities.push(...entities);
1186
+ return this.db.execute(statement);
1187
+ } catch (error) {
1188
+ throw new DbError("Error executing statement", error);
1189
+ }
1190
+ }
1191
+ /**
1192
+ * Get Postgres schema used by this provider.
1193
+ */
1194
+ get schema() {
1195
+ if (this.schemaForTesting) return this.schemaForTesting;
1196
+ if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
1197
+ return "public";
1198
+ }
1199
+ /**
1200
+ * Get the Drizzle Postgres database instance.
1201
+ */
1202
+ get db() {
1203
+ if (!this.pg) throw new AlephaError("Database not initialized");
1204
+ return this.pg;
1205
+ }
1206
+ async executeMigrations(migrationsFolder) {
1207
+ await migrate(this.db, { migrationsFolder });
1208
+ }
1209
+ onStart = $hook({
1210
+ on: "start",
1211
+ handler: async () => {
1212
+ await this.connect();
1213
+ if (!this.alepha.isServerless()) try {
1214
+ await this.migrateLock.run();
1215
+ } catch (error) {
1216
+ throw new DbMigrationError(error);
1217
+ }
1218
+ }
1219
+ });
1220
+ onStop = $hook({
1221
+ on: "stop",
1222
+ handler: async () => {
1223
+ if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
1224
+ if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
1225
+ this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
1226
+ await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
1227
+ this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
1228
+ }
1229
+ await this.close();
1230
+ }
1231
+ });
1232
+ async connect() {
1233
+ this.log.debug("Connect ..");
1234
+ const client = postgres(this.getClientOptions());
1235
+ await client`SELECT 1`;
1236
+ this.client = client;
1237
+ this.pg = drizzle$1(client, { logger: { logQuery: (query, params) => {
1238
+ this.log.trace(query, { params });
1239
+ } } });
1240
+ this.log.info("Connection OK");
1241
+ }
1242
+ async close() {
1243
+ if (this.client) {
1244
+ this.log.debug("Close...");
1245
+ await this.client.end();
1246
+ this.client = void 0;
1247
+ this.pg = void 0;
1248
+ this.log.info("Connection closed");
1249
+ }
1250
+ }
1251
+ migrateLock = $lock({ handler: async () => {
1252
+ await this.migrate();
1253
+ } });
1254
+ /**
1255
+ * Map the DATABASE_URL to postgres client options.
1256
+ */
1257
+ getClientOptions() {
1258
+ const url = new URL(this.url);
1259
+ return {
1260
+ host: url.hostname,
1261
+ user: decodeURIComponent(url.username),
1262
+ database: decodeURIComponent(url.pathname.replace("/", "")),
1263
+ password: decodeURIComponent(url.password),
1264
+ port: Number(url.port || 5432),
1265
+ ssl: this.ssl(url),
1266
+ onnotice: () => {}
1267
+ };
1268
+ }
1269
+ ssl(url) {
1270
+ const mode = url.searchParams.get("sslmode");
1271
+ for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
1272
+ }
1273
+ };
1274
+
1275
+ //#endregion
1276
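
The DATABASE_URL parsing in `getClientOptions()` above maps directly onto postgres.js connection options, including the optional `?sslmode=` query parameter. A standalone sketch of that mapping:

```ts
// Sketch only: mirrors getClientOptions()/ssl() without the Alepha plumbing.
const SSL_MODES = ["require", "allow", "prefer", "verify-full"] as const;

const toClientOptions = (databaseUrl: string) => {
  const url = new URL(databaseUrl);
  const mode = url.searchParams.get("sslmode");
  return {
    host: url.hostname,
    user: decodeURIComponent(url.username),
    database: decodeURIComponent(url.pathname.replace("/", "")),
    password: decodeURIComponent(url.password),
    port: Number(url.port || 5432),
    ssl: SSL_MODES.find((it) => it === mode), // undefined when sslmode is absent or unknown
  };
};

// toClientOptions("postgres://user:pass@localhost:5432/app?sslmode=require")
// → { host: "localhost", user: "user", database: "app", port: 5432, ssl: "require", ... }
```
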
+ //#region ../../src/orm/providers/drivers/NodeSqliteProvider.ts
1277
+ const envSchema$1 = t.object({ DATABASE_URL: t.optional(t.text()) });
1278
+ /**
1279
+ * Configuration options for the Node.js SQLite database provider.
1280
+ */
1281
+ const nodeSqliteOptions = $atom({
1282
+ name: "alepha.postgres.node-sqlite.options",
1283
+ schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
1284
+ default: {}
1285
+ });
1286
+ /**
1287
+ * Adds fake SQLite support for Node.js, based on the Postgres interfaces.
1288
+ *
1289
+ * This is NOT a real SQLite provider; it's a workaround to use SQLite with Drizzle ORM.
1290
+ * This is NOT recommended for production use.
1291
+ */
1292
+ var NodeSqliteProvider = class extends DatabaseProvider {
1293
+ kit = $inject(DrizzleKitProvider);
1294
+ log = $logger();
1295
+ env = $env(envSchema$1);
1296
+ builder = $inject(SqliteModelBuilder);
1297
+ options = $use(nodeSqliteOptions);
1298
+ sqlite;
1299
+ get name() {
1300
+ return "sqlite";
1301
+ }
1302
+ dialect = "sqlite";
1303
+ get url() {
1304
+ const path = this.options.path ?? this.env.DATABASE_URL;
1305
+ if (path) {
1306
+ if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
1307
+ return path;
1308
+ }
1309
+ if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
1310
+ else return "node_modules/.alepha/sqlite.db";
1311
+ }
1312
+ async execute(query) {
1313
+ const { sql, params, method } = this.db.all(query).getQuery();
1314
+ this.log.trace(`${sql}`, params);
1315
+ const statement = this.sqlite.prepare(sql);
1316
+ if (method === "run") {
1317
+ statement.run(...params);
1318
+ return [];
1319
+ }
1320
+ if (method === "get") {
1321
+ const data = statement.get(...params);
1322
+ return data ? [{ ...data }] : [];
1323
+ }
1324
+ return statement.all(...params);
1325
+ }
1326
+ db = drizzle$2(async (sql, params, method) => {
1327
+ const statement = this.sqlite.prepare(sql);
1328
+ this.log.trace(`${sql}`, { params });
1329
+ if (method === "get") {
1330
+ const data = statement.get(...params);
1331
+ return { rows: data ? [{ ...data }] : [] };
1332
+ }
1333
+ if (method === "run") {
1334
+ statement.run(...params);
1335
+ return { rows: [] };
1336
+ }
1337
+ if (method === "all") return { rows: statement.all(...params).map((row) => Object.values(row)) };
1338
+ if (method === "values") return { rows: statement.all(...params).map((row) => Object.values(row)) };
1339
+ throw new AlephaError(`Unsupported method: ${method}`);
1340
+ });
1341
+ onStart = $hook({
1342
+ on: "start",
1343
+ handler: async () => {
1344
+ const { DatabaseSync } = await import("node:sqlite");
1345
+ const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
1346
+ if (filepath !== ":memory:" && filepath !== "") {
1347
+ const dirname = filepath.split("/").slice(0, -1).join("/");
1348
+ if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
909
1349
  }
910
- await this.alepha.events.emit("repository:create:after", {
911
- tableName: this.tableName,
912
- data: values,
913
- entity: allEntities
914
- });
915
- return allEntities;
916
- } catch (error) {
917
- throw this.handleError(error, "Insert query has failed");
1350
+ this.sqlite = new DatabaseSync(filepath);
1351
+ await this.migrate();
1352
+ this.log.info(`Using SQLite database at ${filepath}`);
918
1353
  }
1354
+ });
1355
+ async executeMigrations(migrationsFolder) {
1356
+ await migrate$1(this.db, async (migrationQueries) => {
1357
+ this.log.debug("Executing migration queries", { migrationQueries });
1358
+ for (const query of migrationQueries) this.sqlite.prepare(query).run();
1359
+ }, { migrationsFolder });
919
1360
  }
920
- /**
921
- * Find an entity and update it.
922
- */
923
- async updateOne(where, data, opts = {}) {
924
- await this.alepha.events.emit("repository:update:before", {
925
- tableName: this.tableName,
926
- where,
927
- data
928
- });
929
- let row = data;
930
- const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
931
- if (updatedAtField) row[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
932
- where = this.withDeletedAt(where, opts);
933
- row = this.cast(row, false);
934
- delete row[this.id.key];
935
- const response = await this.rawUpdate(opts).set(row).where(this.toSQL(where)).returning(this.table).catch((error) => {
936
- throw this.handleError(error, "Update query has failed");
937
- });
938
- if (!response[0]) throw new DbEntityNotFoundError(this.tableName);
1361
+ };
1362
+
1363
+ //#endregion
1364
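
NodeSqliteProvider builds its Drizzle instance through the sqlite-proxy driver, handing every query to a callback that runs it against `node:sqlite`. A self-contained sketch of that pattern (Node 22+ for the built-in `node:sqlite` module; names are illustrative):

```ts
import { DatabaseSync } from "node:sqlite";
import { drizzle } from "drizzle-orm/sqlite-proxy";

const sqlite = new DatabaseSync(":memory:");

const db = drizzle(async (sql, params, method) => {
  const statement = sqlite.prepare(sql);
  if (method === "get") {
    const row = statement.get(...params);
    return { rows: row ? Object.values(row) : [] };
  }
  if (method === "run") {
    statement.run(...params);
    return { rows: [] };
  }
  // "all" and "values" both expect arrays of value tuples.
  return { rows: statement.all(...params).map((row) => Object.values(row)) };
});
```

The provider above follows the same contract: every callback invocation resolves to `{ rows }`.
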
+ //#region ../../src/orm/providers/drivers/PglitePostgresProvider.ts
1365
+ const envSchema = t.object({ DATABASE_URL: t.optional(t.text()) });
1366
+ var PglitePostgresProvider = class PglitePostgresProvider extends DatabaseProvider {
1367
+ static importPglite() {
939
1368
  try {
940
- const entity = this.clean(response[0], this.entity.schema);
941
- await this.alepha.events.emit("repository:update:after", {
942
- tableName: this.tableName,
943
- where,
944
- data,
945
- entities: [entity]
946
- });
947
- return entity;
948
- } catch (error) {
949
- throw this.handleError(error, "Update query has failed");
950
- }
1369
+ return createRequire(import.meta.url)("@electric-sql/pglite");
1370
+ } catch {}
951
1371
  }
952
- /**
953
- * Save a given entity.
954
- *
955
- * @example
956
- * ```ts
957
- * const entity = await repository.findById(1);
958
- * entity.name = "New Name"; // update a field
959
- * delete entity.description; // delete a field
960
- * await repository.save(entity);
961
- * ```
962
- *
963
- * Difference with `updateById/updateOne`:
964
- *
965
- * - requires the entity to be fetched first (whole object is expected)
966
- * - check pg.version() if present -> optimistic locking
967
- * - validate entity against schema
968
- * - undefined values will be set to null, not ignored!
969
- *
970
- * @see {@link DbVersionMismatchError}
971
- */
972
- async save(entity, opts = {}) {
973
- const row = entity;
974
- const id = row[this.id.key];
975
- if (id == null) throw new AlephaError("Cannot save entity without ID - missing primary key in value");
976
- for (const key of Object.keys(this.entity.schema.properties)) if (row[key] === void 0) row[key] = null;
977
- let where = this.createQueryWhere();
978
- where.id = { eq: id };
979
- const versionField = getAttrFields(this.entity.schema, PG_VERSION)?.[0];
980
- if (versionField && typeof row[versionField.key] === "number") {
981
- where = { and: [where, { [versionField.key]: { eq: row[versionField.key] } }] };
982
- row[versionField.key] += 1;
1372
+ env = $env(envSchema);
1373
+ log = $logger();
1374
+ kit = $inject(DrizzleKitProvider);
1375
+ builder = $inject(PostgresModelBuilder);
1376
+ client;
1377
+ pglite;
1378
+ get name() {
1379
+ return "postgres";
1380
+ }
1381
+ get driver() {
1382
+ return "pglite";
1383
+ }
1384
+ dialect = "postgresql";
1385
+ get url() {
1386
+ let path = this.env.DATABASE_URL;
1387
+ if (!path) if (this.alepha.isTest()) path = ":memory:";
1388
+ else path = "node_modules/.alepha/pglite";
1389
+ else if (path.includes(":memory:")) path = ":memory:";
1390
+ else if (path.startsWith("file://")) path = path.replace("file://", "");
1391
+ return path;
1392
+ }
1393
+ get db() {
1394
+ if (!this.pglite) throw new AlephaError("Database not initialized");
1395
+ return this.pglite;
1396
+ }
1397
+ async execute(statement) {
1398
+ const { rows } = await this.db.execute(statement);
1399
+ return rows;
1400
+ }
1401
+ onStart = $hook({
1402
+ on: "start",
1403
+ handler: async () => {
1404
+ if (Object.keys(this.kit.getModels(this)).length === 0) return;
1405
+ const module = PglitePostgresProvider.importPglite();
1406
+ if (!module) throw new AlephaError("@electric-sql/pglite is not installed. Please install it to use the pglite driver.");
1407
+ const { drizzle } = createRequire(import.meta.url)("drizzle-orm/pglite");
1408
+ const path = this.url;
1409
+ if (path !== ":memory:") {
1410
+ await mkdir(path, { recursive: true }).catch(() => null);
1411
+ this.client = new module.PGlite(path);
1412
+ } else this.client = new module.PGlite();
1413
+ this.pglite = drizzle({ client: this.client });
1414
+ await this.migrate();
1415
+ this.log.info(`Using PGlite database at ${path}`);
983
1416
  }
984
- try {
985
- const newValue = await this.updateOne(where, row, opts);
986
- for (const key of Object.keys(this.entity.schema.properties)) row[key] = void 0;
987
- Object.assign(row, newValue);
988
- } catch (error) {
989
- if (error instanceof DbEntityNotFoundError && versionField) try {
990
- await this.findById(id);
991
- throw new DbVersionMismatchError(this.tableName, id);
992
- } catch (lookupError) {
993
- if (lookupError instanceof DbEntityNotFoundError) throw error;
994
- if (lookupError instanceof DbVersionMismatchError) throw lookupError;
995
- throw lookupError;
1417
+ });
1418
+ onStop = $hook({
1419
+ on: "stop",
1420
+ handler: async () => {
1421
+ if (this.client) {
1422
+ this.log.debug("Closing PGlite connection...");
1423
+ await this.client.close();
1424
+ this.client = void 0;
1425
+ this.pglite = void 0;
1426
+ this.log.info("PGlite connection closed");
996
1427
  }
997
- throw error;
998
1428
  }
1429
+ });
1430
+ async executeMigrations(migrationsFolder) {
1431
+ await migrate$2(this.db, { migrationsFolder });
999
1432
  }
1000
- /**
1001
- * Find an entity by ID and update it.
1002
- */
1003
- async updateById(id, data, opts = {}) {
1004
- return await this.updateOne(this.getWhereId(id), data, opts);
1433
+ };
1434
+
1435
+ //#endregion
1436
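
PGlite gives the Postgres providers an embedded fallback. A minimal sketch of the wiring done in onStart above, assuming `@electric-sql/pglite` is installed:

```ts
import { PGlite } from "@electric-sql/pglite";
import { sql } from "drizzle-orm";
import { drizzle } from "drizzle-orm/pglite";

// In-memory instance, like the ":memory:" branch above; pass a directory path for persistence.
const client = new PGlite();
const db = drizzle({ client });

const { rows } = await db.execute(sql`SELECT 1 AS ok`);
console.log(rows); // [{ ok: 1 }]
```
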
+ //#region ../../src/orm/errors/DbConflictError.ts
1437
+ var DbConflictError = class extends DbError {
1438
+ name = "DbConflictError";
1439
+ status = 409;
1440
+ };
1441
+
1442
+ //#endregion
1443
+ //#region ../../src/orm/errors/DbEntityNotFoundError.ts
1444
+ var DbEntityNotFoundError = class extends DbError {
1445
+ name = "DbEntityNotFoundError";
1446
+ status = 404;
1447
+ constructor(entityName) {
1448
+ super(`Entity from '${entityName}' was not found`);
1005
1449
  }
1006
- /**
1007
- * Find many entities and update all of them.
1008
- */
1009
- async updateMany(where, data, opts = {}) {
1010
- await this.alepha.events.emit("repository:update:before", {
1011
- tableName: this.tableName,
1012
- where,
1013
- data
1014
- });
1015
- const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
1016
- if (updatedAtField) data[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
1017
- where = this.withDeletedAt(where, opts);
1018
- data = this.cast(data, false);
1019
- try {
1020
- const entities = await this.rawUpdate(opts).set(data).where(this.toSQL(where)).returning();
1021
- await this.alepha.events.emit("repository:update:after", {
1022
- tableName: this.tableName,
1023
- where,
1024
- data,
1025
- entities
1026
- });
1027
- return entities.map((it) => it[this.id.key]);
1028
- } catch (error) {
1029
- throw this.handleError(error, "Update query has failed");
1030
- }
1450
+ };
1451
+
1452
+ //#endregion
1453
+ //#region ../../src/orm/errors/DbVersionMismatchError.ts
1454
+ /**
1455
+ * Error thrown when there is a version mismatch.
1456
+ * It's thrown by {@link Repository#save} when the updated entity version does not match the one in the database.
1457
+ * This is used for optimistic concurrency control.
1458
+ */
1459
+ var DbVersionMismatchError = class extends DbError {
1460
+ name = "DbVersionMismatchError";
1461
+ constructor(table, id) {
1462
+ super(`Version mismatch for table '${table}' and id '${id}'`);
1463
+ }
1464
+ };
1465
+
1466
+ //#endregion
1467
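
DbVersionMismatchError is the visible half of the optimistic-locking flow around `save()`. A hedged sketch of how calling code might react to it; the repository shape and the error import are assumed here rather than taken from this diff:

```ts
// Illustrative types only; the real repository comes from $repository(...) in the package.
declare const repository: {
  findById(id: string): Promise<{ id: string; name: string; version: number }>;
  save(entity: { id: string; name: string; version: number }): Promise<void>;
};
declare class DbVersionMismatchError extends Error {}

async function renameSafely(id: string, name: string): Promise<void> {
  const entity = await repository.findById(id);
  entity.name = name;
  try {
    await repository.save(entity); // compares the version column, then increments it
  } catch (error) {
    if (error instanceof DbVersionMismatchError) {
      // Another writer updated the row since we read it: re-read and retry
      // (unbounded here, for illustration only), or surface a 409 to the caller.
      return renameSafely(id, name);
    }
    throw error;
  }
}
```
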
+ //#region ../../src/orm/helpers/pgAttr.ts
1468
+ /**
1469
+ * Decorates a typebox schema with a Postgres attribute.
1470
+ *
1471
+ * > It's just a fancy way to add Symbols to a field.
1472
+ *
1473
+ * @example
1474
+ * ```ts
1475
+ * import { t } from "alepha";
1476
+ * import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
1477
+ *
1478
+ * export const updatedAtSchema = pgAttr(
1479
+ * t.datetime(), PG_UPDATED_AT,
1480
+ * );
1481
+ * ```
1482
+ */
1483
+ const pgAttr = (type, attr, value) => {
1484
+ Object.assign(type, { [attr]: value ?? {} });
1485
+ return type;
1486
+ };
1487
+ /**
1488
+ * Retrieves the fields of a schema that have a specific attribute.
1489
+ */
1490
+ const getAttrFields = (schema, name) => {
1491
+ const fields = [];
1492
+ for (const key of Object.keys(schema.properties)) {
1493
+ const value = schema.properties[key];
1494
+ if (name in value) fields.push({
1495
+ type: value,
1496
+ key,
1497
+ data: value[name]
1498
+ });
1031
1499
  }
1500
+ return fields;
1501
+ };
1502
+
1503
+ //#endregion
1504
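
To round out the pgAttr example above: `getAttrFields()` is how the repository later finds the decorated fields by their symbol. A short sketch with an illustrative entity schema; the helpers are internal, so they are declared here rather than imported:

```ts
import { t } from "alepha";

declare const PG_UPDATED_AT: symbol;
declare const pgAttr: <T>(type: T, attr: symbol, value?: unknown) => T;
declare const getAttrFields: (
  schema: { properties: Record<string, unknown> },
  name: symbol,
) => Array<{ key: string; type: unknown; data: unknown }>;

const userSchema = t.object({
  name: t.text(),
  updatedAt: pgAttr(t.datetime(), PG_UPDATED_AT),
});

// Returns [{ key: "updatedAt", ... }]: the field carrying the PG_UPDATED_AT symbol.
const updatedAtFields = getAttrFields(userSchema, PG_UPDATED_AT);
```
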
+ //#region ../../src/orm/services/PgRelationManager.ts
1505
+ var PgRelationManager = class {
1032
1506
  /**
1033
- * Find many and delete all of them.
1034
- * @returns Array of deleted entity IDs
1507
+ * Recursively build joins for the query builder based on the relations map
1035
1508
  */
1036
- async deleteMany(where = {}, opts = {}) {
1037
- const deletedAt = this.deletedAt();
1038
- if (deletedAt && !opts.force) return await this.updateMany(where, { [deletedAt.key]: opts.now ?? this.dateTimeProvider.nowISOString() }, opts);
1039
- await this.alepha.events.emit("repository:delete:before", {
1040
- tableName: this.tableName,
1041
- where
1042
- });
1043
- try {
1044
- const ids = (await this.rawDelete(opts).where(this.toSQL(where)).returning({ id: this.table[this.id.key] })).map((row) => row.id);
1045
- await this.alepha.events.emit("repository:delete:after", {
1046
- tableName: this.tableName,
1047
- where,
1048
- ids
1509
+ buildJoins(provider, builder, joins, withRelations, table, parentKey) {
1510
+ for (const [key, join] of Object.entries(withRelations)) {
1511
+ const from = provider.table(join.join);
1512
+ const on = isSQLWrapper$1(join.on) ? join.on : sql$1`${table[join.on[0]]} = ${from[join.on[1].name]}`;
1513
+ if (join.type === "right") builder.rightJoin(from, on);
1514
+ else if (join.type === "inner") builder.innerJoin(from, on);
1515
+ else builder.leftJoin(from, on);
1516
+ joins.push({
1517
+ key,
1518
+ table: getTableName(from),
1519
+ schema: join.join.schema,
1520
+ col: (name) => from[name],
1521
+ parent: parentKey
1049
1522
  });
1050
- return ids;
1051
- } catch (error) {
1052
- throw new DbError("Delete query has failed", error);
1523
+ if (join.with) this.buildJoins(provider, builder, joins, join.with, from, parentKey ? `${parentKey}.${key}` : key);
1053
1524
  }
1054
1525
  }
1055
1526
  /**
1056
- * Delete all entities.
1057
- * @returns Array of deleted entity IDs
1058
- */
1059
- clear(opts = {}) {
1060
- return this.deleteMany({}, opts);
1061
- }
1062
- /**
1063
- * Delete the given entity.
1064
- *
1065
- * You must fetch the entity first in order to delete it.
1066
- * @returns Array containing the deleted entity ID
1527
+ * Map a row with its joined relations based on the joins definition
1067
1528
  */
1068
- async destroy(entity, opts = {}) {
1069
- const id = entity[this.id.key];
1070
- if (id == null) throw new AlephaError("Cannot destroy entity without ID");
1071
- const deletedAt = this.deletedAt();
1072
- if (deletedAt && !opts.force) {
1073
- opts.now ??= this.dateTimeProvider.nowISOString();
1074
- entity[deletedAt.key] = opts.now;
1529
+ mapRowWithJoins(record, row, schema, joins, parentKey) {
1530
+ for (const join of joins) if (join.parent === parentKey) {
1531
+ const joinedData = row[join.table];
1532
+ if (this.isAllNull(joinedData)) record[join.key] = void 0;
1533
+ else {
1534
+ record[join.key] = joinedData;
1535
+ this.mapRowWithJoins(record[join.key], row, schema, joins, parentKey ? `${parentKey}.${join.key}` : join.key);
1536
+ }
1075
1537
  }
1076
- return await this.deleteById(id, opts);
1538
+ return record;
1077
1539
  }
1078
1540
  /**
1079
- * Find an entity and delete it.
1080
- * @returns Array of deleted entity IDs (should contain at most one ID)
1541
+ * Check if all values in an object are null (indicates a left join with no match)
1081
1542
  */
1082
- async deleteOne(where = {}, opts = {}) {
1083
- return await this.deleteMany(where, opts);
1543
+ isAllNull(obj) {
1544
+ if (obj === null || obj === void 0) return true;
1545
+ if (typeof obj !== "object") return false;
1546
+ return Object.values(obj).every((val) => val === null);
1084
1547
  }
1085
1548
  /**
1086
- * Find an entity by ID and delete it.
1087
- * @returns Array containing the deleted entity ID
1088
- * @throws DbEntityNotFoundError if the entity is not found
1549
+ * Build a schema that includes all join properties recursively
1089
1550
  */
1090
- async deleteById(id, opts = {}) {
1091
- const result = await this.deleteMany(this.getWhereId(id), opts);
1092
- if (result.length === 0) throw new DbEntityNotFoundError(`Entity with ID ${id} not found in ${this.tableName}`);
1093
- return result;
1551
+ buildSchemaWithJoins(baseSchema, joins, parentPath) {
1552
+ const schema = Value.Clone(baseSchema);
1553
+ const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
1554
+ for (const join of joinsAtThisLevel) {
1555
+ const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
1556
+ const childJoins = joins.filter((j) => j.parent === joinPath);
1557
+ let joinSchema = join.schema;
1558
+ if (childJoins.length > 0) joinSchema = this.buildSchemaWithJoins(join.schema, joins, joinPath);
1559
+ schema.properties[join.key] = t.optional(joinSchema);
1560
+ }
1561
+ return schema;
1094
1562
  }
1563
+ };
1564
+
1565
+ //#endregion
1566
+ //#region ../../src/orm/services/QueryManager.ts
1567
+ var QueryManager = class {
1568
+ alepha = $inject(Alepha);
1095
1569
  /**
1096
- * Count entities.
1570
+ * Convert a query object to a SQL query.
1097
1571
  */
1098
- async count(where = {}, opts = {}) {
1099
- where = this.withDeletedAt(where, opts);
1100
- return (opts.tx ?? this.db).$count(this.table, this.toSQL(where));
1101
- }
1102
- conflictMessagePattern = "duplicate key value violates unique constraint";
1103
- handleError(error, message) {
1104
- if (!(error instanceof Error)) return new DbError(message);
1105
- if (error.cause?.message.includes(this.conflictMessagePattern) || error.message.includes(this.conflictMessagePattern)) return new DbConflictError(message, error);
1106
- return new DbError(message, error);
1107
- }
1108
- withDeletedAt(where, opts = {}) {
1109
- if (opts.force) return where;
1110
- const deletedAt = this.deletedAt();
1111
- if (!deletedAt) return where;
1112
- return { and: [where, { [deletedAt.key]: { isNull: true } }] };
1113
- }
1114
- deletedAt() {
1115
- const deletedAtFields = getAttrFields(this.entity.schema, PG_DELETED_AT);
1116
- if (deletedAtFields.length > 0) return deletedAtFields[0];
1572
+ toSQL(query, options) {
1573
+ const { schema, col, joins } = options;
1574
+ const conditions = [];
1575
+ if (isSQLWrapper(query)) conditions.push(query);
1576
+ else {
1577
+ const keys = Object.keys(query);
1578
+ for (const key of keys) {
1579
+ const operator = query[key];
1580
+ if (typeof query[key] === "object" && query[key] != null && !Array.isArray(query[key]) && joins?.length) {
1581
+ const matchingJoins = joins.filter((j) => j.key === key);
1582
+ if (matchingJoins.length > 0) {
1583
+ const join = matchingJoins[0];
1584
+ const joinPath = join.parent ? `${join.parent}.${key}` : key;
1585
+ const recursiveJoins = joins.filter((j) => {
1586
+ if (!j.parent) return false;
1587
+ return j.parent === joinPath || j.parent.startsWith(`${joinPath}.`);
1588
+ }).map((j) => {
1589
+ const newParent = j.parent === joinPath ? void 0 : j.parent.substring(joinPath.length + 1);
1590
+ return {
1591
+ ...j,
1592
+ parent: newParent
1593
+ };
1594
+ });
1595
+ const sql = this.toSQL(query[key], {
1596
+ schema: join.schema,
1597
+ col: join.col,
1598
+ joins: recursiveJoins.length > 0 ? recursiveJoins : void 0,
1599
+ dialect: options.dialect
1600
+ });
1601
+ if (sql) conditions.push(sql);
1602
+ continue;
1603
+ }
1604
+ }
1605
+ if (Array.isArray(operator)) {
1606
+ const operations = operator.map((it) => {
1607
+ if (isSQLWrapper(it)) return it;
1608
+ return this.toSQL(it, {
1609
+ schema,
1610
+ col,
1611
+ joins,
1612
+ dialect: options.dialect
1613
+ });
1614
+ }).filter((it) => it != null);
1615
+ if (key === "and") return and(...operations);
1616
+ if (key === "or") return or(...operations);
1617
+ }
1618
+ if (key === "not") {
1619
+ const where = this.toSQL(operator, {
1620
+ schema,
1621
+ col,
1622
+ joins,
1623
+ dialect: options.dialect
1624
+ });
1625
+ if (where) return not(where);
1626
+ }
1627
+ if (operator) {
1628
+ const column = col(key);
1629
+ const sql = this.mapOperatorToSql(operator, column, schema, key, options.dialect);
1630
+ if (sql) conditions.push(sql);
1631
+ }
1632
+ }
1633
+ }
1634
+ if (conditions.length === 1) return conditions[0];
1635
+ return and(...conditions);
1117
1636
  }
1118
1637
  /**
1119
- * Convert something to valid Pg Insert Value.
1638
+ * Check if an object has any filter operator properties.
1120
1639
  */
1121
- cast(data, insert) {
1122
- const schema$1 = insert ? this.entity.insertSchema : t.partial(this.entity.updateSchema);
1123
- return this.alepha.codec.encode(schema$1, data);
1640
+ hasFilterOperatorProperties(obj) {
1641
+ if (!obj || typeof obj !== "object") return false;
1642
+ return [
1643
+ "eq",
1644
+ "ne",
1645
+ "gt",
1646
+ "gte",
1647
+ "lt",
1648
+ "lte",
1649
+ "inArray",
1650
+ "notInArray",
1651
+ "isNull",
1652
+ "isNotNull",
1653
+ "like",
1654
+ "notLike",
1655
+ "ilike",
1656
+ "notIlike",
1657
+ "contains",
1658
+ "startsWith",
1659
+ "endsWith",
1660
+ "between",
1661
+ "notBetween",
1662
+ "arrayContains",
1663
+ "arrayContained",
1664
+ "arrayOverlaps"
1665
+ ].some((key) => key in obj);
1124
1666
  }
1125
1667
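
For reference, these operator keys are the ones accepted inside a where clause. A hedged illustration of the shape (field names are hypothetical; the object would normally be passed as `where` to the repository's find methods):

```ts
const where = {
  and: [
    { age: { gte: 18, lt: 65 } },
    { name: { ilike: "%doe%" } },
    { deletedAt: { isNull: true } },
    {
      or: [
        { role: { inArray: ["admin", "editor"] } },
        { score: { between: [10, 20] } },
      ],
    },
  ],
};
```
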
  /**
1126
- * Transform a row from the database into a clean entity.
1668
+ * Map a filter operator to a SQL query.
1127
1669
  */
1128
- clean(row, schema$1) {
1129
- for (const key of Object.keys(schema$1.properties)) {
1130
- const value = schema$1.properties[key];
1131
- if (typeof row[key] === "string") {
1132
- if (t.schema.isDateTime(value)) row[key] = this.dateTimeProvider.of(row[key]).toISOString();
1133
- else if (t.schema.isDate(value)) row[key] = this.dateTimeProvider.of(`${row[key]}T00:00:00Z`).toISOString().split("T")[0];
1134
- }
1135
- if (typeof row[key] === "bigint" && t.schema.isBigInt(value)) row[key] = row[key].toString();
1670
+ mapOperatorToSql(operator, column, columnSchema, columnName, dialect = "postgresql") {
1671
+ const encodeValue = (value) => {
1672
+ if (value == null) return value;
1673
+ if (columnSchema && columnName) try {
1674
+ const fieldSchema = columnSchema.properties[columnName];
1675
+ if (fieldSchema) return this.alepha.codec.encode(fieldSchema, value, { encoder: "drizzle" });
1676
+ } catch (error) {}
1677
+ return value;
1678
+ };
1679
+ const encodeArray = (values) => {
1680
+ return values.map((v) => encodeValue(v));
1681
+ };
1682
+ if (typeof operator !== "object" || operator == null || !this.hasFilterOperatorProperties(operator)) return eq(column, encodeValue(operator));
1683
+ const conditions = [];
1684
+ if (operator?.eq != null) conditions.push(eq(column, encodeValue(operator.eq)));
1685
+ if (operator?.ne != null) conditions.push(ne(column, encodeValue(operator.ne)));
1686
+ if (operator?.gt != null) conditions.push(gt(column, encodeValue(operator.gt)));
1687
+ if (operator?.gte != null) conditions.push(gte(column, encodeValue(operator.gte)));
1688
+ if (operator?.lt != null) conditions.push(lt(column, encodeValue(operator.lt)));
1689
+ if (operator?.lte != null) conditions.push(lte(column, encodeValue(operator.lte)));
1690
+ if (operator?.inArray != null) {
1691
+ if (!Array.isArray(operator.inArray) || operator.inArray.length === 0) throw new AlephaError("inArray operator requires at least one value");
1692
+ conditions.push(inArray(column, encodeArray(operator.inArray)));
1693
+ }
1694
+ if (operator?.notInArray != null) {
1695
+ if (!Array.isArray(operator.notInArray) || operator.notInArray.length === 0) throw new AlephaError("notInArray operator requires at least one value");
1696
+ conditions.push(notInArray(column, encodeArray(operator.notInArray)));
1697
+ }
1698
+ if (operator?.isNull != null) conditions.push(isNull(column));
1699
+ if (operator?.isNotNull != null) conditions.push(isNotNull(column));
1700
+ if (operator?.like != null) conditions.push(like(column, encodeValue(operator.like)));
1701
+ if (operator?.notLike != null) conditions.push(notLike(column, encodeValue(operator.notLike)));
1702
+ if (operator?.ilike != null) conditions.push(ilike(column, encodeValue(operator.ilike)));
1703
+ if (operator?.notIlike != null) conditions.push(notIlike(column, encodeValue(operator.notIlike)));
1704
+ if (operator?.contains != null) {
1705
+ const escapedValue = String(operator.contains).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1706
+ if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}%`)})`);
1707
+ else conditions.push(ilike(column, encodeValue(`%${escapedValue}%`)));
1136
1708
  }
1137
- return this.alepha.codec.decode(schema$1, row);
1138
- }
1139
- /**
1140
- * Clean a row with joins recursively
1141
- */
1142
- cleanWithJoins(row, schema$1, joins, parentPath) {
1143
- const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
1144
- const cleanRow = { ...row };
1145
- const joinedData = {};
1146
- for (const join of joinsAtThisLevel) {
1147
- joinedData[join.key] = cleanRow[join.key];
1148
- delete cleanRow[join.key];
1709
+ if (operator?.startsWith != null) {
1710
+ const escapedValue = String(operator.startsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1711
+ if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`${escapedValue}%`)})`);
1712
+ else conditions.push(ilike(column, encodeValue(`${escapedValue}%`)));
1149
1713
  }
1150
- const entity = this.clean(cleanRow, schema$1);
1151
- for (const join of joinsAtThisLevel) {
1152
- const joinedValue = joinedData[join.key];
1153
- if (joinedValue != null) {
1154
- const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
1155
- if (joins.filter((j) => j.parent === joinPath).length > 0) entity[join.key] = this.cleanWithJoins(joinedValue, join.schema, joins, joinPath);
1156
- else entity[join.key] = this.clean(joinedValue, join.schema);
1157
- } else entity[join.key] = void 0;
1714
+ if (operator?.endsWith != null) {
1715
+ const escapedValue = String(operator.endsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
1716
+ if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}`)})`);
1717
+ else conditions.push(ilike(column, encodeValue(`%${escapedValue}`)));
1158
1718
  }
1159
- return entity;
1719
+ if (operator?.between != null) {
1720
+ if (!Array.isArray(operator.between) || operator.between.length !== 2) throw new Error("between operator requires exactly 2 values [min, max]");
1721
+ conditions.push(between(column, encodeValue(operator.between[0]), encodeValue(operator.between[1])));
1722
+ }
1723
+ if (operator?.notBetween != null) {
1724
+ if (!Array.isArray(operator.notBetween) || operator.notBetween.length !== 2) throw new Error("notBetween operator requires exactly 2 values [min, max]");
1725
+ conditions.push(notBetween(column, encodeValue(operator.notBetween[0]), encodeValue(operator.notBetween[1])));
1726
+ }
1727
+ if (operator?.arrayContains != null) conditions.push(arrayContains(column, encodeValue(operator.arrayContains)));
1728
+ if (operator?.arrayContained != null) conditions.push(arrayContained(column, encodeValue(operator.arrayContained)));
1729
+ if (operator?.arrayOverlaps != null) conditions.push(arrayOverlaps(column, encodeValue(operator.arrayOverlaps)));
1730
+ if (conditions.length === 0) return;
1731
+ if (conditions.length === 1) return conditions[0];
1732
+ return and(...conditions);
1160
1733
  }
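A rough sketch of how these operators compose (field names are hypothetical and the generated SQL shown is approximate): string matchers are escaped and wrapped in `ILIKE` on PostgreSQL or a lowered `LIKE` on SQLite, and several operators on the same column are AND-ed together.

```ts
// Case-insensitive prefix match plus an exclusion on the same column.
const where = { name: { startsWith: "Jo", notIlike: "%test%" } };
// PostgreSQL (roughly): "name" ILIKE 'Jo%' AND NOT ("name" ILIKE '%test%')
// SQLite     (roughly): LOWER("name") LIKE LOWER('Jo%') AND ...

// Range filter; `between` expects exactly two values [min, max].
const range = { createdAt: { between: ["2024-01-01", "2024-12-31"] } };
```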
1161
1734
  /**
1162
- * Convert a where clause to SQL.
1735
+ * Parse pagination sort string to orderBy format.
1736
+ * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
1737
+ * - Columns separated by comma
1738
+ * - Prefix with '-' for DESC direction
1739
+ *
1740
+ * @param sort Pagination sort string
1741
+ * @returns OrderBy array or single object
1163
1742
  */
1164
- toSQL(where, joins) {
1165
- return this.queryManager.toSQL(where, {
1166
- schema: this.entity.schema,
1167
- col: (name) => {
1168
- return this.col(name);
1169
- },
1170
- joins,
1171
- dialect: this.provider.dialect
1743
+ parsePaginationSort(sort) {
1744
+ const orderByClauses = sort.split(",").map((field) => field.trim()).map((field) => {
1745
+ if (field.startsWith("-")) return {
1746
+ column: field.substring(1),
1747
+ direction: "desc"
1748
+ };
1749
+ return {
1750
+ column: field,
1751
+ direction: "asc"
1752
+ };
1172
1753
  });
1754
+ return orderByClauses.length === 1 ? orderByClauses[0] : orderByClauses;
1173
1755
  }
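Equivalently, the sort string accepted here maps onto the orderBy shape used elsewhere in this class, for example:

```ts
// "firstName,-lastName" is equivalent to:
const orderBy = [
  { column: "firstName", direction: "asc" },
  { column: "lastName", direction: "desc" },
];
// A single field such as "-createdAt" is unwrapped to one object instead of an array.
```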
1174
1756
  /**
1175
- * Get the where clause for an ID.
1757
+ * Normalize orderBy parameter to array format.
1758
+ * Supports 3 modes:
1759
+ * 1. String: "name" -> [{ column: "name", direction: "asc" }]
1760
+ * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
1761
+ * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
1176
1762
  *
1177
- * @param id The ID to get the where clause for.
1178
- * @returns The where clause for the ID.
1763
+ * @param orderBy The orderBy parameter
1764
+ * @returns Normalized array of order by clauses
1179
1765
  */
1180
- getWhereId(id) {
1181
- return { [this.id.key]: { eq: t.schema.isString(this.id.type) ? String(id) : Number(id) } };
1766
+ normalizeOrderBy(orderBy) {
1767
+ if (typeof orderBy === "string") return [{
1768
+ column: orderBy,
1769
+ direction: "asc"
1770
+ }];
1771
+ if (!Array.isArray(orderBy) && typeof orderBy === "object") return [{
1772
+ column: orderBy.column,
1773
+ direction: orderBy.direction ?? "asc"
1774
+ }];
1775
+ if (Array.isArray(orderBy)) return orderBy.map((item) => ({
1776
+ column: item.column,
1777
+ direction: item.direction ?? "asc"
1778
+ }));
1779
+ return [];
1182
1780
  }
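The three accepted shapes and their normalized result, shown as plain data (column names hypothetical):

```ts
const fromString = "name";                                // -> [{ column: "name", direction: "asc" }]
const fromObject = { column: "name", direction: "desc" }; // -> [{ column: "name", direction: "desc" }]
const fromArray = [{ column: "name" }, { column: "age", direction: "desc" }];
// -> [{ column: "name", direction: "asc" }, { column: "age", direction: "desc" }]
```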
1183
1781
  /**
1184
- * Find a primary key in the schema.
1782
+ * Create a pagination object.
1783
+ *
1784
+ * @deprecated Use `createPagination` from alepha instead.
1785
+ * This method now delegates to the framework-level helper.
1786
+ *
1787
+ * @param entities The entities to paginate.
1788
+ * @param limit The limit of the pagination.
1789
+ * @param offset The offset of the pagination.
1790
+ * @param sort Optional sort metadata to include in response.
1185
1791
  */
1186
- getPrimaryKey(schema$1) {
1187
- const primaryKeys = getAttrFields(schema$1, PG_PRIMARY_KEY);
1188
- if (primaryKeys.length === 0) throw new AlephaError("Primary key not found in schema");
1189
- if (primaryKeys.length > 1) throw new AlephaError(`Multiple primary keys (${primaryKeys.length}) are not supported`);
1190
- return {
1191
- key: primaryKeys[0].key,
1192
- col: this.col(primaryKeys[0].key),
1193
- type: primaryKeys[0].type
1194
- };
1792
+ createPagination(entities, limit = 10, offset = 0, sort) {
1793
+ return createPagination(entities, limit, offset, sort);
1195
1794
  }
1196
1795
  };
1197
1796
 
1198
1797
  //#endregion
1199
- //#region ../../src/orm/providers/RepositoryProvider.ts
1200
- var RepositoryProvider = class {
1798
+ //#region ../../src/orm/services/Repository.ts
1799
+ var Repository = class Repository {
1800
+ entity;
1801
+ provider;
1802
+ log = $logger();
1803
+ relationManager = $inject(PgRelationManager);
1804
+ queryManager = $inject(QueryManager);
1805
+ dateTimeProvider = $inject(DateTimeProvider);
1201
1806
  alepha = $inject(Alepha);
1202
- registry = /* @__PURE__ */ new Map();
1203
- getRepositories(provider) {
1204
- const repositories = this.alepha.services(Repository);
1205
- if (provider) return repositories.filter((it) => it.provider === provider);
1206
- return repositories;
1207
- }
1208
- getRepository(entity) {
1209
- const RepositoryClass = this.createClassRepository(entity);
1210
- return this.alepha.inject(RepositoryClass);
1211
- }
1212
- createClassRepository(entity) {
1213
- let name = entity.name.charAt(0).toUpperCase() + entity.name.slice(1);
1214
- if (name.endsWith("s")) name = name.slice(0, -1);
1215
- name = `${name}Repository`;
1216
- if (this.registry.has(entity)) return this.registry.get(entity);
1217
- class GenericRepository extends Repository {
1807
+ static of(entity, provider = DatabaseProvider) {
1808
+ return class InlineRepository extends Repository {
1218
1809
  constructor() {
1219
- super(entity);
1810
+ super(entity, provider);
1220
1811
  }
1221
- }
1222
- Object.defineProperty(GenericRepository, "name", { value: name });
1223
- this.registry.set(entity, GenericRepository);
1224
- return GenericRepository;
1812
+ };
1225
1813
  }
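A minimal sketch of the static factory above, assuming a `userEntity` definition and an `age` column that exist in your application (imports omitted):

```ts
// Repository.of(entity) returns a class already bound to that entity (and, optionally, a provider),
// so a concrete repository only needs to add its own query helpers.
class UserRepository extends Repository.of(userEntity) {
  findAdults() {
    return this.findMany({ where: { age: { gte: 18 } } });
  }
}
```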
1226
- };
1227
-
1228
- //#endregion
1229
- //#region ../../src/orm/primitives/$repository.ts
1230
- /**
1231
- * Get the repository for the given entity.
1232
- */
1233
- const $repository = (entity) => {
1234
- const { alepha } = $context();
1235
- return $inject(alepha.inject(RepositoryProvider).createClassRepository(entity));
1236
- };
1237
-
1238
- //#endregion
1239
- //#region ../../src/orm/primitives/$sequence.ts
1240
- /**
1241
- * Creates a PostgreSQL sequence primitive for generating unique numeric values.
1242
- */
1243
- const $sequence = (options = {}) => {
1244
- return createPrimitive(SequencePrimitive, options);
1245
- };
1246
- var SequencePrimitive = class extends Primitive {
1247
- provider = this.$provider();
1248
- onInit() {
1249
- this.provider.registerSequence(this);
1814
+ constructor(entity, provider = DatabaseProvider) {
1815
+ this.entity = entity;
1816
+ this.provider = this.alepha.inject(provider);
1817
+ this.provider.registerEntity(entity);
1250
1818
  }
1251
- get name() {
1252
- return this.options.name ?? this.config.propertyKey;
1819
+ /**
1820
+ * Represents the primary key of the table.
1821
+ * - Key is the name of the primary key column.
1822
+ * - Type is the type (TypeBox) of the primary key column.
1823
+ *
1824
+ * ID is mandatory. If the table does not have a primary key, it will throw an error.
1825
+ */
1826
+ get id() {
1827
+ return this.getPrimaryKey(this.entity.schema);
1253
1828
  }
1254
- async next() {
1255
- return this.provider.execute(sql$1`SELECT nextval('${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"')`).then((rows) => Number(rows[0]?.nextval));
1829
+ /**
1830
+ * Get Drizzle table object.
1831
+ */
1832
+ get table() {
1833
+ return this.provider.table(this.entity);
1256
1834
  }
1257
- async current() {
1258
- return this.provider.execute(sql$1`SELECT last_value FROM ${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"`).then((rows) => Number(rows[0]?.last_value));
1835
+ /**
1836
+ * Get SQL table name. (from Drizzle table object)
1837
+ */
1838
+ get tableName() {
1839
+ return this.entity.name;
1259
1840
  }
1260
- $provider() {
1261
- return this.options.provider ?? this.alepha.inject(DatabaseProvider);
1841
+ /**
1842
+ * Getter for the database connection from the database provider.
1843
+ */
1844
+ get db() {
1845
+ return this.provider.db;
1262
1846
  }
1263
- };
1264
- $sequence[KIND] = SequencePrimitive;
1265
-
1266
- //#endregion
1267
- //#region ../../src/orm/providers/DrizzleKitProvider.ts
1268
- var DrizzleKitProvider = class {
1269
- log = $logger();
1270
- alepha = $inject(Alepha);
1271
1847
  /**
1272
- * Synchronize database with current schema definitions.
1848
+ * Execute a SQL query.
1273
1849
  *
1274
- * In development mode, it will generate and execute migrations based on the current state.
1275
- * In testing mode, it will generate migrations from scratch without applying them.
1850
+ * This method allows executing raw SQL queries against the database.
1851
+ * This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
1276
1852
  *
1277
- * Does nothing in production mode, you must handle migrations manually.
1853
+ * You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
1854
+ *
1855
+ * @example
1856
+ * ```ts
1857
+ * class App {
1858
+ * repository = $repository({ ... });
1859
+ * async getAdults() {
1860
+ * const users = this.repository.table; // Drizzle table object
1861
+ * await this.repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
1862
+ * // or better
1863
+ * await this.repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
1864
+ * }
1865
+ * }
1866
+ * ```
1278
1867
  */
1279
- async synchronize(provider) {
1280
- if (this.alepha.isProduction()) {
1281
- this.log.warn("Synchronization skipped in production mode.");
1282
- return;
1283
- }
1284
- if (provider.schema !== "public") await this.createSchemaIfNotExists(provider, provider.schema);
1285
- const now = Date.now();
1286
- if (this.alepha.isTest()) {
1287
- const { statements } = await this.generateMigration(provider);
1288
- await this.executeStatements(statements, provider);
1289
- } else {
1290
- const entry = await this.loadDevMigrations(provider);
1291
- const { statements, snapshot } = await this.generateMigration(provider, entry?.snapshot ? JSON.parse(entry.snapshot) : void 0);
1292
- await this.executeStatements(statements, provider, true);
1293
- await this.saveDevMigrations(provider, snapshot, entry);
1294
- }
1295
- this.log.info(`Db '${provider.name}' synchronization OK [${Date.now() - now}ms]`);
1868
+ async query(query, schema) {
1869
+ const raw = typeof query === "function" ? query(this.table, this.db) : query;
1870
+ if (typeof raw === "string" && raw.includes("[object Object]")) throw new AlephaError("Invalid SQL query. Did you forget to call the 'sql' function?");
1871
+ return (await this.provider.execute(raw)).map((it) => {
1872
+ return this.clean(this.mapRawFieldsToEntity(it), schema ?? this.entity.schema);
1873
+ });
1296
1874
  }
1297
1875
  /**
1298
- * Mostly used for testing purposes. You can generate SQL migration statements without executing them.
1876
+ * Map raw database fields to entity fields. (handles column name differences)
1299
1877
  */
1300
- async generateMigration(provider, prevSnapshot) {
1301
- const kit = this.importDrizzleKit();
1302
- const models = this.getModels(provider);
1303
- if (Object.keys(models).length > 0) {
1304
- if (provider.dialect === "sqlite") {
1305
- const prev$1 = prevSnapshot ?? await kit.generateSQLiteDrizzleJson({});
1306
- const curr$1 = await kit.generateSQLiteDrizzleJson(models);
1307
- return {
1308
- models,
1309
- statements: await kit.generateSQLiteMigration(prev$1, curr$1),
1310
- snapshot: curr$1
1311
- };
1878
+ mapRawFieldsToEntity(row) {
1879
+ const entity = {};
1880
+ for (const key of Object.keys(row)) {
1881
+ entity[key] = row[key];
1882
+ for (const colKey of Object.keys(this.table)) if (this.table[colKey].name === key) {
1883
+ entity[colKey] = row[key];
1884
+ break;
1312
1885
  }
1313
- const prev = prevSnapshot ?? await kit.generateDrizzleJson({});
1314
- const curr = await kit.generateDrizzleJson(models);
1315
- return {
1316
- models,
1317
- statements: await kit.generateMigration(prev, curr),
1318
- snapshot: curr
1319
- };
1320
1886
  }
1321
- return {
1322
- models,
1323
- statements: [],
1324
- snapshot: {}
1325
- };
1887
+ return entity;
1326
1888
  }
1327
1889
  /**
1328
- * Load all tables, enums, sequences, etc. from the provider's repositories.
1890
+ * Get a Drizzle column from the table by its name.
1329
1891
  */
1330
- getModels(provider) {
1331
- const models = {};
1332
- for (const [key, value] of provider.tables.entries()) {
1333
- if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
1334
- models[key] = value;
1335
- }
1336
- for (const [key, value] of provider.enums.entries()) {
1337
- if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
1338
- models[key] = value;
1339
- }
1340
- for (const [key, value] of provider.sequences.entries()) {
1341
- if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
1342
- models[key] = value;
1343
- }
1344
- return models;
1892
+ col(name) {
1893
+ const column = this.table[name];
1894
+ if (!column) throw new AlephaError(`Invalid access. Column ${String(name)} not found in table ${this.tableName}`);
1895
+ return column;
1345
1896
  }
1346
1897
  /**
1347
- * Load the migration snapshot from the database.
1898
+ * Run a transaction.
1348
1899
  */
1349
- async loadDevMigrations(provider) {
1350
- const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
1351
- if (provider.url.includes(":memory:")) {
1352
- this.log.trace(`In-memory database detected for '${name}', skipping migration snapshot load.`);
1353
- return;
1354
- }
1355
- if (provider.dialect === "sqlite") {
1356
- try {
1357
- const text = await readFile(`node_modules/.alepha/sqlite-${name}.json`, "utf-8");
1358
- return this.alepha.codec.decode(devMigrationsSchema, text);
1359
- } catch (e) {
1360
- this.log.trace(`No existing migration snapshot for '${name}'`, e);
1361
- }
1362
- return;
1363
- }
1364
- await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS "drizzle";`);
1365
- await provider.execute(sql$1`
1366
- CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_dev_migrations" (
1367
- "id" SERIAL PRIMARY KEY,
1368
- "name" TEXT NOT NULL,
1369
- "created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
1370
- "snapshot" TEXT NOT NULL
1371
- );
1372
- `);
1373
- const rows = await provider.run(sql$1`SELECT * FROM "drizzle"."__drizzle_dev_migrations" WHERE "name" = ${name} LIMIT 1`, devMigrationsSchema);
1374
- if (rows.length === 0) {
1375
- this.log.trace(`No existing migration snapshot for '${name}'`);
1376
- return;
1377
- }
1378
- return this.alepha.codec.decode(devMigrationsSchema, rows[0]);
1379
- }
1380
- async saveDevMigrations(provider, curr, devMigrations) {
1381
- if (provider.url.includes(":memory:")) {
1382
- this.log.trace(`In-memory database detected for '${provider.constructor.name}', skipping migration snapshot save.`);
1383
- return;
1384
- }
1385
- const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
1386
- if (provider.dialect === "sqlite") {
1387
- const filePath = `node_modules/.alepha/sqlite-${name}.json`;
1388
- await mkdir("node_modules/.alepha", { recursive: true }).catch(() => null);
1389
- await writeFile(filePath, JSON.stringify({
1390
- id: devMigrations?.id ?? 1,
1391
- name,
1392
- created_at: /* @__PURE__ */ new Date(),
1393
- snapshot: JSON.stringify(curr)
1394
- }, null, 2));
1395
- this.log.debug(`Saved migration snapshot to '${filePath}'`);
1396
- return;
1397
- }
1398
- if (!devMigrations) await provider.execute(sql$1`INSERT INTO "drizzle"."__drizzle_dev_migrations" ("name", "snapshot") VALUES (${name}, ${JSON.stringify(curr)})`);
1399
- else {
1400
- const newSnapshot = JSON.stringify(curr);
1401
- if (devMigrations.snapshot !== newSnapshot) await provider.execute(sql$1`UPDATE "drizzle"."__drizzle_dev_migrations" SET "snapshot" = ${newSnapshot} WHERE "id" = ${devMigrations.id}`);
1402
- }
1403
- }
1404
- async executeStatements(statements, provider, catchErrors = false) {
1405
- let nErrors = 0;
1406
- for (const statement of statements) {
1407
- if (statement.startsWith("DROP SCHEMA")) continue;
1408
- try {
1409
- await provider.execute(sql$1.raw(statement));
1410
- } catch (error) {
1411
- const errorMessage = `Error executing statement: ${statement}`;
1412
- if (catchErrors) {
1413
- nErrors++;
1414
- this.log.warn(errorMessage, { context: [error] });
1415
- } else throw error;
1416
- }
1417
- }
1418
- if (nErrors > 0) this.log.warn(`Executed ${statements.length} statements with ${nErrors} errors.`);
1419
- }
1420
- async createSchemaIfNotExists(provider, schemaName) {
1421
- if (!/^[a-z0-9_]+$/i.test(schemaName)) throw new Error(`Invalid schema name: ${schemaName}. Must only contain alphanumeric characters and underscores.`);
1422
- const sqlSchema = sql$1.raw(schemaName);
1423
- if (schemaName.startsWith("test_")) {
1424
- this.log.info(`Drop test schema '${schemaName}' ...`, schemaName);
1425
- await provider.execute(sql$1`DROP SCHEMA IF EXISTS ${sqlSchema} CASCADE`);
1900
+ async transaction(transaction, config) {
1901
+ if (this.provider.driver === "pglite") {
1902
+ this.log.warn("Transactions are not supported with pglite driver");
1903
+ return await transaction(null);
1426
1904
  }
1427
- this.log.debug(`Ensuring schema '${schemaName}' exists`);
1428
- await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS ${sqlSchema}`);
1905
+ this.log.debug(`Starting transaction on table ${this.tableName}`);
1906
+ return await this.db.transaction(transaction, config);
1429
1907
  }
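A sketch of the intended usage, assuming an `id` primary key and a `name` column: the Drizzle `tx` handed to the callback can be forwarded through the statement options (`opts.tx`) that the raw builders below fall back on. As noted in the code, pglite is the exception and runs the callback without a transaction.

```ts
await repository.transaction(async (tx) => {
  // Both statements run on the same transaction because `tx` is passed via opts.
  const user = await repository.create({ name: "Ada" }, { tx });
  await repository.updateById(user.id, { name: "Ada L." }, { tx });
});
```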
1430
1908
  /**
1431
- * Try to load the official Drizzle Kit API.
1432
- * If not available, fallback to the local kit import.
1909
+ * Start a SELECT query on the table.
1433
1910
  */
1434
- importDrizzleKit() {
1435
- try {
1436
- return createRequire(import.meta.url)("drizzle-kit/api");
1437
- } catch (_) {
1438
- throw new Error("Drizzle Kit is not installed. Please install it with `npm install -D drizzle-kit`.");
1439
- }
1911
+ rawSelect(opts = {}) {
1912
+ return (opts.tx ?? this.db).select().from(this.table);
1440
1913
  }
1441
- };
1442
- const devMigrationsSchema = t.object({
1443
- id: t.number(),
1444
- name: t.text(),
1445
- snapshot: t.string(),
1446
- created_at: t.string()
1447
- });
1448
-
1449
- //#endregion
1450
- //#region ../../src/orm/services/ModelBuilder.ts
1451
- /**
1452
- * Abstract base class for transforming Alepha Primitives (Entity, Sequence, etc...)
1453
- * into drizzle models (tables, enums, sequences, etc...).
1454
- */
1455
- var ModelBuilder = class {
1456
1914
  /**
1457
- * Convert camelCase to snake_case for column names.
1915
+ * Start a SELECT DISTINCT query on the table.
1458
1916
  */
1459
- toColumnName(str) {
1460
- return str[0].toLowerCase() + str.slice(1).replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
1917
+ rawSelectDistinct(opts = {}, columns = []) {
1918
+ const db = opts.tx ?? this.db;
1919
+ const table = this.table;
1920
+ const fields = {};
1921
+ for (const column of columns) if (typeof column === "string") fields[column] = this.col(column);
1922
+ return db.selectDistinct(fields).from(table);
1461
1923
  }
1462
1924
  /**
1463
- * Build the table configuration function for any database.
1464
- * This includes indexes, foreign keys, constraints, and custom config.
1465
- *
1466
- * @param entity - The entity primitive
1467
- * @param builders - Database-specific builder functions
1468
- * @param tableResolver - Function to resolve entity references to table columns
1469
- * @param customConfigHandler - Optional handler for custom config
1925
+ * Start an INSERT query on the table.
1470
1926
  */
1471
- buildTableConfig(entity, builders, tableResolver, customConfigHandler) {
1472
- if (!entity.options.indexes && !entity.options.foreignKeys && !entity.options.constraints && !entity.options.config) return;
1473
- return (self) => {
1474
- const configs = [];
1475
- if (entity.options.indexes) {
1476
- for (const indexDef of entity.options.indexes) if (typeof indexDef === "string") {
1477
- const columnName = this.toColumnName(indexDef);
1478
- const indexName = `${entity.name}_${columnName}_idx`;
1479
- if (self[indexDef]) configs.push(builders.index(indexName).on(self[indexDef]));
1480
- } else if (typeof indexDef === "object" && indexDef !== null) {
1481
- if ("column" in indexDef) {
1482
- const columnName = this.toColumnName(indexDef.column);
1483
- const indexName = indexDef.name || `${entity.name}_${columnName}_idx`;
1484
- if (self[indexDef.column]) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(self[indexDef.column]));
1485
- else configs.push(builders.index(indexName).on(self[indexDef.column]));
1486
- } else if ("columns" in indexDef) {
1487
- const columnNames = indexDef.columns.map((col) => this.toColumnName(col));
1488
- const indexName = indexDef.name || `${entity.name}_${columnNames.join("_")}_idx`;
1489
- const cols = indexDef.columns.map((col) => self[col]).filter(Boolean);
1490
- if (cols.length === indexDef.columns.length) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(...cols));
1491
- else configs.push(builders.index(indexName).on(...cols));
1492
- }
1493
- }
1494
- }
1495
- if (entity.options.foreignKeys) for (const fkDef of entity.options.foreignKeys) {
1496
- const columnNames = fkDef.columns.map((col) => this.toColumnName(col));
1497
- const cols = fkDef.columns.map((col) => self[col]).filter(Boolean);
1498
- if (cols.length === fkDef.columns.length) {
1499
- const fkName = fkDef.name || `${entity.name}_${columnNames.join("_")}_fk`;
1500
- const foreignColumns = fkDef.foreignColumns.map((colRef) => {
1501
- const entityCol = colRef();
1502
- if (!entityCol || !entityCol.entity || !entityCol.name) throw new Error(`Invalid foreign column reference in ${entity.name}`);
1503
- if (tableResolver) {
1504
- const foreignTable = tableResolver(entityCol.entity.name);
1505
- if (!foreignTable) throw new Error(`Foreign table ${entityCol.entity.name} not found for ${entity.name}`);
1506
- return foreignTable[entityCol.name];
1507
- }
1508
- return entityCol;
1509
- });
1510
- configs.push(builders.foreignKey({
1511
- name: fkName,
1512
- columns: cols,
1513
- foreignColumns
1514
- }));
1515
- }
1516
- }
1517
- if (entity.options.constraints) for (const constraintDef of entity.options.constraints) {
1518
- const columnNames = constraintDef.columns.map((col) => this.toColumnName(col));
1519
- const cols = constraintDef.columns.map((col) => self[col]).filter(Boolean);
1520
- if (cols.length === constraintDef.columns.length) {
1521
- if (constraintDef.unique) {
1522
- const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_unique`;
1523
- configs.push(builders.unique(constraintName).on(...cols));
1524
- }
1525
- if (constraintDef.check) {
1526
- const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_check`;
1527
- configs.push(builders.check(constraintName, constraintDef.check));
1528
- }
1529
- }
1530
- }
1531
- if (entity.options.config && customConfigHandler) configs.push(...customConfigHandler(entity.options.config, self));
1532
- else if (entity.options.config) {
1533
- const customConfigs = entity.options.config(self);
1534
- if (Array.isArray(customConfigs)) configs.push(...customConfigs);
1535
- }
1536
- return configs;
1537
- };
1538
- }
1539
- };
1540
-
1541
- //#endregion
1542
- //#region ../../src/orm/services/SqliteModelBuilder.ts
1543
- var SqliteModelBuilder = class extends ModelBuilder {
1544
- buildTable(entity, options) {
1545
- const tableName = entity.name;
1546
- if (options.tables.has(tableName)) return;
1547
- const table = sqliteTable(tableName, this.schemaToSqliteColumns(tableName, entity.schema, options.enums, options.tables), this.getTableConfig(entity, options.tables));
1548
- options.tables.set(tableName, table);
1927
+ rawInsert(opts = {}) {
1928
+ return (opts.tx ?? this.db).insert(this.table);
1549
1929
  }
1550
- buildSequence(sequence, options) {
1551
- throw new AlephaError("SQLite does not support sequences");
1930
+ /**
1931
+ * Start an UPDATE query on the table.
1932
+ */
1933
+ rawUpdate(opts = {}) {
1934
+ return (opts.tx ?? this.db).update(this.table);
1552
1935
  }
1553
1936
  /**
1554
- * Get SQLite-specific config builder for the table.
1937
+ * Start a DELETE query on the table.
1555
1938
  */
1556
- getTableConfig(entity, tables) {
1557
- const sqliteBuilders = {
1558
- index: index$1,
1559
- uniqueIndex: uniqueIndex$1,
1560
- unique: unique$1,
1561
- check: check$1,
1562
- foreignKey: foreignKey$1
1563
- };
1564
- const tableResolver = (entityName) => {
1565
- return tables.get(entityName);
1566
- };
1567
- return this.buildTableConfig(entity, sqliteBuilders, tableResolver, (config, self) => {
1568
- const customConfigs = config(self);
1569
- return Array.isArray(customConfigs) ? customConfigs : [];
1570
- });
1939
+ rawDelete(opts = {}) {
1940
+ return (opts.tx ?? this.db).delete(this.table);
1571
1941
  }
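The raw* helpers are thin escape hatches that return plain Drizzle builders; a sketch with a hypothetical `status` column:

```ts
import { eq } from "drizzle-orm";

// Anything not covered by the higher-level methods can be expressed directly.
const rows = await repository
  .rawSelect()
  .where(eq(repository.col("status"), "active"))
  .limit(10);
```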
1572
- schemaToSqliteColumns = (tableName, schema$1, enums, tables) => {
1573
- return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
1574
- let col = this.mapFieldToSqliteColumn(tableName, key, value, enums);
1575
- if ("default" in value && value.default != null) col = col.default(value.default);
1576
- if (PG_PRIMARY_KEY in value) col = col.primaryKey();
1577
- if (PG_REF in value) {
1578
- const config = value[PG_REF];
1579
- col = col.references(() => {
1580
- const ref = config.ref();
1581
- const table = tables.get(ref.entity.name);
1582
- if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
1583
- const target = table[ref.name];
1584
- if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
1585
- return target;
1586
- }, config.actions);
1587
- }
1588
- if (schema$1.required?.includes(key)) col = col.notNull();
1589
- return {
1590
- ...columns,
1591
- [key]: col
1592
- };
1593
- }, {});
1594
- };
1595
- mapFieldToSqliteColumn = (tableName, fieldName, value, enums) => {
1596
- const key = this.toColumnName(fieldName);
1597
- if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
1598
- if (t.schema.isInteger(value)) {
1599
- if (PG_SERIAL in value || PG_IDENTITY in value) return pg$2.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
1600
- return pg$2.integer(key);
1601
- }
1602
- if (t.schema.isBigInt(value)) {
1603
- if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$2.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
1604
- return pg$2.integer(key, { mode: "number" });
1605
- }
1606
- if (t.schema.isNumber(value)) {
1607
- if (PG_IDENTITY in value) return pg$2.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
1608
- return pg$2.numeric(key);
1609
- }
1610
- if (t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
1611
- if (t.schema.isBoolean(value)) return this.sqliteBool(key, value);
1612
- if (t.schema.isObject(value)) return this.sqliteJson(key, value);
1613
- if (t.schema.isRecord(value)) return this.sqliteJson(key, value);
1614
- if (t.schema.isAny(value)) return this.sqliteJson(key, value);
1615
- if (t.schema.isArray(value)) {
1616
- if (t.schema.isObject(value.items)) return this.sqliteJson(key, value);
1617
- if (t.schema.isRecord(value.items)) return this.sqliteJson(key, value);
1618
- if (t.schema.isAny(value.items)) return this.sqliteJson(key, value);
1619
- if (t.schema.isString(value.items)) return this.sqliteJson(key, value);
1620
- if (t.schema.isInteger(value.items)) return this.sqliteJson(key, value);
1621
- if (t.schema.isNumber(value.items)) return this.sqliteJson(key, value);
1622
- if (t.schema.isBoolean(value.items)) return this.sqliteJson(key, value);
1623
- }
1624
- if (t.schema.isUnsafe(value) && "type" in value && value.type === "string") return this.mapStringToSqliteColumn(key, value);
1625
- throw new Error(`Unsupported schema for field '${tableName}.${fieldName}' (schema: ${JSON.stringify(value)})`);
1626
- };
1627
- mapStringToSqliteColumn = (key, value) => {
1628
- if (value.format === "uuid") {
1629
- if (PG_PRIMARY_KEY in value) return pg$2.text(key).primaryKey().$defaultFn(() => randomUUID());
1630
- return pg$2.text(key);
1631
- }
1632
- if (value.format === "byte") return this.sqliteJson(key, value);
1633
- if (value.format === "date-time") {
1634
- if (PG_CREATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
1635
- if (PG_UPDATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
1636
- return this.sqliteDateTime(key, {});
1942
+ /**
1943
+ * Create a Drizzle `select` query based on a JSON query object.
1944
+ *
1945
+ * > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
1946
+ */
1947
+ async findMany(query = {}, opts = {}) {
1948
+ await this.alepha.events.emit("repository:read:before", {
1949
+ tableName: this.tableName,
1950
+ query
1951
+ });
1952
+ const columns = query.columns ?? query.distinct;
1953
+ const builder = query.distinct ? this.rawSelectDistinct(opts, query.distinct) : this.rawSelect(opts);
1954
+ const joins = [];
1955
+ if (query.with) this.relationManager.buildJoins(this.provider, builder, joins, query.with, this.table);
1956
+ const where = this.withDeletedAt(query.where ?? {}, opts);
1957
+ builder.where(() => this.toSQL(where, joins));
1958
+ if (query.offset) {
1959
+ builder.offset(query.offset);
1960
+ if (this.provider.dialect === "sqlite" && !query.limit) query.limit = 1e3;
1637
1961
  }
1638
- if (value.format === "date") return this.sqliteDate(key, {});
1639
- return pg$2.text(key);
1640
- };
1641
- sqliteJson = (name, document) => pg$2.customType({
1642
- dataType: () => "text",
1643
- toDriver: (value) => JSON.stringify(value),
1644
- fromDriver: (value) => {
1645
- return value && typeof value === "string" ? JSON.parse(value) : value;
1962
+ if (query.limit) builder.limit(query.limit);
1963
+ if (query.orderBy) {
1964
+ const orderByClauses = this.queryManager.normalizeOrderBy(query.orderBy);
1965
+ builder.orderBy(...orderByClauses.map((clause) => clause.direction === "desc" ? desc(this.col(clause.column)) : asc(this.col(clause.column))));
1646
1966
  }
1647
- })(name, { document }).$type();
1648
- sqliteDateTime = pg$2.customType({
1649
- dataType: () => "integer",
1650
- toDriver: (value) => new Date(value).getTime(),
1651
- fromDriver: (value) => {
1652
- return new Date(value).toISOString();
1967
+ if (query.groupBy) builder.groupBy(...query.groupBy.map((key) => this.col(key)));
1968
+ if (opts.for) {
1969
+ if (typeof opts.for === "string") builder.for(opts.for);
1970
+ else if (opts.for) builder.for(opts.for.strength, opts.for.config);
1653
1971
  }
1654
- });
1655
- sqliteBool = pg$2.customType({
1656
- dataType: () => "integer",
1657
- toDriver: (value) => value ? 1 : 0,
1658
- fromDriver: (value) => value === 1
1659
- });
1660
- sqliteDate = pg$2.customType({
1661
- dataType: () => "integer",
1662
- toDriver: (value) => new Date(value).getTime(),
1663
- fromDriver: (value) => {
1664
- return new Date(value).toISOString().split("T")[0];
1972
+ try {
1973
+ let rows = await builder.execute();
1974
+ let schema = this.entity.schema;
1975
+ if (columns) schema = t.pick(schema, columns);
1976
+ if (joins.length) rows = rows.map((row) => {
1977
+ const rowSchema = {
1978
+ ...schema,
1979
+ properties: { ...schema.properties }
1980
+ };
1981
+ return this.relationManager.mapRowWithJoins(row[this.tableName], row, rowSchema, joins);
1982
+ });
1983
+ rows = rows.map((row) => {
1984
+ if (joins.length) {
1985
+ const joinedSchema = this.relationManager.buildSchemaWithJoins(schema, joins);
1986
+ return this.cleanWithJoins(row, joinedSchema, joins);
1987
+ }
1988
+ return this.clean(row, schema);
1989
+ });
1990
+ await this.alepha.events.emit("repository:read:after", {
1991
+ tableName: this.tableName,
1992
+ query,
1993
+ entities: rows
1994
+ });
1995
+ return rows;
1996
+ } catch (error) {
1997
+ throw new DbError("Query select has failed", error);
1665
1998
  }
1666
- });
1667
- };
1668
-
1669
- //#endregion
1670
- //#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
1671
- /**
1672
- * Cloudflare D1 SQLite provider using Drizzle ORM.
1673
- *
1674
- * This provider requires a D1 binding to be set via `cloudflareD1Options` before starting.
1675
- * The binding is typically obtained from the Cloudflare Workers environment.
1676
- *
1677
- * @example
1678
- * ```ts
1679
- * // In your Cloudflare Worker
1680
- * alepha.set(cloudflareD1Options, { binding: env.DB });
1681
- * ```
1682
- */
1683
- var CloudflareD1Provider = class extends DatabaseProvider {
1684
- kit = $inject(DrizzleKitProvider);
1685
- log = $logger();
1686
- builder = $inject(SqliteModelBuilder);
1687
- env = $env(t.object({ DATABASE_URL: t.string({ description: "Expect to be 'cloudflare-d1://name:id'" }) }));
1688
- d1;
1689
- drizzleDb;
1690
- get name() {
1691
- return "d1";
1692
1999
  }
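A sketch of the JSON query object accepted by `findMany` (field names are hypothetical; the `with` shape depends on the relations declared on the entity):

```ts
const users = await repository.findMany({
  where: { status: "active", age: { gte: 18 } },            // conditions are AND-ed
  orderBy: [{ column: "lastName" }, { column: "age", direction: "desc" }],
  limit: 50,
  offset: 0,
  // columns: ["id", "lastName"],                           // project a subset of columns
  // with: { ... },                                         // join declared relations
});
```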
1693
- dialect = "sqlite";
1694
- get url() {
1695
- return this.env.DATABASE_URL;
1696
- }
1697
- get db() {
1698
- if (!this.drizzleDb) throw new AlephaError("D1 database not initialized");
1699
- return this.drizzleDb;
1700
- }
1701
- async execute(query) {
1702
- const { rows } = await this.db.run(query);
1703
- return rows;
2000
+ /**
2001
+ * Find a single entity.
2002
+ */
2003
+ async findOne(query, opts = {}) {
2004
+ const [entity] = await this.findMany({
2005
+ limit: 1,
2006
+ ...query
2007
+ }, opts);
2008
+ if (!entity) throw new DbEntityNotFoundError(this.tableName);
2009
+ return entity;
1704
2010
  }
1705
- onStart = $hook({
1706
- on: "start",
1707
- handler: async () => {
1708
- const [bindingName] = this.env.DATABASE_URL.replace("cloudflare-d1://", "").split(":");
1709
- const cloudflareEnv = this.alepha.store.get("cloudflare.env");
1710
- if (!cloudflareEnv) throw new AlephaError("Cloudflare Workers environment not found in Alepha store under 'cloudflare.env'.");
1711
- const binding = cloudflareEnv[bindingName];
1712
- if (!binding) throw new AlephaError(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
1713
- this.d1 = binding;
1714
- const { drizzle: drizzle$3 } = await import("drizzle-orm/d1");
1715
- this.drizzleDb = drizzle$3(this.d1);
1716
- await this.migrate();
1717
- this.log.info("Using Cloudflare D1 database");
2011
+ /**
2012
+ * Find entities with pagination.
2013
+ *
2014
+ * It uses the same parameters as `find()`, but adds pagination metadata to the response.
2015
+ *
2016
+ * > Pagination CAN also do a count query to get the total number of elements.
2017
+ */
2018
+ async paginate(pagination = {}, query = {}, opts = {}) {
2019
+ const limit = query.limit ?? pagination.size ?? 10;
2020
+ const page = pagination.page ?? 0;
2021
+ const offset = query.offset ?? page * limit;
2022
+ let orderBy = query.orderBy;
2023
+ if (!query.orderBy && pagination.sort) orderBy = this.queryManager.parsePaginationSort(pagination.sort);
2024
+ const now = Date.now();
2025
+ const timers = {
2026
+ query: now,
2027
+ count: now
2028
+ };
2029
+ const tasks = [];
2030
+ tasks.push(this.findMany({
2031
+ offset,
2032
+ limit: limit + 1,
2033
+ orderBy,
2034
+ ...query
2035
+ }, opts).then((it) => {
2036
+ timers.query = Date.now() - timers.query;
2037
+ return it;
2038
+ }));
2039
+ if (opts.count) {
2040
+ const where = isSQLWrapper(query.where) ? query.where : query.where ? this.toSQL(query.where) : void 0;
2041
+ tasks.push(this.db.$count(this.table, where).then((it) => {
2042
+ timers.count = Date.now() - timers.count;
2043
+ return it;
2044
+ }));
1718
2045
  }
1719
- });
1720
- async executeMigrations(migrationsFolder) {
1721
- const { migrate: migrate$3 } = await import("drizzle-orm/d1/migrator");
1722
- await migrate$3(this.db, { migrationsFolder });
2046
+ const [entities, countResult] = await Promise.all(tasks);
2047
+ let sortMetadata;
2048
+ if (orderBy) sortMetadata = this.queryManager.normalizeOrderBy(orderBy);
2049
+ const response = this.queryManager.createPagination(entities, limit, offset, sortMetadata);
2050
+ response.page.totalElements = countResult;
2051
+ if (countResult != null) response.page.totalPages = Math.ceil(countResult / limit);
2052
+ return response;
2053
+ }
2054
+ /**
2055
+ * Find an entity by ID.
2056
+ *
2057
+ * This is a convenience method for `findOne` with a where clause on the primary key.
2058
+ * If you need more complex queries, use `findOne` instead.
2059
+ */
2060
+ async findById(id, opts = {}) {
2061
+ return await this.findOne({ where: this.getWhereId(id) }, opts);
1723
2062
  }
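A sketch combining the two methods above (field names hypothetical): `paginate` accepts the same query object as `findMany`, plus pagination parameters and an optional count.

```ts
const page = await repository.paginate(
  { page: 0, size: 20, sort: "-createdAt" },  // pagination parameters
  { where: { status: "active" } },            // same query object as findMany
  { count: true },                            // also run a COUNT query
);
// With count enabled, page.page.totalElements and page.page.totalPages are populated.

const one = await repository.findById("42"); // throws DbEntityNotFoundError when not found
```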
1724
2063
  /**
1725
- * Override development migration to skip sync (not supported on D1).
1726
- * D1 requires proper migrations to be applied.
2064
+ * Helper to create a type-safe query object.
1727
2065
  */
1728
- async runDevelopmentMigration(migrationsFolder) {
1729
- await this.executeMigrations(migrationsFolder);
2066
+ createQuery() {
2067
+ return {};
1730
2068
  }
1731
2069
  /**
1732
- * Override test migration to run migrations instead of sync.
1733
- * D1 doesn't support schema synchronization.
2070
+ * Helper to create a type-safe where clause.
1734
2071
  */
1735
- async runTestMigration() {
1736
- const migrationsFolder = this.getMigrationsFolder();
2072
+ createQueryWhere() {
2073
+ return {};
2074
+ }
2075
+ /**
2076
+ * Create an entity.
2077
+ *
2078
+ * @param data The entity to create.
2079
+ * @param opts The options for creating the entity.
2080
+ * @returns The created entity.
2081
+ */
2082
+ async create(data, opts = {}) {
2083
+ await this.alepha.events.emit("repository:create:before", {
2084
+ tableName: this.tableName,
2085
+ data
2086
+ });
1737
2087
  try {
1738
- await this.executeMigrations(migrationsFolder);
1739
- } catch {
1740
- this.log.warn("D1 migrations failed in test environment - ensure migrations exist");
2088
+ const entity = await this.rawInsert(opts).values(this.cast(data ?? {}, true)).returning(this.table).then(([it]) => this.clean(it, this.entity.schema));
2089
+ await this.alepha.events.emit("repository:create:after", {
2090
+ tableName: this.tableName,
2091
+ data,
2092
+ entity
2093
+ });
2094
+ return entity;
2095
+ } catch (error) {
2096
+ throw this.handleError(error, "Insert query has failed");
1741
2097
  }
1742
2098
  }
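A one-line sketch (hypothetical field); note that, as implemented above, the resolved value is the decoded entity returned by the database, not just its ID.

```ts
const user = await repository.create({ email: "ada@example.com" });
```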
1743
- };
1744
-
1745
- //#endregion
1746
- //#region ../../src/orm/errors/DbMigrationError.ts
1747
- var DbMigrationError = class extends DbError {
1748
- name = "DbMigrationError";
1749
- constructor(cause) {
1750
- super("Failed to migrate database", cause);
1751
- }
1752
- };
1753
-
1754
- //#endregion
1755
- //#region ../../src/orm/types/byte.ts
1756
- /**
1757
- * Postgres bytea type.
1758
- */
1759
- const byte = customType({ dataType: () => "bytea" });
1760
-
1761
- //#endregion
1762
- //#region ../../src/orm/services/PostgresModelBuilder.ts
1763
- var PostgresModelBuilder = class extends ModelBuilder {
1764
- schemas = /* @__PURE__ */ new Map();
1765
- getPgSchema(name) {
1766
- if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
1767
- const nsp = name !== "public" ? this.schemas.get(name) : {
1768
- enum: pgEnum,
1769
- table: pgTable
1770
- };
1771
- if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
1772
- return nsp;
1773
- }
1774
- buildTable(entity, options) {
1775
- const tableName = entity.name;
1776
- if (options.tables.has(tableName)) return;
1777
- const nsp = this.getPgSchema(options.schema);
1778
- const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
1779
- const configFn = this.getTableConfig(entity, options.tables);
1780
- const table = nsp.table(tableName, columns, configFn);
1781
- options.tables.set(tableName, table);
1782
- }
1783
- buildSequence(sequence, options) {
1784
- const sequenceName = sequence.name;
1785
- if (options.sequences.has(sequenceName)) return;
1786
- const nsp = this.getPgSchema(options.schema);
1787
- options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
1788
- }
1789
2099
  /**
1790
- * Get PostgreSQL-specific config builder for the table.
2100
+ * Create many entities.
2101
+ *
2102
+ * Inserts are batched in chunks of 1000 to avoid hitting database limits.
2103
+ *
2104
+ * @param values The entities to create.
2105
+ * @param opts The statement options.
2106
+ * @returns The created entities.
1791
2107
  */
1792
- getTableConfig(entity, tables) {
1793
- const pgBuilders = {
1794
- index,
1795
- uniqueIndex,
1796
- unique,
1797
- check,
1798
- foreignKey
1799
- };
1800
- const tableResolver = (entityName) => {
1801
- return tables.get(entityName);
1802
- };
1803
- return this.buildTableConfig(entity, pgBuilders, tableResolver);
1804
- }
1805
- schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
1806
- return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
1807
- let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
1808
- if ("default" in value && value.default != null) col = col.default(value.default);
1809
- if (PG_PRIMARY_KEY in value) col = col.primaryKey();
1810
- if (PG_REF in value) {
1811
- const config = value[PG_REF];
1812
- col = col.references(() => {
1813
- const ref = config.ref();
1814
- const table = tables.get(ref.entity.name);
1815
- if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
1816
- const target = table[ref.name];
1817
- if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
1818
- return target;
1819
- }, config.actions);
1820
- }
1821
- if (schema$1.required?.includes(key)) col = col.notNull();
1822
- return {
1823
- ...columns,
1824
- [key]: col
1825
- };
1826
- }, {});
1827
- };
1828
- mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
1829
- const key = this.toColumnName(fieldName);
1830
- if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
1831
- if (t.schema.isInteger(value)) {
1832
- if (PG_SERIAL in value) return pg$1.serial(key);
1833
- if (PG_IDENTITY in value) {
1834
- const options = value[PG_IDENTITY];
1835
- if (options.mode === "byDefault") return pg$1.integer().generatedByDefaultAsIdentity(options);
1836
- return pg$1.integer().generatedAlwaysAsIdentity(options);
1837
- }
1838
- return pg$1.integer(key);
1839
- }
1840
- if (t.schema.isBigInt(value)) {
1841
- if (PG_IDENTITY in value) {
1842
- const options = value[PG_IDENTITY];
1843
- if (options.mode === "byDefault") return pg$1.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
1844
- return pg$1.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
2108
+ async createMany(values, opts = {}) {
2109
+ if (values.length === 0) return [];
2110
+ await this.alepha.events.emit("repository:create:before", {
2111
+ tableName: this.tableName,
2112
+ data: values
2113
+ });
2114
+ const batchSize = opts.batchSize ?? 1e3;
2115
+ const allEntities = [];
2116
+ try {
2117
+ for (let i = 0; i < values.length; i += batchSize) {
2118
+ const batch = values.slice(i, i + batchSize);
2119
+ const entities = await this.rawInsert(opts).values(batch.map((data) => this.cast(data, true))).returning(this.table).then((rows) => rows.map((it) => this.clean(it, this.entity.schema)));
2120
+ allEntities.push(...entities);
1845
2121
  }
2122
+ await this.alepha.events.emit("repository:create:after", {
2123
+ tableName: this.tableName,
2124
+ data: values,
2125
+ entity: allEntities
2126
+ });
2127
+ return allEntities;
2128
+ } catch (error) {
2129
+ throw this.handleError(error, "Insert query has failed");
1846
2130
  }
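Sketch of a bulk insert; the default chunk size is 1000 and can be tuned per call via `batchSize` (rows here are hypothetical):

```ts
const rows = [{ name: "a" }, { name: "b" }, { name: "c" }];
const created = await repository.createMany(rows, { batchSize: 500 });
```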
1847
- if (t.schema.isNumber(value)) {
1848
- if (PG_IDENTITY in value) {
1849
- const options = value[PG_IDENTITY];
1850
- if (options.mode === "byDefault") return pg$1.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
1851
- return pg$1.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
1852
- }
1853
- if (value.format === "int64") return pg$1.bigint(key, { mode: "number" });
1854
- return pg$1.numeric(key);
2131
+ }
2132
+ /**
2133
+ * Find an entity and update it.
2134
+ */
2135
+ async updateOne(where, data, opts = {}) {
2136
+ await this.alepha.events.emit("repository:update:before", {
2137
+ tableName: this.tableName,
2138
+ where,
2139
+ data
2140
+ });
2141
+ let row = data;
2142
+ const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
2143
+ if (updatedAtField) row[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
2144
+ where = this.withDeletedAt(where, opts);
2145
+ row = this.cast(row, false);
2146
+ delete row[this.id.key];
2147
+ const response = await this.rawUpdate(opts).set(row).where(this.toSQL(where)).returning(this.table).catch((error) => {
2148
+ throw this.handleError(error, "Update query has failed");
2149
+ });
2150
+ if (!response[0]) throw new DbEntityNotFoundError(this.tableName);
2151
+ try {
2152
+ const entity = this.clean(response[0], this.entity.schema);
2153
+ await this.alepha.events.emit("repository:update:after", {
2154
+ tableName: this.tableName,
2155
+ where,
2156
+ data,
2157
+ entities: [entity]
2158
+ });
2159
+ return entity;
2160
+ } catch (error) {
2161
+ throw this.handleError(error, "Update query has failed");
1855
2162
  }
1856
- if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
1857
- if (t.schema.isBoolean(value)) return pg$1.boolean(key);
1858
- if (t.schema.isObject(value)) return schema(key, value);
1859
- if (t.schema.isRecord(value)) return schema(key, value);
1860
- const isTypeEnum = (value$1) => t.schema.isUnsafe(value$1) && "type" in value$1 && value$1.type === "string" && "enum" in value$1 && Array.isArray(value$1.enum);
1861
- if (t.schema.isArray(value)) {
1862
- if (t.schema.isObject(value.items)) return schema(key, value);
1863
- if (t.schema.isRecord(value.items)) return schema(key, value);
1864
- if (t.schema.isString(value.items)) return pg$1.text(key).array();
1865
- if (t.schema.isInteger(value.items)) return pg$1.integer(key).array();
1866
- if (t.schema.isNumber(value.items)) return pg$1.numeric(key).array();
1867
- if (t.schema.isBoolean(value.items)) return pg$1.boolean(key).array();
1868
- if (isTypeEnum(value.items)) return pg$1.text(key).array();
2163
+ }
2164
+ /**
2165
+ * Save a given entity.
2166
+ *
2167
+ * @example
2168
+ * ```ts
2169
+ * const entity = await repository.findById(1);
2170
+ * entity.name = "New Name"; // update a field
2171
+ * delete entity.description; // delete a field
2172
+ * await repository.save(entity);
2173
+ * ```
2174
+ *
2175
+ * Difference with `updateById/updateOne`:
2176
+ *
2177
+ * - requires the entity to be fetched first (whole object is expected)
2178
+ * - check pg.version() if present -> optimistic locking
2179
+ * - validate entity against schema
2180
+ * - undefined values will be set to null, not ignored!
2181
+ *
2182
+ * @see {@link DbVersionMismatchError}
2183
+ */
2184
+ async save(entity, opts = {}) {
2185
+ const row = entity;
2186
+ const id = row[this.id.key];
2187
+ if (id == null) throw new AlephaError("Cannot save entity without ID - missing primary key in value");
2188
+ for (const key of Object.keys(this.entity.schema.properties)) if (row[key] === void 0) row[key] = null;
2189
+ let where = this.createQueryWhere();
2190
+ where.id = { eq: id };
2191
+ const versionField = getAttrFields(this.entity.schema, PG_VERSION)?.[0];
2192
+ if (versionField && typeof row[versionField.key] === "number") {
2193
+ where = { and: [where, { [versionField.key]: { eq: row[versionField.key] } }] };
2194
+ row[versionField.key] += 1;
1869
2195
  }
1870
- if (isTypeEnum(value)) {
1871
- if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
1872
- if (PG_ENUM in value && value[PG_ENUM]) {
1873
- const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
1874
- if (enums.has(enumName)) {
1875
- const values = enums.get(enumName).enumValues.join(",");
1876
- const newValues = value.enum.join(",");
1877
- if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
1878
- }
1879
- enums.set(enumName, nsp.enum(enumName, value.enum));
1880
- return enums.get(enumName)(key);
2196
+ try {
2197
+ const newValue = await this.updateOne(where, row, opts);
2198
+ for (const key of Object.keys(this.entity.schema.properties)) row[key] = void 0;
2199
+ Object.assign(row, newValue);
2200
+ } catch (error) {
2201
+ if (error instanceof DbEntityNotFoundError && versionField) try {
2202
+ await this.findById(id);
2203
+ throw new DbVersionMismatchError(this.tableName, id);
2204
+ } catch (lookupError) {
2205
+ if (lookupError instanceof DbEntityNotFoundError) throw error;
2206
+ if (lookupError instanceof DbVersionMismatchError) throw lookupError;
2207
+ throw lookupError;
1881
2208
  }
1882
- return this.mapStringToColumn(key, value);
2209
+ throw error;
1883
2210
  }
1884
- throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
1885
- };
2211
+ }
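A sketch of the optimistic-locking path described above, assuming the entity schema declares a version attribute and that `DbVersionMismatchError` is imported from the package (entity fields hypothetical):

```ts
const order = await repository.findById("42");
order.status = "shipped";
try {
  await repository.save(order); // bumps the version column as part of the update
} catch (error) {
  if (error instanceof DbVersionMismatchError) {
    // A newer version was saved concurrently: reload the entity and retry.
  } else {
    throw error;
  }
}
```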
1886
2212
  /**
1887
- * Map a string to a PG column.
1888
- *
1889
- * @param key The key of the field.
1890
- * @param value The value of the field.
2213
+ * Find an entity by ID and update it.
1891
2214
  */
1892
- mapStringToColumn = (key, value) => {
1893
- if ("format" in value) {
1894
- if (value.format === "uuid") {
1895
- if (PG_PRIMARY_KEY in value) return pg$1.uuid(key).defaultRandom();
1896
- return pg$1.uuid(key);
1897
- }
1898
- if (value.format === "byte") return byte(key);
1899
- if (value.format === "date-time") {
1900
- if (PG_CREATED_AT in value) return pg$1.timestamp(key, {
1901
- mode: "string",
1902
- withTimezone: true
1903
- }).defaultNow();
1904
- if (PG_UPDATED_AT in value) return pg$1.timestamp(key, {
1905
- mode: "string",
1906
- withTimezone: true
1907
- }).defaultNow();
1908
- return pg$1.timestamp(key, {
1909
- mode: "string",
1910
- withTimezone: true
1911
- });
1912
- }
1913
- if (value.format === "date") return pg$1.date(key, { mode: "string" });
1914
- }
1915
- return pg$1.text(key);
1916
- };
1917
- };
1918
-
1919
- //#endregion
1920
- //#region ../../src/orm/providers/drivers/NodePostgresProvider.ts
1921
- const envSchema$2 = t.object({
1922
- DATABASE_URL: t.optional(t.text()),
1923
- POSTGRES_SCHEMA: t.optional(t.text())
1924
- });
1925
- var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
1926
- static SSL_MODES = [
1927
- "require",
1928
- "allow",
1929
- "prefer",
1930
- "verify-full"
1931
- ];
1932
- log = $logger();
1933
- env = $env(envSchema$2);
1934
- kit = $inject(DrizzleKitProvider);
1935
- builder = $inject(PostgresModelBuilder);
1936
- client;
1937
- pg;
1938
- dialect = "postgresql";
1939
- get name() {
1940
- return "postgres";
2215
+ async updateById(id, data, opts = {}) {
2216
+ return await this.updateOne(this.getWhereId(id), data, opts);
1941
2217
  }
1942
2218
  /**
1943
- * In testing mode, the schema name will be generated and deleted after the test.
2219
+ * Find many entities and update all of them.
1944
2220
  */
1945
- schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
1946
- get url() {
1947
- if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
1948
- return this.env.DATABASE_URL;
2221
+ async updateMany(where, data, opts = {}) {
2222
+ await this.alepha.events.emit("repository:update:before", {
2223
+ tableName: this.tableName,
2224
+ where,
2225
+ data
2226
+ });
2227
+ const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
2228
+ if (updatedAtField) data[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
2229
+ where = this.withDeletedAt(where, opts);
2230
+ data = this.cast(data, false);
2231
+ try {
2232
+ const entities = await this.rawUpdate(opts).set(data).where(this.toSQL(where)).returning();
2233
+ await this.alepha.events.emit("repository:update:after", {
2234
+ tableName: this.tableName,
2235
+ where,
2236
+ data,
2237
+ entities
2238
+ });
2239
+ return entities.map((it) => it[this.id.key]);
2240
+ } catch (error) {
2241
+ throw this.handleError(error, "Update query has failed");
2242
+ }
1949
2243
  }
1950
2244
  /**
1951
- * Execute a SQL statement.
2245
+ * Find many and delete all of them.
2246
+ * @returns Array of deleted entity IDs
1952
2247
  */
1953
- execute(statement) {
2248
+ async deleteMany(where = {}, opts = {}) {
2249
+ const deletedAt = this.deletedAt();
2250
+ if (deletedAt && !opts.force) return await this.updateMany(where, { [deletedAt.key]: opts.now ?? this.dateTimeProvider.nowISOString() }, opts);
2251
+ await this.alepha.events.emit("repository:delete:before", {
2252
+ tableName: this.tableName,
2253
+ where
2254
+ });
1954
2255
  try {
1955
- return this.db.execute(statement);
2256
+ const ids = (await this.rawDelete(opts).where(this.toSQL(where)).returning({ id: this.table[this.id.key] })).map((row) => row.id);
2257
+ await this.alepha.events.emit("repository:delete:after", {
2258
+ tableName: this.tableName,
2259
+ where,
2260
+ ids
2261
+ });
2262
+ return ids;
1956
2263
  } catch (error) {
1957
- throw new DbError("Error executing statement", error);
2264
+ throw new DbError("Delete query has failed", error);
1958
2265
  }
1959
2266
  }
1960
2267
  /**
1961
- * Get Postgres schema used by this provider.
2268
+ * Delete all entities.
2269
+ * @returns Array of deleted entity IDs
1962
2270
  */
1963
- get schema() {
1964
- if (this.schemaForTesting) return this.schemaForTesting;
1965
- if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
1966
- return "public";
2271
+ clear(opts = {}) {
2272
+ return this.deleteMany({}, opts);
1967
2273
  }
1968
2274
  /**
1969
- * Get the Drizzle Postgres database instance.
2275
+ * Delete the given entity.
2276
+ *
2277
+ * You must fetch the entity first in order to delete it.
2278
+ * @returns Array containing the deleted entity ID
1970
2279
  */
1971
- get db() {
1972
- if (!this.pg) throw new AlephaError("Database not initialized");
1973
- return this.pg;
1974
- }
1975
- async executeMigrations(migrationsFolder) {
1976
- await migrate(this.db, { migrationsFolder });
1977
- }
1978
- onStart = $hook({
1979
- on: "start",
1980
- handler: async () => {
1981
- await this.connect();
1982
- if (!this.alepha.isServerless()) try {
1983
- await this.migrateLock.run();
1984
- } catch (error) {
1985
- throw new DbMigrationError(error);
1986
- }
1987
- }
1988
- });
1989
- onStop = $hook({
1990
- on: "stop",
1991
- handler: async () => {
1992
- if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
1993
- if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
1994
- this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
1995
- await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
1996
- this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
1997
- }
1998
- await this.close();
2280
+ async destroy(entity, opts = {}) {
2281
+ const id = entity[this.id.key];
2282
+ if (id == null) throw new AlephaError("Cannot destroy entity without ID");
2283
+ const deletedAt = this.deletedAt();
2284
+ if (deletedAt && !opts.force) {
2285
+ opts.now ??= this.dateTimeProvider.nowISOString();
2286
+ entity[deletedAt.key] = opts.now;
1999
2287
  }
2000
- });
2001
- async connect() {
2002
- this.log.debug("Connect ..");
2003
- const client = postgres(this.getClientOptions());
2004
- await client`SELECT 1`;
2005
- this.client = client;
2006
- this.pg = drizzle$1(client, { logger: { logQuery: (query, params) => {
2007
- this.log.trace(query, { params });
2008
- } } });
2009
- this.log.info("Connection OK");
2288
+ return await this.deleteById(id, opts);
2010
2289
  }
2011
- async close() {
2012
- if (this.client) {
2013
- this.log.debug("Close...");
2014
- await this.client.end();
2015
- this.client = void 0;
2016
- this.pg = void 0;
2017
- this.log.info("Connection closed");
2018
- }
2290
+ /**
2291
+ * Find an entity and delete it.
2292
+ * @returns Array of deleted entity IDs (should contain at most one ID)
2293
+ */
2294
+ async deleteOne(where = {}, opts = {}) {
2295
+ return await this.deleteMany(where, opts);
2019
2296
  }
2020
- migrateLock = $lock({ handler: async () => {
2021
- await this.migrate();
2022
- } });
2023
2297
  /**
2024
- * Map the DATABASE_URL to postgres client options.
2298
+ * Find an entity by ID and delete it.
2299
+ * @returns Array containing the deleted entity ID
2300
+ * @throws DbEntityNotFoundError if the entity is not found
2025
2301
  */
2026
- getClientOptions() {
2027
- const url = new URL(this.url);
2028
- return {
2029
- host: url.hostname,
2030
- user: decodeURIComponent(url.username),
2031
- database: decodeURIComponent(url.pathname.replace("/", "")),
2032
- password: decodeURIComponent(url.password),
2033
- port: Number(url.port || 5432),
2034
- ssl: this.ssl(url),
2035
- onnotice: () => {}
2036
- };
2302
+ async deleteById(id, opts = {}) {
2303
+ const result = await this.deleteMany(this.getWhereId(id), opts);
2304
+ if (result.length === 0) throw new DbEntityNotFoundError(`Entity with ID ${id} not found in ${this.tableName}`);
2305
+ return result;
2037
2306
  }
2038
- ssl(url) {
2039
- const mode = url.searchParams.get("sslmode");
2040
- for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
2307
+ /**
2308
+ * Count entities.
2309
+ */
2310
+ async count(where = {}, opts = {}) {
2311
+ where = this.withDeletedAt(where, opts);
2312
+ return (opts.tx ?? this.db).$count(this.table, this.toSQL(where));
2041
2313
  }
2042
- };
2043
-
2044
- //#endregion
2045
- //#region ../../src/orm/providers/drivers/NodeSqliteProvider.ts
2046
- const envSchema$1 = t.object({ DATABASE_URL: t.optional(t.text()) });
2047
- /**
2048
- * Configuration options for the Node.js SQLite database provider.
2049
- */
2050
- const nodeSqliteOptions = $atom({
2051
- name: "alepha.postgres.node-sqlite.options",
2052
- schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
2053
- default: {}
2054
- });
2055
- /**
2056
- * Add a fake support for SQLite in Node.js based on Postgres interfaces.
2057
- *
2058
- * This is NOT a real SQLite provider, it's a workaround to use SQLite with Drizzle ORM.
2059
- * This is NOT recommended for production use.
2060
- */
2061
- var NodeSqliteProvider = class extends DatabaseProvider {
2062
- kit = $inject(DrizzleKitProvider);
2063
- log = $logger();
2064
- env = $env(envSchema$1);
2065
- builder = $inject(SqliteModelBuilder);
2066
- options = $use(nodeSqliteOptions);
2067
- sqlite;
2068
- get name() {
2069
- return "sqlite";
2314
+ conflictMessagePattern = "duplicate key value violates unique constraint";
2315
+ handleError(error, message) {
2316
+ if (!(error instanceof Error)) return new DbError(message);
2317
+ if (error.cause?.message.includes(this.conflictMessagePattern) || error.message.includes(this.conflictMessagePattern)) return new DbConflictError(message, error);
2318
+ return new DbError(message, error);
2319
+ }
2320
+ withDeletedAt(where, opts = {}) {
2321
+ if (opts.force) return where;
2322
+ const deletedAt = this.deletedAt();
2323
+ if (!deletedAt) return where;
2324
+ return { and: [where, { [deletedAt.key]: { isNull: true } }] };
2325
+ }
2326
+ deletedAt() {
2327
+ const deletedAtFields = getAttrFields(this.entity.schema, PG_DELETED_AT);
2328
+ if (deletedAtFields.length > 0) return deletedAtFields[0];
2070
2329
  }
2071
- dialect = "sqlite";
2072
- get url() {
2073
- const path = this.options.path ?? this.env.DATABASE_URL;
2074
- if (path) {
2075
- if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
2076
- return path;
2077
- }
2078
- if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
2079
- else return "node_modules/.alepha/sqlite.db";
2330
+ /**
2331
+ * Convert something to valid Pg Insert Value.
2332
+ */
2333
+ cast(data, insert) {
2334
+ const schema = insert ? this.entity.insertSchema : t.partial(this.entity.updateSchema);
2335
+ return this.alepha.codec.encode(schema, data);
2080
2336
  }
2081
- async execute(query) {
2082
- const { sql: sql$2, params, method } = this.db.all(query).getQuery();
2083
- this.log.trace(`${sql$2}`, params);
2084
- const statement = this.sqlite.prepare(sql$2);
2085
- if (method === "run") {
2086
- statement.run(...params);
2087
- return [];
2088
- }
2089
- if (method === "get") {
2090
- const data = statement.get(...params);
2091
- return data ? [{ ...data }] : [];
2337
+ /**
2338
+ * Transform a row from the database into a clean entity.
2339
+ */
2340
+ clean(row, schema) {
2341
+ for (const key of Object.keys(schema.properties)) {
2342
+ const value = schema.properties[key];
2343
+ if (typeof row[key] === "string") {
2344
+ if (t.schema.isDateTime(value)) row[key] = this.dateTimeProvider.of(row[key]).toISOString();
2345
+ else if (t.schema.isDate(value)) row[key] = this.dateTimeProvider.of(`${row[key]}T00:00:00Z`).toISOString().split("T")[0];
2346
+ }
2347
+ if (typeof row[key] === "bigint" && t.schema.isBigInt(value)) row[key] = row[key].toString();
2092
2348
  }
2093
- return statement.all(...params);
2349
+ return this.alepha.codec.decode(schema, row);
2094
2350
  }
2095
- db = drizzle$2(async (sql$2, params, method) => {
2096
- const statement = this.sqlite.prepare(sql$2);
2097
- this.log.trace(`${sql$2}`, { params });
2098
- if (method === "get") {
2099
- const data = statement.get(...params);
2100
- return { rows: data ? [{ ...data }] : [] };
2101
- }
2102
- if (method === "run") {
2103
- statement.run(...params);
2104
- return { rows: [] };
2351
+ /**
2352
+ * Clean a row with joins recursively
2353
+ */
2354
+ cleanWithJoins(row, schema, joins, parentPath) {
2355
+ const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
2356
+ const cleanRow = { ...row };
2357
+ const joinedData = {};
2358
+ for (const join of joinsAtThisLevel) {
2359
+ joinedData[join.key] = cleanRow[join.key];
2360
+ delete cleanRow[join.key];
2105
2361
  }
2106
- if (method === "all") return { rows: statement.all(...params).map((row) => Object.values(row)) };
2107
- if (method === "values") return { rows: statement.all(...params).map((row) => Object.values(row)) };
2108
- throw new AlephaError(`Unsupported method: ${method}`);
2109
- });
2110
- onStart = $hook({
2111
- on: "start",
2112
- handler: async () => {
2113
- const { DatabaseSync } = await import("node:sqlite");
2114
- const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
2115
- if (filepath !== ":memory:" && filepath !== "") {
2116
- const dirname = filepath.split("/").slice(0, -1).join("/");
2117
- if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
2118
- }
2119
- this.sqlite = new DatabaseSync(filepath);
2120
- await this.migrate();
2121
- this.log.info(`Using SQLite database at ${filepath}`);
2362
+ const entity = this.clean(cleanRow, schema);
2363
+ for (const join of joinsAtThisLevel) {
2364
+ const joinedValue = joinedData[join.key];
2365
+ if (joinedValue != null) {
2366
+ const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
2367
+ if (joins.filter((j) => j.parent === joinPath).length > 0) entity[join.key] = this.cleanWithJoins(joinedValue, join.schema, joins, joinPath);
2368
+ else entity[join.key] = this.clean(joinedValue, join.schema);
2369
+ } else entity[join.key] = void 0;
2122
2370
  }
2123
- });
2124
- async executeMigrations(migrationsFolder) {
2125
- await migrate$1(this.db, async (migrationQueries) => {
2126
- this.log.debug("Executing migration queries", { migrationQueries });
2127
- for (const query of migrationQueries) this.sqlite.prepare(query).run();
2128
- }, { migrationsFolder });
2371
+ return entity;
2129
2372
  }
2130
- };
2131
-
2132
- //#endregion
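The SQLite shim above (NodeSqliteProvider, likewise relocated but still exported) resolves its database path from `nodeSqliteOptions.path` or DATABASE_URL, falls back to `:memory:` in test/serverless mode and to `node_modules/.alepha/sqlite.db` otherwise, and rejects `postgres://` URLs. A hedged sketch of the two common configurations (the file path is an example only):

```ts
// 1. In-memory database — also the automatic fallback in test / serverless mode.
process.env.DATABASE_URL = "sqlite::memory:";

// 2. File-backed database — the "sqlite:" prefix is stripped at startup and the
//    parent directory is created if it does not exist yet.
process.env.DATABASE_URL = "sqlite:./.data/app.db";
```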
2133
- //#region ../../src/orm/providers/drivers/PglitePostgresProvider.ts
2134
- const envSchema = t.object({ DATABASE_URL: t.optional(t.text()) });
2135
- var PglitePostgresProvider = class PglitePostgresProvider extends DatabaseProvider {
2136
- static importPglite() {
2137
- try {
2138
- return createRequire(import.meta.url)("@electric-sql/pglite");
2139
- } catch {}
2373
+ /**
2374
+ * Convert a where clause to SQL.
2375
+ */
2376
+ toSQL(where, joins) {
2377
+ return this.queryManager.toSQL(where, {
2378
+ schema: this.entity.schema,
2379
+ col: (name) => {
2380
+ return this.col(name);
2381
+ },
2382
+ joins,
2383
+ dialect: this.provider.dialect
2384
+ });
2140
2385
  }
2141
- env = $env(envSchema);
2142
- log = $logger();
2143
- kit = $inject(DrizzleKitProvider);
2144
- builder = $inject(PostgresModelBuilder);
2145
- client;
2146
- pglite;
2147
- get name() {
2148
- return "pglite";
2386
+ /**
2387
+ * Get the where clause for an ID.
2388
+ *
2389
+ * @param id The ID to get the where clause for.
2390
+ * @returns The where clause for the ID.
2391
+ */
2392
+ getWhereId(id) {
2393
+ return { [this.id.key]: { eq: t.schema.isString(this.id.type) ? String(id) : Number(id) } };
2149
2394
  }
2150
- dialect = "postgresql";
2151
- get url() {
2152
- let path = this.env.DATABASE_URL;
2153
- if (!path) if (this.alepha.isTest()) path = ":memory:";
2154
- else path = "node_modules/.alepha/pglite";
2155
- else if (path.includes(":memory:")) path = ":memory:";
2156
- else if (path.startsWith("file://")) path = path.replace("file://", "");
2157
- return path;
2395
+ /**
2396
+ * Find a primary key in the schema.
2397
+ */
2398
+ getPrimaryKey(schema) {
2399
+ const primaryKeys = getAttrFields(schema, PG_PRIMARY_KEY);
2400
+ if (primaryKeys.length === 0) throw new AlephaError("Primary key not found in schema");
2401
+ if (primaryKeys.length > 1) throw new AlephaError(`Multiple primary keys (${primaryKeys.length}) are not supported`);
2402
+ return {
2403
+ key: primaryKeys[0].key,
2404
+ col: this.col(primaryKeys[0].key),
2405
+ type: primaryKeys[0].type
2406
+ };
2158
2407
  }
2159
- get db() {
2160
- if (!this.pglite) throw new AlephaError("Database not initialized");
2161
- return this.pglite;
2408
+ };
2409
+
2410
+ //#endregion
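The Repository block added above covers the full update/delete surface: `updateById` delegates to `updateOne`, `updateMany` returns the affected IDs, `deleteMany` soft-deletes by stamping the PG_DELETED_AT column unless `{ force: true }` is passed, `deleteById` throws `DbEntityNotFoundError` when nothing matched, and `count` applies the same soft-delete filter. A short usage sketch, assuming a `users` repository obtained elsewhere (for example via the `$repository` primitive added later in this diff) and assumed field names:

```ts
import { DbEntityNotFoundError } from "alepha/postgres";

// `users` stands in for a Repository instance; its entity fields ("email", "status")
// are assumptions made for the sake of the example.
declare const users: any;

async function demo() {
  // Update a single row by primary key (delegates to updateOne internally).
  await users.updateById("42", { email: "new@example.com" });

  // Soft delete by default; { force: true } issues a real DELETE and returns the IDs.
  const deletedIds = await users.deleteMany({ status: { eq: "inactive" } }, { force: true });

  // deleteById throws when no row matched.
  try {
    await users.deleteById("does-not-exist");
  } catch (err) {
    if (!(err instanceof DbEntityNotFoundError)) throw err;
  }

  // count() applies the same deleted-at filter as the other helpers.
  const remaining = await users.count({ status: { eq: "active" } });
  return { deletedIds, remaining };
}
```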
2411
+ //#region ../../src/orm/providers/RepositoryProvider.ts
2412
+ var RepositoryProvider = class {
2413
+ alepha = $inject(Alepha);
2414
+ registry = /* @__PURE__ */ new Map();
2415
+ getRepositories(provider) {
2416
+ const repositories = this.alepha.services(Repository);
2417
+ if (provider) return repositories.filter((it) => it.provider === provider);
2418
+ return repositories;
2162
2419
  }
2163
- async execute(statement) {
2164
- const { rows } = await this.db.execute(statement);
2165
- return rows;
2420
+ getRepository(entity) {
2421
+ const RepositoryClass = this.createClassRepository(entity);
2422
+ return this.alepha.inject(RepositoryClass);
2166
2423
  }
2167
- onStart = $hook({
2168
- on: "start",
2169
- handler: async () => {
2170
- if (Object.keys(this.kit.getModels(this)).length === 0) return;
2171
- const module = PglitePostgresProvider.importPglite();
2172
- if (!module) throw new AlephaError("@electric-sql/pglite is not installed. Please install it to use the pglite driver.");
2173
- const { drizzle: drizzle$3 } = createRequire(import.meta.url)("drizzle-orm/pglite");
2174
- const path = this.url;
2175
- if (path !== ":memory:") {
2176
- await mkdir(path, { recursive: true }).catch(() => null);
2177
- this.client = new module.PGlite(path);
2178
- } else this.client = new module.PGlite();
2179
- this.pglite = drizzle$3({ client: this.client });
2180
- await this.migrate();
2181
- this.log.info(`Using PGlite database at ${path}`);
2182
- }
2183
- });
2184
- onStop = $hook({
2185
- on: "stop",
2186
- handler: async () => {
2187
- if (this.client) {
2188
- this.log.debug("Closing PGlite connection...");
2189
- await this.client.close();
2190
- this.client = void 0;
2191
- this.pglite = void 0;
2192
- this.log.info("PGlite connection closed");
2424
+ createClassRepository(entity) {
2425
+ let name = entity.name.charAt(0).toUpperCase() + entity.name.slice(1);
2426
+ if (name.endsWith("s")) name = name.slice(0, -1);
2427
+ name = `${name}Repository`;
2428
+ if (this.registry.has(entity)) return this.registry.get(entity);
2429
+ class GenericRepository extends Repository {
2430
+ constructor() {
2431
+ super(entity);
2193
2432
  }
2194
2433
  }
2195
- });
2196
- async executeMigrations(migrationsFolder) {
2197
- await migrate$2(this.db, { migrationsFolder });
2434
+ Object.defineProperty(GenericRepository, "name", { value: name });
2435
+ this.registry.set(entity, GenericRepository);
2436
+ return GenericRepository;
2198
2437
  }
2199
2438
  };
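RepositoryProvider, shown just above, is the programmatic counterpart of the `$repository` primitive: `getRepository(entity)` builds (and caches) a dedicated Repository subclass whose name is derived from the entity, so an entity named `users` yields a `UserRepository` class. A hedged sketch, assuming an existing application instance and entity:

```ts
import { RepositoryProvider } from "alepha/postgres";

// Both declarations are assumptions: `alepha` is the running application container
// and `usersEntity` an entity declared elsewhere with $entity(...).
declare const alepha: { inject<T>(token: new (...args: any[]) => T): T };
declare const usersEntity: any;

// Returns the cached, entity-specific repository instance.
const users = alepha.inject(RepositoryProvider).getRepository(usersEntity);
```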
2200
2439
 
@@ -2475,8 +2714,8 @@ function buildQueryString(where) {
2475
2714
  if (!where || typeof where !== "object") return "";
2476
2715
  if ("and" in where && Array.isArray(where.and)) return where.and.map((w) => buildQueryString(w)).join("&");
2477
2716
  if ("or" in where && Array.isArray(where.or)) {
2478
- const parts$1 = where.or.map((w) => buildQueryString(w));
2479
- return parts$1.length > 1 ? `(${parts$1.join("|")})` : parts$1[0];
2717
+ const parts = where.or.map((w) => buildQueryString(w));
2718
+ return parts.length > 1 ? `(${parts.join("|")})` : parts[0];
2480
2719
  }
2481
2720
  if ("not" in where) return "";
2482
2721
  const parts = [];
@@ -2507,32 +2746,6 @@ function buildQueryString(where) {
2507
2746
  return parts.join("&");
2508
2747
  }
2509
2748
 
2510
- //#endregion
2511
- //#region ../../src/orm/primitives/$transaction.ts
2512
- /**
2513
- * Creates a transaction primitive for database operations requiring atomicity and consistency.
2514
- *
2515
- * This primitive provides a convenient way to wrap database operations in PostgreSQL
2516
- * transactions, ensuring ACID properties and automatic retry logic for version conflicts.
2517
- * It integrates seamlessly with the repository pattern and provides built-in handling
2518
- * for optimistic locking scenarios with automatic retry on version mismatches.
2519
- *
2520
- * **Important Notes**:
2521
- * - All operations within the transaction handler are atomic
2522
- * - Automatic retry on `PgVersionMismatchError` for optimistic locking
2523
- * - Pass `{ tx }` option to all repository operations within the transaction
2524
- * - Transactions are automatically rolled back on any unhandled error
2525
- * - Use appropriate isolation levels based on your consistency requirements
2526
- */
2527
- const $transaction = (opts) => {
2528
- const { alepha } = $context();
2529
- const provider = alepha.inject(DatabaseProvider);
2530
- return $retry({
2531
- when: (err) => err instanceof DbVersionMismatchError,
2532
- handler: (...args) => provider.db.transaction(async (tx) => opts.handler(tx, ...args), opts.config)
2533
- });
2534
- };
2535
-
2536
2749
  //#endregion
2537
2750
  //#region ../../src/orm/providers/DatabaseTypeProvider.ts
2538
2751
  var DatabaseTypeProvider = class {
@@ -2654,11 +2867,101 @@ const pg = db;
2654
2867
  */
2655
2868
  const legacyIdSchema = pgAttr(pgAttr(pgAttr(t.integer(), PG_PRIMARY_KEY), PG_SERIAL), PG_DEFAULT);
2656
2869
 
2870
+ //#endregion
2871
+ //#region ../../src/orm/primitives/$repository.ts
2872
+ /**
2873
+ * Get the repository for the given entity.
2874
+ */
2875
+ const $repository = (entity) => {
2876
+ const { alepha } = $context();
2877
+ return $inject(alepha.inject(RepositoryProvider).createClassRepository(entity));
2878
+ };
2879
+
2880
+ //#endregion
2881
+ //#region ../../src/orm/primitives/$transaction.ts
2882
+ /**
2883
+ * Creates a transaction primitive for database operations requiring atomicity and consistency.
2884
+ *
2885
+ * This primitive provides a convenient way to wrap database operations in PostgreSQL
2886
+ * transactions, ensuring ACID properties and automatic retry logic for version conflicts.
2887
+ * It integrates seamlessly with the repository pattern and provides built-in handling
2888
+ * for optimistic locking scenarios with automatic retry on version mismatches.
2889
+ *
2890
+ * **Important Notes**:
2891
+ * - All operations within the transaction handler are atomic
2892
+ * - Automatic retry on `PgVersionMismatchError` for optimistic locking
2893
+ * - Pass `{ tx }` option to all repository operations within the transaction
2894
+ * - Transactions are automatically rolled back on any unhandled error
2895
+ * - Use appropriate isolation levels based on your consistency requirements
2896
+ */
2897
+ const $transaction = (opts) => {
2898
+ const { alepha } = $context();
2899
+ const provider = alepha.inject(DatabaseProvider);
2900
+ return $retry({
2901
+ when: (err) => err instanceof DbVersionMismatchError,
2902
+ handler: (...args) => provider.db.transaction(async (tx) => opts.handler(tx, ...args), opts.config)
2903
+ });
2904
+ };
2905
+
2657
2906
  //#endregion
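`$transaction`, re-added above after the relocation, wraps the handler in `provider.db.transaction` and retries when a `DbVersionMismatchError` is thrown; per its notes, every repository call inside the handler must receive the `tx` handle. A usage sketch with assumed repositories and field names:

```ts
import { $transaction } from "alepha/postgres";

// `orders` and `payments` stand in for repositories obtained via $repository;
// the entity fields used below are assumptions.
declare const orders: any;
declare const payments: any;

class CheckoutService {
  // All calls inside the handler share the same transaction by forwarding { tx };
  // the transaction is rolled back on any unhandled error and retried on
  // DbVersionMismatchError (optimistic locking).
  settleOrder = $transaction({
    handler: async (tx: unknown, orderId: string) => {
      await orders.updateById(orderId, { status: "paid" }, { tx });
      await payments.updateMany({ orderId: { eq: orderId } }, { settled: true }, { tx });
    },
  });
}
```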
2658
2907
  //#region ../../src/orm/index.ts
2908
+ var orm_exports = /* @__PURE__ */ __exportAll({
2909
+ $entity: () => $entity,
2910
+ $repository: () => $repository,
2911
+ $sequence: () => $sequence,
2912
+ $transaction: () => $transaction,
2913
+ AlephaPostgres: () => AlephaPostgres,
2914
+ BunPostgresProvider: () => BunPostgresProvider,
2915
+ BunSqliteProvider: () => BunSqliteProvider,
2916
+ CloudflareD1Provider: () => CloudflareD1Provider,
2917
+ DatabaseProvider: () => DatabaseProvider,
2918
+ DatabaseTypeProvider: () => DatabaseTypeProvider,
2919
+ DbConflictError: () => DbConflictError,
2920
+ DbEntityNotFoundError: () => DbEntityNotFoundError,
2921
+ DbError: () => DbError,
2922
+ DbMigrationError: () => DbMigrationError,
2923
+ DbVersionMismatchError: () => DbVersionMismatchError,
2924
+ DrizzleKitProvider: () => DrizzleKitProvider,
2925
+ EntityPrimitive: () => EntityPrimitive,
2926
+ NodePostgresProvider: () => NodePostgresProvider,
2927
+ NodeSqliteProvider: () => NodeSqliteProvider,
2928
+ PG_CREATED_AT: () => PG_CREATED_AT,
2929
+ PG_DEFAULT: () => PG_DEFAULT,
2930
+ PG_DELETED_AT: () => PG_DELETED_AT,
2931
+ PG_ENUM: () => PG_ENUM,
2932
+ PG_IDENTITY: () => PG_IDENTITY,
2933
+ PG_PRIMARY_KEY: () => PG_PRIMARY_KEY,
2934
+ PG_REF: () => PG_REF,
2935
+ PG_SERIAL: () => PG_SERIAL,
2936
+ PG_UPDATED_AT: () => PG_UPDATED_AT,
2937
+ PG_VERSION: () => PG_VERSION,
2938
+ Repository: () => Repository,
2939
+ RepositoryProvider: () => RepositoryProvider,
2940
+ SequencePrimitive: () => SequencePrimitive,
2941
+ buildQueryString: () => buildQueryString,
2942
+ bunSqliteOptions: () => bunSqliteOptions,
2943
+ db: () => db,
2944
+ drizzle: () => drizzle,
2945
+ getAttrFields: () => getAttrFields,
2946
+ insertSchema: () => insertSchema,
2947
+ legacyIdSchema: () => legacyIdSchema,
2948
+ nodeSqliteOptions: () => nodeSqliteOptions,
2949
+ pageQuerySchema: () => pageQuerySchema,
2950
+ pageSchema: () => pageSchema,
2951
+ parseQueryString: () => parseQueryString,
2952
+ pg: () => pg,
2953
+ pgAttr: () => pgAttr,
2954
+ schema: () => schema,
2955
+ sql: () => sql,
2956
+ updateSchema: () => updateSchema
2957
+ });
2659
2958
  /**
2660
2959
  * Postgres client based on Drizzle ORM, Alepha type-safe friendly.
2661
2960
  *
2961
+ * Automatically selects the appropriate provider based on runtime:
2962
+ * - Bun: Uses `BunPostgresProvider` or `BunSqliteProvider`
2963
+ * - Node.js: Uses `NodePostgresProvider` or `NodeSqliteProvider`
2964
+ *
2662
2965
  * ```ts
2663
2966
  * import { t } from "alepha";
2664
2967
  * import { $entity, $repository, db } from "alepha/postgres";
@@ -2697,6 +3000,10 @@ const legacyIdSchema = pgAttr(pgAttr(pgAttr(t.integer(), PG_PRIMARY_KEY), PG_SER
2697
3000
  * @see {@link $sequence}
2698
3001
  * @see {@link $repository}
2699
3002
  * @see {@link $transaction}
3003
+ * @see {@link NodePostgresProvider} - Node.js Postgres implementation
3004
+ * @see {@link NodeSqliteProvider} - Node.js SQLite implementation
3005
+ * @see {@link BunPostgresProvider} - Bun Postgres implementation
3006
+ * @see {@link BunSqliteProvider} - Bun SQLite implementation
2700
3007
  * @module alepha.postgres
2701
3008
  */
2702
3009
  const AlephaPostgres = $module({
@@ -2706,8 +3013,10 @@ const AlephaPostgres = $module({
2706
3013
  AlephaDateTime,
2707
3014
  DatabaseProvider,
2708
3015
  NodePostgresProvider,
2709
- PglitePostgresProvider,
2710
3016
  NodeSqliteProvider,
3017
+ BunPostgresProvider,
3018
+ BunSqliteProvider,
3019
+ PglitePostgresProvider,
2711
3020
  CloudflareD1Provider,
2712
3021
  SqliteModelBuilder,
2713
3022
  PostgresModelBuilder,
@@ -2727,6 +3036,7 @@ const AlephaPostgres = $module({
2727
3036
  const isSqlite = url?.startsWith("sqlite:");
2728
3037
  const isMemory = url?.includes(":memory:");
2729
3038
  const isFile = !!url && !isPostgres && !isMemory;
3039
+ const isBun = alepha.isBun();
2730
3040
  if (url?.startsWith("cloudflare-d1:")) {
2731
3041
  alepha.with({
2732
3042
  optional: true,
@@ -2747,18 +3057,18 @@ const AlephaPostgres = $module({
2747
3057
  alepha.with({
2748
3058
  optional: true,
2749
3059
  provide: DatabaseProvider,
2750
- use: NodePostgresProvider
3060
+ use: isBun ? BunPostgresProvider : NodePostgresProvider
2751
3061
  });
2752
3062
  return;
2753
3063
  }
2754
3064
  alepha.with({
2755
3065
  optional: true,
2756
3066
  provide: DatabaseProvider,
2757
- use: NodeSqliteProvider
3067
+ use: isBun ? BunSqliteProvider : NodeSqliteProvider
2758
3068
  });
2759
3069
  }
2760
3070
  });
2761
3071
 
2762
3072
  //#endregion
2763
- export { $entity, $repository, $sequence, $transaction, AlephaPostgres, CloudflareD1Provider, DatabaseProvider, DatabaseTypeProvider, DbConflictError, DbEntityNotFoundError, DbError, DbMigrationError, DbVersionMismatchError, DrizzleKitProvider, EntityPrimitive, NodePostgresProvider, NodeSqliteProvider, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_ENUM, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, Repository, RepositoryProvider, SequencePrimitive, buildQueryString, db, drizzle, getAttrFields, insertSchema, legacyIdSchema, nodeSqliteOptions, pageQuerySchema, pageSchema, parseQueryString, pg, pgAttr, schema, sql, updateSchema };
3073
+ export { $entity, $repository, $sequence, $transaction, AlephaPostgres, BunPostgresProvider, BunSqliteProvider, CloudflareD1Provider, DatabaseProvider, DatabaseTypeProvider, DbConflictError, DbEntityNotFoundError, DbError, DbMigrationError, DbVersionMismatchError, DrizzleKitProvider, EntityPrimitive, NodePostgresProvider, NodeSqliteProvider, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_ENUM, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, Repository, RepositoryProvider, SequencePrimitive, buildQueryString, bunSqliteOptions, db, drizzle, getAttrFields, insertSchema, legacyIdSchema, nodeSqliteOptions, pageQuerySchema, pageSchema, parseQueryString, pg, pgAttr, schema, sql, updateSchema };
2764
3074
  //# sourceMappingURL=index.js.map
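To summarize the provider selection introduced in this release: `alepha.isBun()` now steers the choice between the Bun and Node.js drivers, while the DATABASE_URL scheme decides Postgres vs. SQLite vs. Cloudflare D1. The routing, with placeholder values, boils down to:

```ts
// DATABASE_URL routing in 0.15.x (all values are placeholders):
//
//   "cloudflare-d1:<binding>"            -> CloudflareD1Provider
//   "postgres://user:pass@host:5432/db"  -> BunPostgresProvider on Bun, NodePostgresProvider on Node.js
//   "sqlite::memory:" or no URL in tests -> in-memory SQLite (Bun or Node variant)
//   "sqlite:./.data/app.db"              -> file-backed SQLite (Bun or Node variant)
process.env.DATABASE_URL = "postgres://user:pass@localhost:5432/app";
```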