alepha 0.14.4 → 0.15.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -4
- package/dist/api/audits/index.d.ts +619 -731
- package/dist/api/audits/index.d.ts.map +1 -1
- package/dist/api/files/index.d.ts +185 -298
- package/dist/api/files/index.d.ts.map +1 -1
- package/dist/api/files/index.js +0 -1
- package/dist/api/files/index.js.map +1 -1
- package/dist/api/jobs/index.d.ts +245 -356
- package/dist/api/jobs/index.d.ts.map +1 -1
- package/dist/api/notifications/index.d.ts +238 -350
- package/dist/api/notifications/index.d.ts.map +1 -1
- package/dist/api/parameters/index.d.ts +499 -611
- package/dist/api/parameters/index.d.ts.map +1 -1
- package/dist/api/users/index.browser.js +1 -2
- package/dist/api/users/index.browser.js.map +1 -1
- package/dist/api/users/index.d.ts +1697 -1804
- package/dist/api/users/index.d.ts.map +1 -1
- package/dist/api/users/index.js +178 -151
- package/dist/api/users/index.js.map +1 -1
- package/dist/api/verifications/index.d.ts +132 -132
- package/dist/api/verifications/index.d.ts.map +1 -1
- package/dist/batch/index.d.ts +122 -122
- package/dist/batch/index.d.ts.map +1 -1
- package/dist/batch/index.js +1 -2
- package/dist/batch/index.js.map +1 -1
- package/dist/bucket/index.d.ts +163 -163
- package/dist/bucket/index.d.ts.map +1 -1
- package/dist/cache/core/index.d.ts +46 -46
- package/dist/cache/core/index.d.ts.map +1 -1
- package/dist/cache/redis/index.d.ts.map +1 -1
- package/dist/cli/index.d.ts +302 -299
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/index.js +966 -564
- package/dist/cli/index.js.map +1 -1
- package/dist/command/index.d.ts +303 -299
- package/dist/command/index.d.ts.map +1 -1
- package/dist/command/index.js +11 -7
- package/dist/command/index.js.map +1 -1
- package/dist/core/index.browser.js +419 -99
- package/dist/core/index.browser.js.map +1 -1
- package/dist/core/index.d.ts +718 -625
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +420 -99
- package/dist/core/index.js.map +1 -1
- package/dist/core/index.native.js +419 -99
- package/dist/core/index.native.js.map +1 -1
- package/dist/datetime/index.d.ts +44 -44
- package/dist/datetime/index.d.ts.map +1 -1
- package/dist/datetime/index.js +4 -4
- package/dist/datetime/index.js.map +1 -1
- package/dist/email/index.d.ts +97 -50
- package/dist/email/index.d.ts.map +1 -1
- package/dist/email/index.js +129 -33
- package/dist/email/index.js.map +1 -1
- package/dist/fake/index.d.ts +7981 -14
- package/dist/fake/index.d.ts.map +1 -1
- package/dist/file/index.d.ts +523 -390
- package/dist/file/index.d.ts.map +1 -1
- package/dist/file/index.js +253 -1
- package/dist/file/index.js.map +1 -1
- package/dist/lock/core/index.d.ts +208 -208
- package/dist/lock/core/index.d.ts.map +1 -1
- package/dist/lock/redis/index.d.ts.map +1 -1
- package/dist/logger/index.d.ts +25 -26
- package/dist/logger/index.d.ts.map +1 -1
- package/dist/mcp/index.d.ts +197 -197
- package/dist/mcp/index.d.ts.map +1 -1
- package/dist/orm/chunk-DtkW-qnP.js +38 -0
- package/dist/orm/index.browser.js.map +1 -1
- package/dist/orm/index.bun.js +2814 -0
- package/dist/orm/index.bun.js.map +1 -0
- package/dist/orm/index.d.ts +1205 -1057
- package/dist/orm/index.d.ts.map +1 -1
- package/dist/orm/index.js +2056 -1753
- package/dist/orm/index.js.map +1 -1
- package/dist/queue/core/index.d.ts +248 -248
- package/dist/queue/core/index.d.ts.map +1 -1
- package/dist/queue/redis/index.d.ts.map +1 -1
- package/dist/redis/index.bun.js +285 -0
- package/dist/redis/index.bun.js.map +1 -0
- package/dist/redis/index.d.ts +118 -136
- package/dist/redis/index.d.ts.map +1 -1
- package/dist/redis/index.js +18 -38
- package/dist/redis/index.js.map +1 -1
- package/dist/retry/index.d.ts +69 -69
- package/dist/retry/index.d.ts.map +1 -1
- package/dist/router/index.d.ts +6 -6
- package/dist/router/index.d.ts.map +1 -1
- package/dist/scheduler/index.d.ts +25 -25
- package/dist/scheduler/index.d.ts.map +1 -1
- package/dist/security/index.browser.js +5 -1
- package/dist/security/index.browser.js.map +1 -1
- package/dist/security/index.d.ts +417 -254
- package/dist/security/index.d.ts.map +1 -1
- package/dist/security/index.js +386 -86
- package/dist/security/index.js.map +1 -1
- package/dist/server/auth/index.d.ts +277 -277
- package/dist/server/auth/index.d.ts.map +1 -1
- package/dist/server/auth/index.js +20 -20
- package/dist/server/auth/index.js.map +1 -1
- package/dist/server/cache/index.d.ts +60 -57
- package/dist/server/cache/index.d.ts.map +1 -1
- package/dist/server/cache/index.js +1 -1
- package/dist/server/cache/index.js.map +1 -1
- package/dist/server/compress/index.d.ts +3 -3
- package/dist/server/compress/index.d.ts.map +1 -1
- package/dist/server/cookies/index.d.ts +6 -6
- package/dist/server/cookies/index.d.ts.map +1 -1
- package/dist/server/cookies/index.js +3 -3
- package/dist/server/cookies/index.js.map +1 -1
- package/dist/server/core/index.d.ts +242 -150
- package/dist/server/core/index.d.ts.map +1 -1
- package/dist/server/core/index.js +288 -122
- package/dist/server/core/index.js.map +1 -1
- package/dist/server/cors/index.d.ts +11 -12
- package/dist/server/cors/index.d.ts.map +1 -1
- package/dist/server/health/index.d.ts +0 -1
- package/dist/server/health/index.d.ts.map +1 -1
- package/dist/server/helmet/index.d.ts +2 -2
- package/dist/server/helmet/index.d.ts.map +1 -1
- package/dist/server/links/index.browser.js.map +1 -1
- package/dist/server/links/index.d.ts +84 -85
- package/dist/server/links/index.d.ts.map +1 -1
- package/dist/server/links/index.js +1 -2
- package/dist/server/links/index.js.map +1 -1
- package/dist/server/metrics/index.d.ts.map +1 -1
- package/dist/server/multipart/index.d.ts +6 -6
- package/dist/server/multipart/index.d.ts.map +1 -1
- package/dist/server/proxy/index.d.ts +102 -103
- package/dist/server/proxy/index.d.ts.map +1 -1
- package/dist/server/rate-limit/index.d.ts +16 -16
- package/dist/server/rate-limit/index.d.ts.map +1 -1
- package/dist/server/static/index.d.ts +44 -44
- package/dist/server/static/index.d.ts.map +1 -1
- package/dist/server/swagger/index.d.ts +48 -49
- package/dist/server/swagger/index.d.ts.map +1 -1
- package/dist/server/swagger/index.js +1 -2
- package/dist/server/swagger/index.js.map +1 -1
- package/dist/sms/index.d.ts +13 -11
- package/dist/sms/index.d.ts.map +1 -1
- package/dist/sms/index.js +7 -7
- package/dist/sms/index.js.map +1 -1
- package/dist/thread/index.d.ts +71 -72
- package/dist/thread/index.d.ts.map +1 -1
- package/dist/topic/core/index.d.ts +318 -318
- package/dist/topic/core/index.d.ts.map +1 -1
- package/dist/topic/redis/index.d.ts +6 -6
- package/dist/topic/redis/index.d.ts.map +1 -1
- package/dist/vite/index.d.ts +5720 -159
- package/dist/vite/index.d.ts.map +1 -1
- package/dist/vite/index.js +41 -18
- package/dist/vite/index.js.map +1 -1
- package/dist/websocket/index.browser.js +6 -6
- package/dist/websocket/index.browser.js.map +1 -1
- package/dist/websocket/index.d.ts +247 -247
- package/dist/websocket/index.d.ts.map +1 -1
- package/dist/websocket/index.js +6 -6
- package/dist/websocket/index.js.map +1 -1
- package/package.json +9 -14
- package/src/api/files/controllers/AdminFileStatsController.ts +0 -1
- package/src/api/users/atoms/realmAuthSettingsAtom.ts +5 -0
- package/src/api/users/controllers/{UserRealmController.ts → RealmController.ts} +11 -11
- package/src/api/users/entities/users.ts +1 -1
- package/src/api/users/index.ts +8 -8
- package/src/api/users/primitives/{$userRealm.ts → $realm.ts} +17 -19
- package/src/api/users/providers/{UserRealmProvider.ts → RealmProvider.ts} +26 -30
- package/src/api/users/schemas/{userRealmConfigSchema.ts → realmConfigSchema.ts} +2 -2
- package/src/api/users/services/CredentialService.ts +7 -7
- package/src/api/users/services/IdentityService.ts +4 -4
- package/src/api/users/services/RegistrationService.spec.ts +25 -27
- package/src/api/users/services/RegistrationService.ts +38 -27
- package/src/api/users/services/SessionCrudService.ts +3 -3
- package/src/api/users/services/SessionService.spec.ts +3 -3
- package/src/api/users/services/SessionService.ts +28 -9
- package/src/api/users/services/UserService.ts +7 -7
- package/src/batch/providers/BatchProvider.ts +1 -2
- package/src/cli/apps/AlephaPackageBuilderCli.ts +38 -19
- package/src/cli/assets/apiHelloControllerTs.ts +18 -0
- package/src/cli/assets/apiIndexTs.ts +16 -0
- package/src/cli/assets/claudeMd.ts +303 -0
- package/src/cli/assets/mainBrowserTs.ts +2 -2
- package/src/cli/assets/mainServerTs.ts +24 -0
- package/src/cli/assets/webAppRouterTs.ts +15 -0
- package/src/cli/assets/webHelloComponentTsx.ts +16 -0
- package/src/cli/assets/webIndexTs.ts +16 -0
- package/src/cli/commands/build.ts +41 -21
- package/src/cli/commands/db.ts +21 -18
- package/src/cli/commands/deploy.ts +17 -5
- package/src/cli/commands/dev.ts +13 -17
- package/src/cli/commands/format.ts +8 -2
- package/src/cli/commands/init.ts +74 -29
- package/src/cli/commands/lint.ts +8 -2
- package/src/cli/commands/test.ts +8 -2
- package/src/cli/commands/typecheck.ts +5 -1
- package/src/cli/commands/verify.ts +4 -2
- package/src/cli/services/AlephaCliUtils.ts +39 -600
- package/src/cli/services/PackageManagerUtils.ts +301 -0
- package/src/cli/services/ProjectScaffolder.ts +306 -0
- package/src/command/helpers/Runner.ts +15 -3
- package/src/core/__tests__/Alepha-graph.spec.ts +4 -0
- package/src/core/index.shared.ts +1 -0
- package/src/core/index.ts +2 -0
- package/src/core/primitives/$hook.ts +6 -2
- package/src/core/primitives/$module.spec.ts +4 -0
- package/src/core/providers/AlsProvider.ts +1 -1
- package/src/core/providers/CodecManager.spec.ts +12 -6
- package/src/core/providers/CodecManager.ts +26 -6
- package/src/core/providers/EventManager.ts +169 -13
- package/src/core/providers/KeylessJsonSchemaCodec.spec.ts +621 -0
- package/src/core/providers/KeylessJsonSchemaCodec.ts +407 -0
- package/src/core/providers/StateManager.spec.ts +27 -16
- package/src/email/providers/LocalEmailProvider.spec.ts +111 -87
- package/src/email/providers/LocalEmailProvider.ts +52 -15
- package/src/email/providers/NodemailerEmailProvider.ts +167 -56
- package/src/file/errors/FileError.ts +7 -0
- package/src/file/index.ts +9 -1
- package/src/file/providers/MemoryFileSystemProvider.ts +393 -0
- package/src/orm/index.browser.ts +1 -19
- package/src/orm/index.bun.ts +77 -0
- package/src/orm/index.shared-server.ts +22 -0
- package/src/orm/index.shared.ts +15 -0
- package/src/orm/index.ts +19 -39
- package/src/orm/providers/drivers/BunPostgresProvider.ts +3 -5
- package/src/orm/providers/drivers/BunSqliteProvider.ts +1 -1
- package/src/orm/providers/drivers/CloudflareD1Provider.ts +4 -0
- package/src/orm/providers/drivers/DatabaseProvider.ts +4 -0
- package/src/orm/providers/drivers/PglitePostgresProvider.ts +4 -0
- package/src/orm/services/Repository.ts +8 -0
- package/src/redis/index.bun.ts +35 -0
- package/src/redis/providers/BunRedisProvider.ts +12 -43
- package/src/redis/providers/BunRedisSubscriberProvider.ts +2 -3
- package/src/redis/providers/NodeRedisProvider.ts +16 -34
- package/src/{server/security → security}/__tests__/BasicAuth.spec.ts +11 -11
- package/src/{server/security → security}/__tests__/ServerSecurityProvider-realm.spec.ts +21 -16
- package/src/{server/security/providers → security/__tests__}/ServerSecurityProvider.spec.ts +5 -5
- package/src/security/index.browser.ts +5 -0
- package/src/security/index.ts +90 -7
- package/src/security/primitives/{$realm.spec.ts → $issuer.spec.ts} +11 -11
- package/src/security/primitives/{$realm.ts → $issuer.ts} +20 -17
- package/src/security/primitives/$role.ts +5 -5
- package/src/security/primitives/$serviceAccount.spec.ts +5 -5
- package/src/security/primitives/$serviceAccount.ts +3 -3
- package/src/{server/security → security}/providers/ServerSecurityProvider.ts +5 -7
- package/src/server/auth/primitives/$auth.ts +10 -10
- package/src/server/auth/primitives/$authCredentials.ts +3 -3
- package/src/server/auth/primitives/$authGithub.ts +3 -3
- package/src/server/auth/primitives/$authGoogle.ts +3 -3
- package/src/server/auth/providers/ServerAuthProvider.ts +13 -13
- package/src/server/cache/providers/ServerCacheProvider.ts +1 -1
- package/src/server/cookies/providers/ServerCookiesProvider.ts +3 -3
- package/src/server/core/providers/NodeHttpServerProvider.ts +25 -6
- package/src/server/core/providers/ServerBodyParserProvider.ts +19 -23
- package/src/server/core/providers/ServerLoggerProvider.ts +23 -19
- package/src/server/core/providers/ServerProvider.ts +144 -21
- package/src/server/core/providers/ServerRouterProvider.ts +259 -115
- package/src/server/core/providers/ServerTimingProvider.ts +2 -2
- package/src/server/links/index.ts +1 -1
- package/src/server/links/providers/LinkProvider.ts +1 -1
- package/src/server/swagger/index.ts +1 -1
- package/src/sms/providers/LocalSmsProvider.spec.ts +153 -111
- package/src/sms/providers/LocalSmsProvider.ts +8 -7
- package/src/vite/helpers/boot.ts +28 -17
- package/src/vite/tasks/buildServer.ts +12 -1
- package/src/vite/tasks/devServer.ts +3 -1
- package/src/vite/tasks/generateCloudflare.ts +7 -0
- package/dist/server/security/index.browser.js +0 -13
- package/dist/server/security/index.browser.js.map +0 -1
- package/dist/server/security/index.d.ts +0 -173
- package/dist/server/security/index.d.ts.map +0 -1
- package/dist/server/security/index.js +0 -311
- package/dist/server/security/index.js.map +0 -1
- package/src/cli/assets/appRouterTs.ts +0 -9
- package/src/cli/assets/mainTs.ts +0 -13
- package/src/server/security/index.browser.ts +0 -10
- package/src/server/security/index.ts +0 -94
- /package/src/{server/security → security}/primitives/$basicAuth.ts +0 -0
- /package/src/{server/security → security}/providers/ServerBasicAuthProvider.ts +0 -0
|
@@ -0,0 +1,2814 @@
|
|
|
1
|
+
import { createRequire } from "node:module";
|
|
2
|
+
import { $atom, $context, $env, $hook, $inject, $module, $use, Alepha, AlephaError, KIND, Primitive, Value, createPagination, createPrimitive, pageQuerySchema, pageSchema, pageSchema as pageSchema$1, t } from "alepha";
|
|
3
|
+
import { AlephaDateTime, DateTimeProvider } from "alepha/datetime";
|
|
4
|
+
import * as pg$2 from "drizzle-orm/pg-core";
|
|
5
|
+
import { alias, check, customType, foreignKey, index, pgEnum, pgSchema, pgTable, unique, uniqueIndex } from "drizzle-orm/pg-core";
|
|
6
|
+
import * as drizzle from "drizzle-orm";
|
|
7
|
+
import { and, arrayContained, arrayContains, arrayOverlaps, asc, between, desc, eq, getTableName, gt, gte, ilike, inArray, isNotNull, isNull, isSQLWrapper, like, lt, lte, ne, not, notBetween, notIlike, notInArray, notLike, or, sql, sql as sql$1 } from "drizzle-orm";
|
|
8
|
+
import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
|
|
9
|
+
import { $logger } from "alepha/logger";
|
|
10
|
+
import { $lock } from "alepha/lock";
|
|
11
|
+
import { randomUUID } from "node:crypto";
|
|
12
|
+
import * as pg$1 from "drizzle-orm/sqlite-core";
|
|
13
|
+
import { check as check$1, foreignKey as foreignKey$1, index as index$1, sqliteTable, unique as unique$1, uniqueIndex as uniqueIndex$1 } from "drizzle-orm/sqlite-core";
|
|
14
|
+
import { migrate } from "drizzle-orm/pglite/migrator";
|
|
15
|
+
import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
|
|
16
|
+
import { $retry } from "alepha/retry";
|
|
17
|
+
|
|
18
|
+
export * from "drizzle-orm/pg-core"
|
|
19
|
+
|
|
20
|
+
//#region rolldown:runtime
|
|
21
|
+
var __defProp = Object.defineProperty;
|
|
22
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
23
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
24
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
25
|
+
var __exportAll = (all, symbols) => {
|
|
26
|
+
let target = {};
|
|
27
|
+
for (var name in all) {
|
|
28
|
+
__defProp(target, name, {
|
|
29
|
+
get: all[name],
|
|
30
|
+
enumerable: true
|
|
31
|
+
});
|
|
32
|
+
}
|
|
33
|
+
if (symbols) {
|
|
34
|
+
__defProp(target, Symbol.toStringTag, { value: "Module" });
|
|
35
|
+
}
|
|
36
|
+
return target;
|
|
37
|
+
};
|
|
38
|
+
var __copyProps = (to, from, except, desc$1) => {
|
|
39
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
40
|
+
for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
|
|
41
|
+
key = keys[i];
|
|
42
|
+
if (!__hasOwnProp.call(to, key) && key !== except) {
|
|
43
|
+
__defProp(to, key, {
|
|
44
|
+
get: ((k) => from[k]).bind(null, key),
|
|
45
|
+
enumerable: !(desc$1 = __getOwnPropDesc(from, key)) || desc$1.enumerable
|
|
46
|
+
});
|
|
47
|
+
}
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
return to;
|
|
51
|
+
};
|
|
52
|
+
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
|
|
53
|
+
|
|
54
|
+
//#endregion
|
|
55
|
+
//#region ../../src/orm/constants/PG_SYMBOLS.ts
|
|
56
|
+
const PG_DEFAULT = Symbol.for("Alepha.Postgres.Default");
|
|
57
|
+
const PG_PRIMARY_KEY = Symbol.for("Alepha.Postgres.PrimaryKey");
|
|
58
|
+
const PG_CREATED_AT = Symbol.for("Alepha.Postgres.CreatedAt");
|
|
59
|
+
const PG_UPDATED_AT = Symbol.for("Alepha.Postgres.UpdatedAt");
|
|
60
|
+
const PG_DELETED_AT = Symbol.for("Alepha.Postgres.DeletedAt");
|
|
61
|
+
const PG_VERSION = Symbol.for("Alepha.Postgres.Version");
|
|
62
|
+
const PG_IDENTITY = Symbol.for("Alepha.Postgres.Identity");
|
|
63
|
+
const PG_ENUM = Symbol.for("Alepha.Postgres.Enum");
|
|
64
|
+
const PG_REF = Symbol.for("Alepha.Postgres.Ref");
|
|
65
|
+
/**
|
|
66
|
+
* @deprecated Use `PG_IDENTITY` instead.
|
|
67
|
+
*/
|
|
68
|
+
const PG_SERIAL = Symbol.for("Alepha.Postgres.Serial");
|
|
69
|
+
|
|
70
|
+
//#endregion
|
|
71
|
+
//#region ../../src/orm/types/schema.ts
|
|
72
|
+
/**
|
|
73
|
+
* Postgres schema type.
|
|
74
|
+
*/
|
|
75
|
+
const schema = (name, document) => customType({
|
|
76
|
+
dataType: () => "jsonb",
|
|
77
|
+
toDriver: (value) => JSON.stringify(value),
|
|
78
|
+
fromDriver: (value) => value && typeof value === "string" ? JSON.parse(value) : value
|
|
79
|
+
})(name, { document }).$type();
|
|
80
|
+
|
|
81
|
+
//#endregion
|
|
82
|
+
//#region ../../src/orm/schemas/insertSchema.ts
|
|
83
|
+
const insertSchema = (obj) => {
|
|
84
|
+
const newProperties = {};
|
|
85
|
+
for (const key in obj.properties) {
|
|
86
|
+
const prop = obj.properties[key];
|
|
87
|
+
if (PG_DEFAULT in prop) newProperties[key] = t.optional(prop);
|
|
88
|
+
else newProperties[key] = prop;
|
|
89
|
+
}
|
|
90
|
+
return t.object(newProperties, "options" in schema && typeof schema.options === "object" ? { ...schema.options } : {});
|
|
91
|
+
};
|
|
92
|
+
|
|
93
|
+
//#endregion
|
|
94
|
+
//#region ../../src/orm/schemas/updateSchema.ts
|
|
95
|
+
const updateSchema = (schema$1) => {
|
|
96
|
+
const newProperties = {};
|
|
97
|
+
for (const key in schema$1.properties) {
|
|
98
|
+
const prop = schema$1.properties[key];
|
|
99
|
+
if (t.schema.isOptional(prop)) newProperties[key] = t.optional(t.union([prop, t.raw.Null()]));
|
|
100
|
+
else newProperties[key] = prop;
|
|
101
|
+
}
|
|
102
|
+
return t.object(newProperties, "options" in schema$1 && typeof schema$1.options === "object" ? { ...schema$1.options } : {});
|
|
103
|
+
};
|
|
104
|
+
|
|
105
|
+
//#endregion
|
|
106
|
+
//#region ../../src/orm/primitives/$entity.ts
|
|
107
|
+
/**
|
|
108
|
+
* Creates a database entity primitive that defines table structure using TypeBox schemas.
|
|
109
|
+
*
|
|
110
|
+
* @example
|
|
111
|
+
* ```ts
|
|
112
|
+
* import { t } from "alepha";
|
|
113
|
+
* import { $entity } from "alepha/orm";
|
|
114
|
+
*
|
|
115
|
+
* const userEntity = $entity({
|
|
116
|
+
* name: "users",
|
|
117
|
+
* schema: t.object({
|
|
118
|
+
* id: pg.primaryKey(),
|
|
119
|
+
* name: t.text(),
|
|
120
|
+
* email: t.email(),
|
|
121
|
+
* }),
|
|
122
|
+
* });
|
|
123
|
+
* ```
|
|
124
|
+
*/
|
|
125
|
+
const $entity = (options) => {
|
|
126
|
+
return new EntityPrimitive(options);
|
|
127
|
+
};
|
|
128
|
+
var EntityPrimitive = class EntityPrimitive {
|
|
129
|
+
options;
|
|
130
|
+
constructor(options) {
|
|
131
|
+
this.options = options;
|
|
132
|
+
}
|
|
133
|
+
alias(alias$1) {
|
|
134
|
+
const aliased = new EntityPrimitive(this.options);
|
|
135
|
+
return new Proxy(aliased, { get(target, prop, receiver) {
|
|
136
|
+
if (prop === "$alias") return alias$1;
|
|
137
|
+
return Reflect.get(target, prop, receiver);
|
|
138
|
+
} });
|
|
139
|
+
}
|
|
140
|
+
get cols() {
|
|
141
|
+
const cols = {};
|
|
142
|
+
for (const key of Object.keys(this.schema.properties)) cols[key] = {
|
|
143
|
+
name: key,
|
|
144
|
+
entity: this
|
|
145
|
+
};
|
|
146
|
+
return cols;
|
|
147
|
+
}
|
|
148
|
+
get name() {
|
|
149
|
+
return this.options.name;
|
|
150
|
+
}
|
|
151
|
+
get schema() {
|
|
152
|
+
return this.options.schema;
|
|
153
|
+
}
|
|
154
|
+
get insertSchema() {
|
|
155
|
+
return insertSchema(this.options.schema);
|
|
156
|
+
}
|
|
157
|
+
get updateSchema() {
|
|
158
|
+
return updateSchema(this.options.schema);
|
|
159
|
+
}
|
|
160
|
+
};
|
|
161
|
+
$entity[KIND] = EntityPrimitive;
|
|
162
|
+
|
|
163
|
+
//#endregion
|
|
164
|
+
//#region ../../src/orm/errors/DbError.ts
|
|
165
|
+
var DbError = class extends AlephaError {
|
|
166
|
+
name = "DbError";
|
|
167
|
+
constructor(message, cause) {
|
|
168
|
+
super(message, { cause });
|
|
169
|
+
}
|
|
170
|
+
};
|
|
171
|
+
|
|
172
|
+
//#endregion
|
|
173
|
+
//#region ../../src/orm/providers/drivers/DatabaseProvider.ts
|
|
174
|
+
var DatabaseProvider = class {
|
|
175
|
+
alepha = $inject(Alepha);
|
|
176
|
+
log = $logger();
|
|
177
|
+
enums = /* @__PURE__ */ new Map();
|
|
178
|
+
tables = /* @__PURE__ */ new Map();
|
|
179
|
+
sequences = /* @__PURE__ */ new Map();
|
|
180
|
+
get name() {
|
|
181
|
+
return "default";
|
|
182
|
+
}
|
|
183
|
+
get driver() {
|
|
184
|
+
return this.dialect;
|
|
185
|
+
}
|
|
186
|
+
get schema() {
|
|
187
|
+
return "public";
|
|
188
|
+
}
|
|
189
|
+
table(entity) {
|
|
190
|
+
const table = this.tables.get(entity.name);
|
|
191
|
+
if (!table) throw new AlephaError(`Table '${entity.name}' is not registered`);
|
|
192
|
+
const hasAlias = entity.$alias;
|
|
193
|
+
if (hasAlias) return alias(table, hasAlias);
|
|
194
|
+
return table;
|
|
195
|
+
}
|
|
196
|
+
registerEntity(entity) {
|
|
197
|
+
this.builder.buildTable(entity, this);
|
|
198
|
+
}
|
|
199
|
+
registerSequence(sequence) {
|
|
200
|
+
this.builder.buildSequence(sequence, this);
|
|
201
|
+
}
|
|
202
|
+
async run(statement, schema$1) {
|
|
203
|
+
return (await this.execute(statement)).map((row) => this.alepha.codec.decode(schema$1, row));
|
|
204
|
+
}
|
|
205
|
+
/**
|
|
206
|
+
* Get migrations folder path - can be overridden
|
|
207
|
+
*/
|
|
208
|
+
getMigrationsFolder() {
|
|
209
|
+
return `migrations/${this.name}`;
|
|
210
|
+
}
|
|
211
|
+
/**
|
|
212
|
+
* Base migration orchestration - handles environment logic
|
|
213
|
+
*/
|
|
214
|
+
async migrate() {
|
|
215
|
+
const migrationsFolder = this.getMigrationsFolder();
|
|
216
|
+
if (this.alepha.isProduction()) await this.runProductionMigration(migrationsFolder);
|
|
217
|
+
else if (this.alepha.isTest()) await this.runTestMigration();
|
|
218
|
+
else await this.runDevelopmentMigration(migrationsFolder);
|
|
219
|
+
}
|
|
220
|
+
/**
|
|
221
|
+
* Production: run migrations from folder
|
|
222
|
+
*/
|
|
223
|
+
async runProductionMigration(migrationsFolder) {
|
|
224
|
+
if (!await stat(migrationsFolder).catch(() => false)) {
|
|
225
|
+
this.log.warn("Migration SKIPPED - no migrations found");
|
|
226
|
+
return;
|
|
227
|
+
}
|
|
228
|
+
this.log.debug(`Migrate from '${migrationsFolder}' directory ...`);
|
|
229
|
+
await this.executeMigrations(migrationsFolder);
|
|
230
|
+
this.log.info("Migration OK");
|
|
231
|
+
}
|
|
232
|
+
/**
|
|
233
|
+
* Test: always synchronize
|
|
234
|
+
*/
|
|
235
|
+
async runTestMigration() {
|
|
236
|
+
await this.synchronizeSchema();
|
|
237
|
+
}
|
|
238
|
+
/**
|
|
239
|
+
* Development: default to synchronize (can be overridden)
|
|
240
|
+
*/
|
|
241
|
+
async runDevelopmentMigration(migrationsFolder) {
|
|
242
|
+
try {
|
|
243
|
+
if (!this.url.includes(":memory:")) await this.executeMigrations(migrationsFolder);
|
|
244
|
+
} catch {}
|
|
245
|
+
await this.synchronizeSchema();
|
|
246
|
+
}
|
|
247
|
+
/**
|
|
248
|
+
* Common synchronization with error handling
|
|
249
|
+
*/
|
|
250
|
+
async synchronizeSchema() {
|
|
251
|
+
try {
|
|
252
|
+
await this.kit.synchronize(this);
|
|
253
|
+
} catch (error) {
|
|
254
|
+
throw new DbError(`Failed to synchronize ${this.dialect} database schema`, error);
|
|
255
|
+
}
|
|
256
|
+
}
|
|
257
|
+
/**
|
|
258
|
+
* For testing purposes, generate a unique schema name.
|
|
259
|
+
* The schema name will be generated based on the current date and time.
|
|
260
|
+
* It will be in the format of `test_YYYYMMDD_HHMMSS_randomSuffix`.
|
|
261
|
+
*/
|
|
262
|
+
generateTestSchemaName() {
|
|
263
|
+
const pad = (n) => n.toString().padStart(2, "0");
|
|
264
|
+
const now = /* @__PURE__ */ new Date();
|
|
265
|
+
return `test_${`${now.getUTCFullYear()}${pad(now.getUTCMonth() + 1)}${pad(now.getUTCDate())}_${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}`}_${Math.random().toString(36).slice(2, 6)}`;
|
|
266
|
+
}
|
|
267
|
+
};
|
|
268
|
+
|
|
269
|
+
//#endregion
|
|
270
|
+
//#region ../../src/orm/primitives/$sequence.ts
|
|
271
|
+
/**
|
|
272
|
+
* Creates a PostgreSQL sequence primitive for generating unique numeric values.
|
|
273
|
+
*/
|
|
274
|
+
const $sequence = (options = {}) => {
|
|
275
|
+
return createPrimitive(SequencePrimitive, options);
|
|
276
|
+
};
|
|
277
|
+
var SequencePrimitive = class extends Primitive {
|
|
278
|
+
provider = this.$provider();
|
|
279
|
+
onInit() {
|
|
280
|
+
this.provider.registerSequence(this);
|
|
281
|
+
}
|
|
282
|
+
get name() {
|
|
283
|
+
return this.options.name ?? this.config.propertyKey;
|
|
284
|
+
}
|
|
285
|
+
async next() {
|
|
286
|
+
return this.provider.execute(sql$1`SELECT nextval('${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"')`).then((rows) => Number(rows[0]?.nextval));
|
|
287
|
+
}
|
|
288
|
+
async current() {
|
|
289
|
+
return this.provider.execute(sql$1`SELECT last_value FROM ${sql$1.raw(this.provider.schema)}."${sql$1.raw(this.name)}"`).then((rows) => Number(rows[0]?.last_value));
|
|
290
|
+
}
|
|
291
|
+
$provider() {
|
|
292
|
+
return this.options.provider ?? this.alepha.inject(DatabaseProvider);
|
|
293
|
+
}
|
|
294
|
+
};
|
|
295
|
+
$sequence[KIND] = SequencePrimitive;
|
|
296
|
+
|
|
297
|
+
//#endregion
|
|
298
|
+
//#region ../../src/orm/providers/DrizzleKitProvider.ts
|
|
299
|
+
var DrizzleKitProvider = class {
|
|
300
|
+
log = $logger();
|
|
301
|
+
alepha = $inject(Alepha);
|
|
302
|
+
/**
|
|
303
|
+
* Synchronize database with current schema definitions.
|
|
304
|
+
*
|
|
305
|
+
* In development mode, it will generate and execute migrations based on the current state.
|
|
306
|
+
* In testing mode, it will generate migrations from scratch without applying them.
|
|
307
|
+
*
|
|
308
|
+
* Does nothing in production mode, you must handle migrations manually.
|
|
309
|
+
*/
|
|
310
|
+
async synchronize(provider) {
|
|
311
|
+
if (this.alepha.isProduction()) {
|
|
312
|
+
this.log.warn("Synchronization skipped in production mode.");
|
|
313
|
+
return;
|
|
314
|
+
}
|
|
315
|
+
if (provider.schema !== "public") await this.createSchemaIfNotExists(provider, provider.schema);
|
|
316
|
+
const now = Date.now();
|
|
317
|
+
if (this.alepha.isTest()) {
|
|
318
|
+
const { statements } = await this.generateMigration(provider);
|
|
319
|
+
await this.executeStatements(statements, provider);
|
|
320
|
+
} else {
|
|
321
|
+
const entry = await this.loadDevMigrations(provider);
|
|
322
|
+
const { statements, snapshot } = await this.generateMigration(provider, entry?.snapshot ? JSON.parse(entry.snapshot) : void 0);
|
|
323
|
+
await this.executeStatements(statements, provider, true);
|
|
324
|
+
await this.saveDevMigrations(provider, snapshot, entry);
|
|
325
|
+
}
|
|
326
|
+
this.log.info(`Db '${provider.name}' synchronization OK [${Date.now() - now}ms]`);
|
|
327
|
+
}
|
|
328
|
+
/**
|
|
329
|
+
* Mostly used for testing purposes. You can generate SQL migration statements without executing them.
|
|
330
|
+
*/
|
|
331
|
+
async generateMigration(provider, prevSnapshot) {
|
|
332
|
+
const kit = this.importDrizzleKit();
|
|
333
|
+
const models = this.getModels(provider);
|
|
334
|
+
if (Object.keys(models).length > 0) {
|
|
335
|
+
if (provider.dialect === "sqlite") {
|
|
336
|
+
const prev$1 = prevSnapshot ?? await kit.generateSQLiteDrizzleJson({});
|
|
337
|
+
const curr$1 = await kit.generateSQLiteDrizzleJson(models);
|
|
338
|
+
return {
|
|
339
|
+
models,
|
|
340
|
+
statements: await kit.generateSQLiteMigration(prev$1, curr$1),
|
|
341
|
+
snapshot: curr$1
|
|
342
|
+
};
|
|
343
|
+
}
|
|
344
|
+
const prev = prevSnapshot ?? await kit.generateDrizzleJson({});
|
|
345
|
+
const curr = await kit.generateDrizzleJson(models);
|
|
346
|
+
return {
|
|
347
|
+
models,
|
|
348
|
+
statements: await kit.generateMigration(prev, curr),
|
|
349
|
+
snapshot: curr
|
|
350
|
+
};
|
|
351
|
+
}
|
|
352
|
+
return {
|
|
353
|
+
models,
|
|
354
|
+
statements: [],
|
|
355
|
+
snapshot: {}
|
|
356
|
+
};
|
|
357
|
+
}
|
|
358
|
+
/**
|
|
359
|
+
* Load all tables, enums, sequences, etc. from the provider's repositories.
|
|
360
|
+
*/
|
|
361
|
+
getModels(provider) {
|
|
362
|
+
const models = {};
|
|
363
|
+
for (const [key, value] of provider.tables.entries()) {
|
|
364
|
+
if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
|
|
365
|
+
models[key] = value;
|
|
366
|
+
}
|
|
367
|
+
for (const [key, value] of provider.enums.entries()) {
|
|
368
|
+
if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
|
|
369
|
+
models[key] = value;
|
|
370
|
+
}
|
|
371
|
+
for (const [key, value] of provider.sequences.entries()) {
|
|
372
|
+
if (models[key]) throw new AlephaError(`Model name conflict: '${key}' is already defined.`);
|
|
373
|
+
models[key] = value;
|
|
374
|
+
}
|
|
375
|
+
return models;
|
|
376
|
+
}
|
|
377
|
+
/**
 * Load the migration snapshot from the database.
 *
 * Source depends on the provider:
 * - in-memory database: nothing to load, returns undefined;
 * - sqlite: reads `node_modules/.alepha/sqlite-<name>.json` (undefined when
 *   the file is missing or unreadable — normal first-run case);
 * - otherwise (postgres): ensures the "drizzle" bookkeeping schema/table
 *   exist, then returns the row matching this app/provider name, if any.
 *
 * @param provider - Database provider (exposes `url`, `dialect`, `execute`, `run`).
 * @returns Decoded dev-migration snapshot, or undefined when none exists.
 */
async loadDevMigrations(provider) {
	// Snapshot key: one snapshot per app/provider pair.
	const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
	if (provider.url.includes(":memory:")) {
		this.log.trace(`In-memory database detected for '${name}', skipping migration snapshot load.`);
		return;
	}
	if (provider.dialect === "sqlite") {
		try {
			const text = await readFile(`node_modules/.alepha/sqlite-${name}.json`, "utf-8");
			return this.alepha.codec.decode(devMigrationsSchema, text);
		} catch (e) {
			// A missing snapshot file is expected on first run; trace only.
			this.log.trace(`No existing migration snapshot for '${name}'`, e);
		}
		return;
	}
	// Postgres path: snapshots live in a dedicated bookkeeping table.
	await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS "drizzle";`);
	await provider.execute(sql$1`
		CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_dev_migrations" (
			"id" SERIAL PRIMARY KEY,
			"name" TEXT NOT NULL,
			"created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
			"snapshot" TEXT NOT NULL
		);
	`);
	const rows = await provider.run(sql$1`SELECT * FROM "drizzle"."__drizzle_dev_migrations" WHERE "name" = ${name} LIMIT 1`, devMigrationsSchema);
	if (rows.length === 0) {
		this.log.trace(`No existing migration snapshot for '${name}'`);
		return;
	}
	return this.alepha.codec.decode(devMigrationsSchema, rows[0]);
}
|
|
411
|
+
/**
 * Persist the current schema snapshot for dev migrations.
 *
 * - in-memory database: no-op;
 * - sqlite: writes the snapshot as pretty-printed JSON under
 *   `node_modules/.alepha/`;
 * - otherwise (postgres): inserts a new bookkeeping row, or updates the
 *   existing one only when the snapshot text actually changed.
 *
 * @param provider - Database provider.
 * @param curr - Current schema snapshot (serialized with JSON.stringify).
 * @param devMigrations - Previously loaded snapshot row, or undefined on first save.
 */
async saveDevMigrations(provider, curr, devMigrations) {
	if (provider.url.includes(":memory:")) {
		this.log.trace(`In-memory database detected for '${provider.constructor.name}', skipping migration snapshot save.`);
		return;
	}
	const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
	if (provider.dialect === "sqlite") {
		const filePath = `node_modules/.alepha/sqlite-${name}.json`;
		// Directory may already exist; mkdir failure is non-fatal here.
		await mkdir("node_modules/.alepha", { recursive: true }).catch(() => null);
		await writeFile(filePath, JSON.stringify({
			id: devMigrations?.id ?? 1,
			name,
			created_at: /* @__PURE__ */ new Date(),
			snapshot: JSON.stringify(curr)
		}, null, 2));
		this.log.debug(`Saved migration snapshot to '${filePath}'`);
		return;
	}
	if (!devMigrations) await provider.execute(sql$1`INSERT INTO "drizzle"."__drizzle_dev_migrations" ("name", "snapshot") VALUES (${name}, ${JSON.stringify(curr)})`);
	else {
		const newSnapshot = JSON.stringify(curr);
		// Avoid a write when nothing changed.
		if (devMigrations.snapshot !== newSnapshot) await provider.execute(sql$1`UPDATE "drizzle"."__drizzle_dev_migrations" SET "snapshot" = ${newSnapshot} WHERE "id" = ${devMigrations.id}`);
	}
}
|
|
435
|
+
async executeStatements(statements, provider, catchErrors = false) {
|
|
436
|
+
let nErrors = 0;
|
|
437
|
+
for (const statement of statements) {
|
|
438
|
+
if (statement.startsWith("DROP SCHEMA")) continue;
|
|
439
|
+
try {
|
|
440
|
+
await provider.execute(sql$1.raw(statement));
|
|
441
|
+
} catch (error) {
|
|
442
|
+
const errorMessage = `Error executing statement: ${statement}`;
|
|
443
|
+
if (catchErrors) {
|
|
444
|
+
nErrors++;
|
|
445
|
+
this.log.warn(errorMessage, { context: [error] });
|
|
446
|
+
} else throw error;
|
|
447
|
+
}
|
|
448
|
+
}
|
|
449
|
+
if (nErrors > 0) this.log.warn(`Executed ${statements.length} statements with ${nErrors} errors.`);
|
|
450
|
+
}
|
|
451
|
+
async createSchemaIfNotExists(provider, schemaName) {
|
|
452
|
+
if (!/^[a-z0-9_]+$/i.test(schemaName)) throw new Error(`Invalid schema name: ${schemaName}. Must only contain alphanumeric characters and underscores.`);
|
|
453
|
+
const sqlSchema = sql$1.raw(schemaName);
|
|
454
|
+
if (schemaName.startsWith("test_")) {
|
|
455
|
+
this.log.info(`Drop test schema '${schemaName}' ...`, schemaName);
|
|
456
|
+
await provider.execute(sql$1`DROP SCHEMA IF EXISTS ${sqlSchema} CASCADE`);
|
|
457
|
+
}
|
|
458
|
+
this.log.debug(`Ensuring schema '${schemaName}' exists`);
|
|
459
|
+
await provider.execute(sql$1`CREATE SCHEMA IF NOT EXISTS ${sqlSchema}`);
|
|
460
|
+
}
|
|
461
|
+
/**
|
|
462
|
+
* Try to load the official Drizzle Kit API.
|
|
463
|
+
* If not available, fallback to the local kit import.
|
|
464
|
+
*/
|
|
465
|
+
importDrizzleKit() {
|
|
466
|
+
try {
|
|
467
|
+
return createRequire(import.meta.url)("drizzle-kit/api");
|
|
468
|
+
} catch (_) {
|
|
469
|
+
throw new Error("Drizzle Kit is not installed. Please install it with `npm install -D drizzle-kit`.");
|
|
470
|
+
}
|
|
471
|
+
}
|
|
472
|
+
};
|
|
473
|
+
/**
 * Runtime schema for a dev-migration snapshot row.
 * Matches both the "drizzle"."__drizzle_dev_migrations" table and the
 * sqlite JSON file written by saveDevMigrations.
 */
const devMigrationsSchema = t.object({
	id: t.number(),
	name: t.text(),
	snapshot: t.string(),
	created_at: t.string()
});
|
|
479
|
+
|
|
480
|
+
//#endregion
|
|
481
|
+
//#region ../../src/orm/errors/DbMigrationError.ts
|
|
482
|
+
/**
 * Error raised when applying database migrations fails.
 * Carries a fixed message and wraps the underlying failure as `cause`.
 */
var DbMigrationError = class extends DbError {
	constructor(cause) {
		super("Failed to migrate database", cause);
		this.name = "DbMigrationError";
	}
};
|
|
488
|
+
|
|
489
|
+
//#endregion
|
|
490
|
+
//#region ../../src/orm/types/byte.ts
|
|
491
|
+
/**
 * Postgres bytea type.
 * Drizzle custom column type mapping directly to the raw `bytea` data type;
 * no driver-side value conversion is applied.
 */
const byte = customType({ dataType: () => "bytea" });
|
|
495
|
+
|
|
496
|
+
//#endregion
|
|
497
|
+
//#region ../../src/orm/services/ModelBuilder.ts
|
|
498
|
+
/**
 * Abstract base class for transforming Alepha Primitives (Entity, Sequence, etc...)
 * into drizzle models (tables, enums, sequences, etc...).
 */
var ModelBuilder = class {
	/**
	 * Convert camelCase to snake_case for column names.
	 *
	 * @param str - Field name, e.g. "createdAt".
	 * @returns Snake-cased name, e.g. "created_at". Empty input is returned
	 *   unchanged instead of throwing on `str[0]`.
	 */
	toColumnName(str) {
		if (!str) return str;
		return str[0].toLowerCase() + str.slice(1).replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
	}
	/**
	 * Build the table configuration function for any database.
	 * This includes indexes, foreign keys, constraints, and custom config.
	 *
	 * @param entity - The entity primitive
	 * @param builders - Database-specific builder functions
	 *   ({ index, uniqueIndex, unique, check, foreignKey })
	 * @param tableResolver - Function to resolve entity references to table columns
	 * @param customConfigHandler - Optional handler for custom config
	 * @returns A `(self) => configs[]` callback for the table definition, or
	 *   undefined when the entity declares no indexes/FKs/constraints/config.
	 */
	buildTableConfig(entity, builders, tableResolver, customConfigHandler) {
		if (!entity.options.indexes && !entity.options.foreignKeys && !entity.options.constraints && !entity.options.config) return;
		return (self) => {
			const configs = [];
			if (entity.options.indexes) {
				for (const indexDef of entity.options.indexes) {
					if (typeof indexDef === "string") {
						// Shorthand: a bare field name becomes a non-unique index.
						const columnName = this.toColumnName(indexDef);
						const indexName = `${entity.name}_${columnName}_idx`;
						if (self[indexDef]) configs.push(builders.index(indexName).on(self[indexDef]));
					} else if (typeof indexDef === "object" && indexDef !== null) {
						if ("column" in indexDef) {
							const columnName = this.toColumnName(indexDef.column);
							const indexName = indexDef.name || `${entity.name}_${columnName}_idx`;
							if (self[indexDef.column]) {
								if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(self[indexDef.column]));
								else configs.push(builders.index(indexName).on(self[indexDef.column]));
							}
						} else if ("columns" in indexDef) {
							const columnNames = indexDef.columns.map((col) => this.toColumnName(col));
							const indexName = indexDef.name || `${entity.name}_${columnNames.join("_")}_idx`;
							const cols = indexDef.columns.map((col) => self[col]).filter(Boolean);
							// Only emit the composite index when every column resolved.
							if (cols.length === indexDef.columns.length) {
								if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(...cols));
								else configs.push(builders.index(indexName).on(...cols));
							}
						}
					}
				}
			}
			if (entity.options.foreignKeys) {
				for (const fkDef of entity.options.foreignKeys) {
					const columnNames = fkDef.columns.map((col) => this.toColumnName(col));
					const cols = fkDef.columns.map((col) => self[col]).filter(Boolean);
					if (cols.length === fkDef.columns.length) {
						const fkName = fkDef.name || `${entity.name}_${columnNames.join("_")}_fk`;
						const foreignColumns = fkDef.foreignColumns.map((colRef) => {
							// colRef is lazy to allow circular entity references.
							const entityCol = colRef();
							if (!entityCol || !entityCol.entity || !entityCol.name) throw new Error(`Invalid foreign column reference in ${entity.name}`);
							if (tableResolver) {
								const foreignTable = tableResolver(entityCol.entity.name);
								if (!foreignTable) throw new Error(`Foreign table ${entityCol.entity.name} not found for ${entity.name}`);
								return foreignTable[entityCol.name];
							}
							return entityCol;
						});
						configs.push(builders.foreignKey({
							name: fkName,
							columns: cols,
							foreignColumns
						}));
					}
				}
			}
			if (entity.options.constraints) {
				for (const constraintDef of entity.options.constraints) {
					const columnNames = constraintDef.columns.map((col) => this.toColumnName(col));
					const cols = constraintDef.columns.map((col) => self[col]).filter(Boolean);
					if (cols.length === constraintDef.columns.length) {
						// `unique` and `check` are independent: a definition may emit both.
						if (constraintDef.unique) {
							const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_unique`;
							configs.push(builders.unique(constraintName).on(...cols));
						}
						if (constraintDef.check) {
							const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_check`;
							configs.push(builders.check(constraintName, constraintDef.check));
						}
					}
				}
			}
			if (entity.options.config && customConfigHandler) configs.push(...customConfigHandler(entity.options.config, self));
			else if (entity.options.config) {
				const customConfigs = entity.options.config(self);
				if (Array.isArray(customConfigs)) configs.push(...customConfigs);
			}
			return configs;
		};
	}
};
|
|
587
|
+
|
|
588
|
+
//#endregion
|
|
589
|
+
//#region ../../src/orm/services/PostgresModelBuilder.ts
|
|
590
|
+
/**
 * PostgreSQL model builder: turns entity schemas into drizzle pg tables,
 * enums and sequences, scoped to an optional Postgres schema (namespace).
 */
var PostgresModelBuilder = class extends ModelBuilder {
	// Cache of pgSchema namespaces, keyed by schema name ("public" excluded).
	schemas = /* @__PURE__ */ new Map();
	/**
	 * Resolve the drizzle namespace for a schema name. "public" uses the
	 * top-level pgEnum/pgTable helpers; anything else gets a cached pgSchema.
	 * NOTE(review): the "public" fallback object only exposes `enum` and
	 * `table` — `buildSequence` below calls `nsp.sequence`, which would be
	 * undefined for the public schema; confirm sequences are never built
	 * without an explicit schema.
	 */
	getPgSchema(name) {
		if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
		const nsp = name !== "public" ? this.schemas.get(name) : {
			enum: pgEnum,
			table: pgTable
		};
		if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
		return nsp;
	}
	// Build (once) the drizzle table for an entity and register it in options.tables.
	buildTable(entity, options) {
		const tableName = entity.name;
		if (options.tables.has(tableName)) return;
		const nsp = this.getPgSchema(options.schema);
		const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
		const configFn = this.getTableConfig(entity, options.tables);
		const table = nsp.table(tableName, columns, configFn);
		options.tables.set(tableName, table);
	}
	// Build (once) a pg sequence and register it in options.sequences.
	buildSequence(sequence, options) {
		const sequenceName = sequence.name;
		if (options.sequences.has(sequenceName)) return;
		const nsp = this.getPgSchema(options.schema);
		options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
	}
	/**
	 * Get PostgreSQL-specific config builder for the table.
	 */
	getTableConfig(entity, tables) {
		const pgBuilders = {
			index,
			uniqueIndex,
			unique,
			check,
			foreignKey
		};
		const tableResolver = (entityName) => {
			return tables.get(entityName);
		};
		return this.buildTableConfig(entity, pgBuilders, tableResolver);
	}
	// Map every property of an entity schema to a drizzle pg column,
	// applying defaults, primary key, lazy references and NOT NULL.
	schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
		return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
			let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
			if ("default" in value && value.default != null) col = col.default(value.default);
			if (PG_PRIMARY_KEY in value) col = col.primaryKey();
			if (PG_REF in value) {
				const config = value[PG_REF];
				// Lazy resolution so the referenced table may be built later.
				col = col.references(() => {
					const ref = config.ref();
					const table = tables.get(ref.entity.name);
					if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
					const target = table[ref.name];
					if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
					return target;
				}, config.actions);
			}
			if (schema$1.required?.includes(key)) col = col.notNull();
			return {
				...columns,
				[key]: col
			};
		}, {});
	};
	// Map one schema field to a pg column builder, based on its JSON-schema type.
	mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
		const key = this.toColumnName(fieldName);
		// Unwrap `T | null` unions to the non-null member.
		if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
		if (t.schema.isInteger(value)) {
			if (PG_SERIAL in value) return pg$2.serial(key);
			if (PG_IDENTITY in value) {
				const options = value[PG_IDENTITY];
				// NOTE(review): unlike serial above, identity columns are built
				// without the explicit `key` name — drizzle then derives the
				// column name from the property key, bypassing the snake_case
				// mapping. Confirm this is intended.
				if (options.mode === "byDefault") return pg$2.integer().generatedByDefaultAsIdentity(options);
				return pg$2.integer().generatedAlwaysAsIdentity(options);
			}
			return pg$2.integer(key);
		}
		if (t.schema.isBigInt(value)) {
			// NOTE(review): a BigInt schema without PG_IDENTITY falls through this
			// branch entirely and will typically reach the final throw — confirm
			// that is the intended behavior.
			if (PG_IDENTITY in value) {
				const options = value[PG_IDENTITY];
				if (options.mode === "byDefault") return pg$2.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
				return pg$2.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
			}
		}
		if (t.schema.isNumber(value)) {
			if (PG_IDENTITY in value) {
				const options = value[PG_IDENTITY];
				if (options.mode === "byDefault") return pg$2.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
				return pg$2.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
			}
			if (value.format === "int64") return pg$2.bigint(key, { mode: "number" });
			return pg$2.numeric(key);
		}
		if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
		if (t.schema.isBoolean(value)) return pg$2.boolean(key);
		// Objects/records are stored via the `schema` column helper (JSON-ish).
		if (t.schema.isObject(value)) return schema(key, value);
		if (t.schema.isRecord(value)) return schema(key, value);
		const isTypeEnum = (value$1) => t.schema.isUnsafe(value$1) && "type" in value$1 && value$1.type === "string" && "enum" in value$1 && Array.isArray(value$1.enum);
		if (t.schema.isArray(value)) {
			if (t.schema.isObject(value.items)) return schema(key, value);
			if (t.schema.isRecord(value.items)) return schema(key, value);
			if (t.schema.isString(value.items)) return pg$2.text(key).array();
			if (t.schema.isInteger(value.items)) return pg$2.integer(key).array();
			if (t.schema.isNumber(value.items)) return pg$2.numeric(key).array();
			if (t.schema.isBoolean(value.items)) return pg$2.boolean(key).array();
			if (isTypeEnum(value.items)) return pg$2.text(key).array();
		}
		if (isTypeEnum(value)) {
			if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
			if (PG_ENUM in value && value[PG_ENUM]) {
				const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
				if (enums.has(enumName)) {
					// Same enum name must always carry the same value list.
					const values = enums.get(enumName).enumValues.join(",");
					const newValues = value.enum.join(",");
					if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
				}
				// NOTE(review): the enum object is re-created and overwritten even
				// when it already exists with identical values — confirm earlier
				// tables referencing the previous object are unaffected.
				enums.set(enumName, nsp.enum(enumName, value.enum));
				return enums.get(enumName)(key);
			}
			// Enum without an explicit pg enum: stored as plain text.
			return this.mapStringToColumn(key, value);
		}
		throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
	};
	/**
	 * Map a string to a PG column.
	 *
	 * @param key The key of the field.
	 * @param value The value of the field.
	 */
	mapStringToColumn = (key, value) => {
		if ("format" in value) {
			if (value.format === "uuid") {
				// Primary-key UUIDs are generated server-side by default.
				if (PG_PRIMARY_KEY in value) return pg$2.uuid(key).defaultRandom();
				return pg$2.uuid(key);
			}
			if (value.format === "byte") return byte(key);
			if (value.format === "date-time") {
				// created_at / updated_at both default to NOW(); updates to
				// updated_at are presumably handled elsewhere.
				if (PG_CREATED_AT in value) return pg$2.timestamp(key, {
					mode: "string",
					withTimezone: true
				}).defaultNow();
				if (PG_UPDATED_AT in value) return pg$2.timestamp(key, {
					mode: "string",
					withTimezone: true
				}).defaultNow();
				return pg$2.timestamp(key, {
					mode: "string",
					withTimezone: true
				});
			}
			if (value.format === "date") return pg$2.date(key, { mode: "string" });
		}
		return pg$2.text(key);
	};
};
|
|
745
|
+
|
|
746
|
+
//#endregion
|
|
747
|
+
//#region ../../src/orm/providers/drivers/BunPostgresProvider.ts
|
|
748
|
+
// Environment variables consumed by BunPostgresProvider (both optional here;
// DATABASE_URL is validated lazily in the `url` getter).
const envSchema$2 = t.object({
	DATABASE_URL: t.optional(t.text()),
	POSTGRES_SCHEMA: t.optional(t.text())
});
|
|
752
|
+
/**
 * Bun PostgreSQL provider using Drizzle ORM with Bun's native SQL client.
 *
 * This provider uses Bun's built-in SQL class for PostgreSQL connections,
 * which provides excellent performance on the Bun runtime.
 *
 * @example
 * ```ts
 * // Set DATABASE_URL environment variable
 * // DATABASE_URL=postgres://user:password@localhost:5432/database
 *
 * // Or configure programmatically
 * alepha.with({
 *   provide: DatabaseProvider,
 *   use: BunPostgresProvider,
 * });
 * ```
 */
var BunPostgresProvider = class extends DatabaseProvider {
	log = $logger();
	env = $env(envSchema$2);
	kit = $inject(DrizzleKitProvider);
	builder = $inject(PostgresModelBuilder);
	// Raw Bun.SQL client; undefined until connect().
	client;
	// Drizzle database wrapper around `client`; undefined until connect().
	bunDb;
	dialect = "postgresql";
	get name() {
		return "postgres";
	}
	/**
	 * In testing mode, the schema name will be generated and deleted after the test.
	 */
	schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
	// Connection URL; DATABASE_URL is mandatory for this provider.
	get url() {
		if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
		return this.env.DATABASE_URL;
	}
	/**
	 * Execute a SQL statement.
	 * NOTE(review): `db.execute` presumably returns a promise; this try/catch
	 * only wraps synchronous throws — async rejections propagate without the
	 * DbError wrapper. Confirm whether that is intended.
	 */
	execute(statement) {
		try {
			return this.db.execute(statement);
		} catch (error) {
			throw new DbError("Error executing statement", error);
		}
	}
	/**
	 * Get Postgres schema used by this provider.
	 * Precedence: test schema > POSTGRES_SCHEMA env > "public".
	 */
	get schema() {
		if (this.schemaForTesting) return this.schemaForTesting;
		if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
		return "public";
	}
	/**
	 * Get the Drizzle Postgres database instance.
	 */
	get db() {
		if (!this.bunDb) throw new AlephaError("Database not initialized");
		return this.bunDb;
	}
	// Run drizzle SQL-file migrations from a folder (lazy-loads the migrator).
	async executeMigrations(migrationsFolder) {
		const { migrate: migrate$1 } = await import("drizzle-orm/bun-sql/migrator");
		await migrate$1(this.bunDb, { migrationsFolder });
	}
	// On app start: connect, then (outside serverless) run migrations under a lock.
	onStart = $hook({
		on: "start",
		handler: async () => {
			await this.connect();
			if (!this.alepha.isServerless()) try {
				await this.migrateLock.run();
			} catch (error) {
				throw new DbMigrationError(error);
			}
		}
	});
	// On app stop: drop the disposable test schema (if any), then close.
	onStop = $hook({
		on: "stop",
		handler: async () => {
			if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
				// Defense in depth: the name is interpolated as raw SQL below.
				if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
				this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
				await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
				this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
			}
			await this.close();
		}
	});
	// Open the Bun.SQL connection, verify it with SELECT 1, wrap in drizzle.
	async connect() {
		this.log.debug("Connect ..");
		if (typeof Bun === "undefined") throw new AlephaError("BunPostgresProvider requires the Bun runtime. Use NodePostgresProvider for Node.js.");
		const { drizzle: drizzle$1 } = await import("drizzle-orm/bun-sql");
		this.client = new Bun.SQL(this.url);
		await this.client.unsafe("SELECT 1");
		this.bunDb = drizzle$1({
			client: this.client,
			logger: { logQuery: (query, params) => {
				this.log.trace(query, { params });
			} }
		});
		this.log.info("Connection OK");
	}
	// Close the client and clear both handles; safe to call when not connected.
	async close() {
		if (this.client) {
			this.log.debug("Close...");
			await this.client.close();
			this.client = void 0;
			this.bunDb = void 0;
			this.log.info("Connection closed");
		}
	}
	// Migration entry point guarded by a distributed lock (see onStart).
	migrateLock = $lock({ handler: async () => {
		await this.migrate();
	} });
};
|
|
868
|
+
|
|
869
|
+
//#endregion
|
|
870
|
+
//#region ../../src/orm/services/SqliteModelBuilder.ts
|
|
871
|
+
/**
 * SQLite model builder: turns entity schemas into drizzle sqlite tables.
 * Types without a native SQLite representation (objects, arrays, bytes)
 * are stored as JSON text; booleans and dates as integers.
 */
var SqliteModelBuilder = class extends ModelBuilder {
	// Build (once) the drizzle sqlite table and register it in options.tables.
	buildTable(entity, options) {
		const tableName = entity.name;
		if (options.tables.has(tableName)) return;
		const table = sqliteTable(tableName, this.schemaToSqliteColumns(tableName, entity.schema, options.enums, options.tables), this.getTableConfig(entity, options.tables));
		options.tables.set(tableName, table);
	}
	// Sequences are a Postgres feature; always rejected here.
	buildSequence(sequence, options) {
		throw new AlephaError("SQLite does not support sequences");
	}
	/**
	 * Get SQLite-specific config builder for the table.
	 */
	getTableConfig(entity, tables) {
		const sqliteBuilders = {
			index: index$1,
			uniqueIndex: uniqueIndex$1,
			unique: unique$1,
			check: check$1,
			foreignKey: foreignKey$1
		};
		const tableResolver = (entityName) => {
			return tables.get(entityName);
		};
		return this.buildTableConfig(entity, sqliteBuilders, tableResolver, (config, self) => {
			const customConfigs = config(self);
			return Array.isArray(customConfigs) ? customConfigs : [];
		});
	}
	// Map every schema property to a sqlite column, applying defaults,
	// primary key, lazy references and NOT NULL (mirrors the pg builder).
	schemaToSqliteColumns = (tableName, schema$1, enums, tables) => {
		return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
			let col = this.mapFieldToSqliteColumn(tableName, key, value, enums);
			if ("default" in value && value.default != null) col = col.default(value.default);
			if (PG_PRIMARY_KEY in value) col = col.primaryKey();
			if (PG_REF in value) {
				const config = value[PG_REF];
				// Lazy resolution so the referenced table may be built later.
				col = col.references(() => {
					const ref = config.ref();
					const table = tables.get(ref.entity.name);
					if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
					const target = table[ref.name];
					if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
					return target;
				}, config.actions);
			}
			if (schema$1.required?.includes(key)) col = col.notNull();
			return {
				...columns,
				[key]: col
			};
		}, {});
	};
	// Map one schema field to a sqlite column builder.
	mapFieldToSqliteColumn = (tableName, fieldName, value, enums) => {
		const key = this.toColumnName(fieldName);
		// Unwrap `T | null` unions to the non-null member.
		if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
		if (t.schema.isInteger(value)) {
			// Serial/identity map to sqlite's autoincrement integer primary key.
			if (PG_SERIAL in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
			return pg$1.integer(key);
		}
		if (t.schema.isBigInt(value)) {
			if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
			return pg$1.integer(key, { mode: "number" });
		}
		if (t.schema.isNumber(value)) {
			if (PG_IDENTITY in value) return pg$1.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
			return pg$1.numeric(key);
		}
		if (t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
		if (t.schema.isBoolean(value)) return this.sqliteBool(key, value);
		if (t.schema.isObject(value)) return this.sqliteJson(key, value);
		if (t.schema.isRecord(value)) return this.sqliteJson(key, value);
		if (t.schema.isAny(value)) return this.sqliteJson(key, value);
		if (t.schema.isArray(value)) {
			// All array item types are serialized to JSON text in SQLite.
			if (t.schema.isObject(value.items)) return this.sqliteJson(key, value);
			if (t.schema.isRecord(value.items)) return this.sqliteJson(key, value);
			if (t.schema.isAny(value.items)) return this.sqliteJson(key, value);
			if (t.schema.isString(value.items)) return this.sqliteJson(key, value);
			if (t.schema.isInteger(value.items)) return this.sqliteJson(key, value);
			if (t.schema.isNumber(value.items)) return this.sqliteJson(key, value);
			if (t.schema.isBoolean(value.items)) return this.sqliteJson(key, value);
		}
		// Unsafe string-typed schemas (e.g. raw enums) are treated as strings.
		if (t.schema.isUnsafe(value) && "type" in value && value.type === "string") return this.mapStringToSqliteColumn(key, value);
		throw new Error(`Unsupported schema for field '${tableName}.${fieldName}' (schema: ${JSON.stringify(value)})`);
	};
	// Map a string-typed field to a sqlite column based on its `format`.
	mapStringToSqliteColumn = (key, value) => {
		if (value.format === "uuid") {
			// Primary-key UUIDs are generated client-side (no server default in sqlite).
			if (PG_PRIMARY_KEY in value) return pg$1.text(key).primaryKey().$defaultFn(() => randomUUID());
			return pg$1.text(key);
		}
		if (value.format === "byte") return this.sqliteJson(key, value);
		if (value.format === "date-time") {
			// Stored as epoch milliseconds; default is the current time in ms.
			if (PG_CREATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
			if (PG_UPDATED_AT in value) return this.sqliteDateTime(key, {}).default(sql$1`(unixepoch('subsec') * 1000)`);
			return this.sqliteDateTime(key, {});
		}
		if (value.format === "date") return this.sqliteDate(key, {});
		return pg$1.text(key);
	};
	// JSON stored as text; parsed back on read when the driver returns a string.
	sqliteJson = (name, document) => pg$1.customType({
		dataType: () => "text",
		toDriver: (value) => JSON.stringify(value),
		fromDriver: (value) => {
			return value && typeof value === "string" ? JSON.parse(value) : value;
		}
	})(name, { document }).$type();
	// Date-time stored as epoch ms integer; surfaced as an ISO string.
	sqliteDateTime = pg$1.customType({
		dataType: () => "integer",
		toDriver: (value) => new Date(value).getTime(),
		fromDriver: (value) => {
			return new Date(value).toISOString();
		}
	});
	// Boolean stored as 0/1 integer.
	sqliteBool = pg$1.customType({
		dataType: () => "integer",
		toDriver: (value) => value ? 1 : 0,
		fromDriver: (value) => value === 1
	});
	// Date stored as epoch ms integer; surfaced as "YYYY-MM-DD".
	sqliteDate = pg$1.customType({
		dataType: () => "integer",
		toDriver: (value) => new Date(value).getTime(),
		fromDriver: (value) => {
			return new Date(value).toISOString().split("T")[0];
		}
	});
};
|
|
996
|
+
|
|
997
|
+
//#endregion
|
|
998
|
+
//#region ../../src/orm/providers/drivers/BunSqliteProvider.ts
|
|
999
|
+
// Environment variables consumed by BunSqliteProvider; DATABASE_URL is the
// fallback when no explicit `path` option is provided.
const envSchema$1 = t.object({ DATABASE_URL: t.optional(t.text()) });
/**
 * Configuration options for the Bun SQLite database provider.
 */
const bunSqliteOptions = $atom({
	name: "alepha.postgres.bun-sqlite.options",
	schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
	default: {}
});
|
|
1008
|
+
/**
 * Bun SQLite provider using Drizzle ORM with Bun's native SQLite client.
 *
 * This provider uses Bun's built-in `bun:sqlite` for SQLite connections,
 * which provides excellent performance on the Bun runtime.
 *
 * @example
 * ```ts
 * // Set DATABASE_URL environment variable
 * // DATABASE_URL=sqlite://./my-database.db
 *
 * // Or configure programmatically
 * alepha.with({
 *   provide: DatabaseProvider,
 *   use: BunSqliteProvider,
 * });
 *
 * // Or use options atom
 * alepha.store.mut(bunSqliteOptions, (old) => ({
 *   ...old,
 *   path: ":memory:",
 * }));
 * ```
 */
var BunSqliteProvider = class extends DatabaseProvider {
	kit = $inject(DrizzleKitProvider);
	log = $logger();
	env = $env(envSchema$1);
	builder = $inject(SqliteModelBuilder);
	options = $use(bunSqliteOptions);
	// Raw bun:sqlite Database handle; undefined until onStart.
	sqlite;
	// Drizzle wrapper around `sqlite`; undefined until onStart.
	bunDb;
	get name() {
		return "sqlite";
	}
	dialect = "sqlite";
	/**
	 * Database location. Precedence: options.path > DATABASE_URL >
	 * ":memory:" (tests/serverless) > a file under node_modules/.alepha.
	 */
	get url() {
		const path = this.options.path ?? this.env.DATABASE_URL;
		if (path) {
			// Common misconfiguration guard: a postgres URL reached the sqlite provider.
			if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
			return path;
		}
		if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
		else return "node_modules/.alepha/bun-sqlite.db";
	}
	// Drizzle database instance; throws until onStart has run.
	get db() {
		if (!this.bunDb) throw new AlephaError("Database not initialized");
		return this.bunDb;
	}
	// Execute a query and return all rows.
	// NOTE(review): unlike `db`, this dereferences `this.bunDb` without the
	// initialization check — confirm callers cannot reach it before onStart.
	async execute(query) {
		return this.bunDb.all(query);
	}
	// On app start: open the database file (creating its directory if needed),
	// wrap it in drizzle, then run migrations.
	onStart = $hook({
		on: "start",
		handler: async () => {
			if (typeof Bun === "undefined") throw new AlephaError("BunSqliteProvider requires the Bun runtime. Use NodeSqliteProvider for Node.js.");
			const { Database } = await import("bun:sqlite");
			const { drizzle: drizzle$1 } = await import("drizzle-orm/bun-sqlite");
			// Strip either "sqlite://" or "sqlite:" URL prefixes to a plain path.
			const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
			if (filepath !== ":memory:" && filepath !== "") {
				const dirname = filepath.split("/").slice(0, -1).join("/");
				if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
			}
			this.sqlite = new Database(filepath);
			this.bunDb = drizzle$1({
				client: this.sqlite,
				logger: { logQuery: (query, params) => {
					this.log.trace(query, { params });
				} }
			});
			await this.migrate();
			this.log.info(`Using Bun SQLite database at ${filepath}`);
		}
	});
	// On app stop: close the handle and clear both references.
	onStop = $hook({
		on: "stop",
		handler: async () => {
			if (this.sqlite) {
				this.log.debug("Closing Bun SQLite connection...");
				this.sqlite.close();
				this.sqlite = void 0;
				this.bunDb = void 0;
				this.log.info("Bun SQLite connection closed");
			}
		}
	});
	// Run drizzle SQL-file migrations from a folder (lazy-loads the migrator).
	async executeMigrations(migrationsFolder) {
		const { migrate: migrate$1 } = await import("drizzle-orm/bun-sqlite/migrator");
		await migrate$1(this.bunDb, { migrationsFolder });
	}
};
|
|
1099
|
+
|
|
1100
|
+
//#endregion
|
|
1101
|
+
//#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
|
|
1102
|
+
/**
 * Cloudflare D1 SQLite provider using Drizzle ORM.
 *
 * Requires a D1 binding: the binding name is parsed from DATABASE_URL
 * ("cloudflare-d1://name:id") and resolved against the Cloudflare Workers
 * environment stored in the Alepha store under "cloudflare.env".
 *
 * @example
 * ```ts
 * // In your Cloudflare Worker
 * alepha.set(cloudflareD1Options, { binding: env.DB });
 * ```
 */
var CloudflareD1Provider = class extends DatabaseProvider {
	kit = $inject(DrizzleKitProvider);
	log = $logger();
	builder = $inject(SqliteModelBuilder);
	env = $env(t.object({ DATABASE_URL: t.string({ description: "Expect to be 'cloudflare-d1://name:id'" }) }));
	// D1 binding resolved from the Workers environment during startup.
	d1;
	// Drizzle database instance wrapping the D1 binding.
	drizzleDb;
	get name() {
		return "sqlite";
	}
	get driver() {
		return "d1";
	}
	dialect = "sqlite";
	get url() {
		return this.env.DATABASE_URL;
	}
	get db() {
		if (!this.drizzleDb) throw new AlephaError("D1 database not initialized");
		return this.drizzleDb;
	}
	async execute(query) {
		const result = await this.db.run(query);
		return result.rows;
	}
	onStart = $hook({
		on: "start",
		handler: async () => {
			// "cloudflare-d1://name:id" -> binding name is the part before ":".
			const bindingName = this.env.DATABASE_URL.replace("cloudflare-d1://", "").split(":")[0];
			const workerEnv = this.alepha.store.get("cloudflare.env");
			if (!workerEnv) throw new AlephaError("Cloudflare Workers environment not found in Alepha store under 'cloudflare.env'.");
			const d1Binding = workerEnv[bindingName];
			if (!d1Binding) throw new AlephaError(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
			this.d1 = d1Binding;
			const { drizzle: drizzle$1 } = await import("drizzle-orm/d1");
			this.drizzleDb = drizzle$1(this.d1);
			await this.migrate();
			this.log.info("Using Cloudflare D1 database");
		}
	});
	async executeMigrations(migrationsFolder) {
		const migrator = await import("drizzle-orm/d1/migrator");
		await migrator.migrate(this.db, { migrationsFolder });
	}
	/**
	 * Override development migration to skip sync (not supported on D1).
	 * D1 requires proper migrations to be applied.
	 */
	async runDevelopmentMigration(migrationsFolder) {
		await this.executeMigrations(migrationsFolder);
	}
	/**
	 * Override test migration to run migrations instead of sync.
	 * Failures are logged (best effort) rather than thrown so test startup
	 * is not aborted.
	 */
	async runTestMigration() {
		const folder = this.getMigrationsFolder();
		try {
			await this.executeMigrations(folder);
		} catch {
			this.log.warn("D1 migrations failed in test environment - ensure migrations exist");
		}
	}
};
|
|
1178
|
+
|
|
1179
|
+
//#endregion
|
|
1180
|
+
//#region ../../src/orm/providers/drivers/PglitePostgresProvider.ts
|
|
1181
|
+
const envSchema = t.object({ DATABASE_URL: t.optional(t.text()) });
/**
 * PGlite Postgres provider using Drizzle ORM.
 *
 * Loads the optional `@electric-sql/pglite` package at startup and persists
 * data either in-memory (tests / ":memory:" URLs) or in a local directory.
 */
var PglitePostgresProvider = class PglitePostgresProvider extends DatabaseProvider {
	/** Require @electric-sql/pglite if installed; returns undefined otherwise. */
	static importPglite() {
		try {
			return createRequire(import.meta.url)("@electric-sql/pglite");
		} catch {
			return void 0;
		}
	}
	env = $env(envSchema);
	log = $logger();
	kit = $inject(DrizzleKitProvider);
	builder = $inject(PostgresModelBuilder);
	// Raw PGlite client (set on start, cleared on stop).
	client;
	// Drizzle database instance wrapping `client`.
	pglite;
	get name() {
		return "postgres";
	}
	get driver() {
		return "pglite";
	}
	dialect = "postgresql";
	/**
	 * Resolve the PGlite data location: DATABASE_URL if set (with ":memory:"
	 * and "file://" handling), otherwise ":memory:" in tests or a local
	 * directory fallback.
	 */
	get url() {
		const configured = this.env.DATABASE_URL;
		if (!configured) {
			return this.alepha.isTest() ? ":memory:" : "node_modules/.alepha/pglite";
		}
		if (configured.includes(":memory:")) return ":memory:";
		if (configured.startsWith("file://")) return configured.replace("file://", "");
		return configured;
	}
	get db() {
		if (!this.pglite) throw new AlephaError("Database not initialized");
		return this.pglite;
	}
	async execute(statement) {
		const result = await this.db.execute(statement);
		return result.rows;
	}
	onStart = $hook({
		on: "start",
		handler: async () => {
			// Nothing to do when no models are registered for this provider.
			if (Object.keys(this.kit.getModels(this)).length === 0) return;
			const module = PglitePostgresProvider.importPglite();
			if (!module) throw new AlephaError("@electric-sql/pglite is not installed. Please install it to use the pglite driver.");
			const { drizzle: drizzle$1 } = createRequire(import.meta.url)("drizzle-orm/pglite");
			const path = this.url;
			if (path === ":memory:") {
				this.client = new module.PGlite();
			} else {
				// Best effort: create the data directory, ignore mkdir errors.
				await mkdir(path, { recursive: true }).catch(() => null);
				this.client = new module.PGlite(path);
			}
			this.pglite = drizzle$1({ client: this.client });
			await this.migrate();
			this.log.info(`Using PGlite database at ${path}`);
		}
	});
	onStop = $hook({
		on: "stop",
		handler: async () => {
			if (!this.client) return;
			this.log.debug("Closing PGlite connection...");
			await this.client.close();
			this.client = void 0;
			this.pglite = void 0;
			this.log.info("PGlite connection closed");
		}
	});
	async executeMigrations(migrationsFolder) {
		await migrate(this.db, { migrationsFolder });
	}
};
|
|
1250
|
+
|
|
1251
|
+
//#endregion
|
|
1252
|
+
//#region ../../src/orm/errors/DbConflictError.ts
|
|
1253
|
+
/**
 * Database error carrying HTTP status 409 (Conflict).
 * Only the name/status mapping is defined here; the concrete trigger
 * depends on the call site that throws it.
 */
var DbConflictError = class extends DbError {
	name = "DbConflictError";
	status = 409;
};
|
|
1257
|
+
|
|
1258
|
+
//#endregion
|
|
1259
|
+
//#region ../../src/orm/errors/DbEntityNotFoundError.ts
|
|
1260
|
+
/**
 * Database error thrown when a requested entity does not exist.
 * Carries HTTP status 404 (Not Found).
 */
var DbEntityNotFoundError = class extends DbError {
	name = "DbEntityNotFoundError";
	status = 404;
	/**
	 * @param entityName - table / entity name interpolated into the message.
	 */
	constructor(entityName) {
		super(`Entity from '${entityName}' was not found`);
	}
};
|
|
1267
|
+
|
|
1268
|
+
//#endregion
|
|
1269
|
+
//#region ../../src/orm/errors/DbVersionMismatchError.ts
|
|
1270
|
+
/**
 * Error thrown when there is a version mismatch.
 * It's thrown by {@link Repository#save} when the updated entity version does not match the one in the database.
 * This is used for optimistic concurrency control.
 */
var DbVersionMismatchError = class extends DbError {
	name = "DbVersionMismatchError";
	/**
	 * @param table - table name where the mismatch occurred.
	 * @param id - primary key of the row whose version did not match.
	 */
	constructor(table, id) {
		super(`Version mismatch for table '${table}' and id '${id}'`);
	}
};
|
|
1281
|
+
|
|
1282
|
+
//#endregion
|
|
1283
|
+
//#region ../../src/orm/helpers/pgAttr.ts
|
|
1284
|
+
/**
 * Decorates a typebox schema with a Postgres attribute.
 *
 * > It's just a fancy way to add Symbols to a field.
 *
 * The schema object is mutated in place and returned, so calls can be
 * inlined in schema definitions. When no value is given, an empty object
 * marks the attribute as present.
 *
 * @example
 * ```ts
 * import { t } from "alepha";
 * import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
 *
 * export const updatedAtSchema = pgAttr(
 *   t.datetime(), PG_UPDATED_AT,
 * );
 * ```
 */
const pgAttr = (type, attr, value) => {
	type[attr] = value ?? {};
	return type;
};
|
|
1303
|
+
/**
 * Retrieves the fields of a schema that have a specific attribute.
 *
 * Walks the schema's `properties` and collects every field whose schema
 * object carries the attribute key (string or Symbol), returning the field
 * schema, its property key, and the attribute payload.
 */
const getAttrFields = (schema$1, name) => {
	const fields = [];
	for (const [key, value] of Object.entries(schema$1.properties)) {
		if (!(name in value)) continue;
		fields.push({
			type: value,
			key,
			data: value[name]
		});
	}
	return fields;
};
|
|
1318
|
+
|
|
1319
|
+
//#endregion
|
|
1320
|
+
//#region ../../src/orm/services/PgRelationManager.ts
|
|
1321
|
+
var PgRelationManager = class {
	/**
	 * Recursively build joins for the query builder based on the relations map.
	 * Appends one descriptor per join to `joins` (flat list, linked via `parent`).
	 */
	buildJoins(provider, builder, joins, withRelations, table, parentKey) {
		for (const [key, join] of Object.entries(withRelations)) {
			const joined = provider.table(join.join);
			// `on` is either a raw SQL wrapper or a [localColumn, foreignColumn] pair.
			const condition = isSQLWrapper$1(join.on) ? join.on : sql$1`${table[join.on[0]]} = ${joined[join.on[1].name]}`;
			switch (join.type) {
				case "right":
					builder.rightJoin(joined, condition);
					break;
				case "inner":
					builder.innerJoin(joined, condition);
					break;
				default:
					builder.leftJoin(joined, condition);
			}
			joins.push({
				key,
				table: getTableName(joined),
				schema: join.join.schema,
				col: (name) => joined[name],
				parent: parentKey
			});
			if (join.with) {
				const childPath = parentKey ? `${parentKey}.${key}` : key;
				this.buildJoins(provider, builder, joins, join.with, joined, childPath);
			}
		}
	}
	/**
	 * Map a row with its joined relations based on the joins definition.
	 * Joined sub-objects whose columns are all null (unmatched left join)
	 * become undefined on the record.
	 */
	mapRowWithJoins(record, row, schema$1, joins, parentKey) {
		for (const join of joins) {
			if (join.parent !== parentKey) continue;
			const joinedData = row[join.table];
			if (this.isAllNull(joinedData)) {
				record[join.key] = void 0;
				continue;
			}
			record[join.key] = joinedData;
			const childPath = parentKey ? `${parentKey}.${join.key}` : join.key;
			this.mapRowWithJoins(record[join.key], row, schema$1, joins, childPath);
		}
		return record;
	}
	/**
	 * Check if all values in an object are null (indicates a left join with no match).
	 */
	isAllNull(obj) {
		if (obj == null) return true;
		if (typeof obj !== "object") return false;
		return Object.values(obj).every((val) => val === null);
	}
	/**
	 * Build a schema that includes all join properties recursively,
	 * attaching each joined schema as an optional property.
	 */
	buildSchemaWithJoins(baseSchema, joins, parentPath) {
		const result = Value.Clone(baseSchema);
		for (const join of joins) {
			if (join.parent !== parentPath) continue;
			const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
			const children = joins.filter((j) => j.parent === joinPath);
			const joinSchema = children.length > 0 ? this.buildSchemaWithJoins(join.schema, joins, joinPath) : join.schema;
			result.properties[join.key] = t.optional(joinSchema);
		}
		return result;
	}
};
|
|
1380
|
+
|
|
1381
|
+
//#endregion
|
|
1382
|
+
//#region ../../src/orm/services/QueryManager.ts
|
|
1383
|
+
/**
 * Translates JSON-style filter objects into Drizzle SQL conditions and
 * normalizes sort / pagination inputs for repositories.
 */
var QueryManager = class {
	alepha = $inject(Alepha);
	/**
	 * Convert a query object to a SQL query.
	 *
	 * Accepts a raw SQL wrapper (passed through), logical combinators
	 * ("and" / "or" / "not"), nested relation filters (matched against
	 * `options.joins`), or per-column value/operator objects. Returns the
	 * drizzle condition, or the result of `and()` over all collected
	 * conditions (which may be undefined when there are none).
	 */
	toSQL(query, options) {
		const { schema: schema$1, col, joins } = options;
		const conditions = [];
		// Raw SQL wrapper: use as-is.
		if (isSQLWrapper(query)) conditions.push(query);
		else {
			const keys = Object.keys(query);
			for (const key of keys) {
				const operator = query[key];
				// Nested plain object whose key matches a join => filter on the joined table.
				if (typeof query[key] === "object" && query[key] != null && !Array.isArray(query[key]) && joins?.length) {
					const matchingJoins = joins.filter((j) => j.key === key);
					if (matchingJoins.length > 0) {
						const join = matchingJoins[0];
						const joinPath = join.parent ? `${join.parent}.${key}` : key;
						// Re-root descendant joins so the recursive call sees paths
						// relative to this join.
						const recursiveJoins = joins.filter((j) => {
							if (!j.parent) return false;
							return j.parent === joinPath || j.parent.startsWith(`${joinPath}.`);
						}).map((j) => {
							const newParent = j.parent === joinPath ? void 0 : j.parent.substring(joinPath.length + 1);
							return {
								...j,
								parent: newParent
							};
						});
						const sql$2 = this.toSQL(query[key], {
							schema: join.schema,
							col: join.col,
							joins: recursiveJoins.length > 0 ? recursiveJoins : void 0,
							dialect: options.dialect
						});
						if (sql$2) conditions.push(sql$2);
						continue;
					}
				}
				// "and" / "or" take an array of sub-queries; note these return
				// immediately, ignoring any conditions collected so far.
				if (Array.isArray(operator)) {
					const operations = operator.map((it) => {
						if (isSQLWrapper(it)) return it;
						return this.toSQL(it, {
							schema: schema$1,
							col,
							joins,
							dialect: options.dialect
						});
					}).filter((it) => it != null);
					if (key === "and") return and(...operations);
					if (key === "or") return or(...operations);
				}
				// "not" negates a sub-query (also an early return).
				if (key === "not") {
					const where = this.toSQL(operator, {
						schema: schema$1,
						col,
						joins,
						dialect: options.dialect
					});
					if (where) return not(where);
				}
				// Plain column filter: value or operator object (falsy values are skipped).
				if (operator) {
					const column = col(key);
					const sql$2 = this.mapOperatorToSql(operator, column, schema$1, key, options.dialect);
					if (sql$2) conditions.push(sql$2);
				}
			}
		}
		if (conditions.length === 1) return conditions[0];
		return and(...conditions);
	}
	/**
	 * Check if an object has any filter operator properties.
	 */
	hasFilterOperatorProperties(obj) {
		if (!obj || typeof obj !== "object") return false;
		return [
			"eq",
			"ne",
			"gt",
			"gte",
			"lt",
			"lte",
			"inArray",
			"notInArray",
			"isNull",
			"isNotNull",
			"like",
			"notLike",
			"ilike",
			"notIlike",
			"contains",
			"startsWith",
			"endsWith",
			"between",
			"notBetween",
			"arrayContains",
			"arrayContained",
			"arrayOverlaps"
		].some((key) => key in obj);
	}
	/**
	 * Map a filter operator to a SQL query.
	 *
	 * Values are passed through the entity codec (encoder "drizzle") when a
	 * column schema is available; encoding failures fall back to the raw value.
	 * Returns undefined when no operator produced a condition.
	 */
	mapOperatorToSql(operator, column, columnSchema, columnName, dialect = "postgresql") {
		// Encode a single value via the field schema; best effort.
		const encodeValue = (value) => {
			if (value == null) return value;
			if (columnSchema && columnName) try {
				const fieldSchema = columnSchema.properties[columnName];
				if (fieldSchema) return this.alepha.codec.encode(fieldSchema, value, { encoder: "drizzle" });
			} catch (error) {}
			return value;
		};
		const encodeArray = (values) => {
			return values.map((v) => encodeValue(v));
		};
		// Bare value (or object with no operator keys) => simple equality.
		if (typeof operator !== "object" || operator == null || !this.hasFilterOperatorProperties(operator)) return eq(column, encodeValue(operator));
		const conditions = [];
		if (operator?.eq != null) conditions.push(eq(column, encodeValue(operator.eq)));
		if (operator?.ne != null) conditions.push(ne(column, encodeValue(operator.ne)));
		if (operator?.gt != null) conditions.push(gt(column, encodeValue(operator.gt)));
		if (operator?.gte != null) conditions.push(gte(column, encodeValue(operator.gte)));
		if (operator?.lt != null) conditions.push(lt(column, encodeValue(operator.lt)));
		if (operator?.lte != null) conditions.push(lte(column, encodeValue(operator.lte)));
		if (operator?.inArray != null) {
			if (!Array.isArray(operator.inArray) || operator.inArray.length === 0) throw new AlephaError("inArray operator requires at least one value");
			conditions.push(inArray(column, encodeArray(operator.inArray)));
		}
		if (operator?.notInArray != null) {
			if (!Array.isArray(operator.notInArray) || operator.notInArray.length === 0) throw new AlephaError("notInArray operator requires at least one value");
			conditions.push(notInArray(column, encodeArray(operator.notInArray)));
		}
		if (operator?.isNull != null) conditions.push(isNull(column));
		if (operator?.isNotNull != null) conditions.push(isNotNull(column));
		if (operator?.like != null) conditions.push(like(column, encodeValue(operator.like)));
		if (operator?.notLike != null) conditions.push(notLike(column, encodeValue(operator.notLike)));
		if (operator?.ilike != null) conditions.push(ilike(column, encodeValue(operator.ilike)));
		if (operator?.notIlike != null) conditions.push(notIlike(column, encodeValue(operator.notIlike)));
		// contains/startsWith/endsWith escape LIKE wildcards, then use ILIKE on
		// Postgres or LOWER(...) LIKE LOWER(...) on SQLite (which has no ILIKE).
		if (operator?.contains != null) {
			const escapedValue = String(operator.contains).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
			if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}%`)})`);
			else conditions.push(ilike(column, encodeValue(`%${escapedValue}%`)));
		}
		if (operator?.startsWith != null) {
			const escapedValue = String(operator.startsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
			if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`${escapedValue}%`)})`);
			else conditions.push(ilike(column, encodeValue(`${escapedValue}%`)));
		}
		if (operator?.endsWith != null) {
			const escapedValue = String(operator.endsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
			if (dialect === "sqlite") conditions.push(sql$1`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}`)})`);
			else conditions.push(ilike(column, encodeValue(`%${escapedValue}`)));
		}
		// NOTE(review): between/notBetween throw plain Error while inArray uses
		// AlephaError — presumably an unintentional inconsistency; confirm before changing.
		if (operator?.between != null) {
			if (!Array.isArray(operator.between) || operator.between.length !== 2) throw new Error("between operator requires exactly 2 values [min, max]");
			conditions.push(between(column, encodeValue(operator.between[0]), encodeValue(operator.between[1])));
		}
		if (operator?.notBetween != null) {
			if (!Array.isArray(operator.notBetween) || operator.notBetween.length !== 2) throw new Error("notBetween operator requires exactly 2 values [min, max]");
			conditions.push(notBetween(column, encodeValue(operator.notBetween[0]), encodeValue(operator.notBetween[1])));
		}
		if (operator?.arrayContains != null) conditions.push(arrayContains(column, encodeValue(operator.arrayContains)));
		if (operator?.arrayContained != null) conditions.push(arrayContained(column, encodeValue(operator.arrayContained)));
		if (operator?.arrayOverlaps != null) conditions.push(arrayOverlaps(column, encodeValue(operator.arrayOverlaps)));
		if (conditions.length === 0) return;
		if (conditions.length === 1) return conditions[0];
		return and(...conditions);
	}
	/**
	 * Parse pagination sort string to orderBy format.
	 * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
	 * - Columns separated by comma
	 * - Prefix with '-' for DESC direction
	 *
	 * @param sort Pagination sort string
	 * @returns OrderBy array or single object
	 */
	parsePaginationSort(sort) {
		const orderByClauses = sort.split(",").map((field) => field.trim()).map((field) => {
			if (field.startsWith("-")) return {
				column: field.substring(1),
				direction: "desc"
			};
			return {
				column: field,
				direction: "asc"
			};
		});
		return orderByClauses.length === 1 ? orderByClauses[0] : orderByClauses;
	}
	/**
	 * Normalize orderBy parameter to array format.
	 * Supports 3 modes:
	 * 1. String: "name" -> [{ column: "name", direction: "asc" }]
	 * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
	 * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
	 *
	 * @param orderBy The orderBy parameter
	 * @returns Normalized array of order by clauses
	 */
	normalizeOrderBy(orderBy) {
		if (typeof orderBy === "string") return [{
			column: orderBy,
			direction: "asc"
		}];
		if (!Array.isArray(orderBy) && typeof orderBy === "object") return [{
			column: orderBy.column,
			direction: orderBy.direction ?? "asc"
		}];
		if (Array.isArray(orderBy)) return orderBy.map((item) => ({
			column: item.column,
			direction: item.direction ?? "asc"
		}));
		return [];
	}
	/**
	 * Create a pagination object.
	 *
	 * @deprecated Use `createPagination` from alepha instead.
	 * This method now delegates to the framework-level helper.
	 *
	 * @param entities The entities to paginate.
	 * @param limit The limit of the pagination.
	 * @param offset The offset of the pagination.
	 * @param sort Optional sort metadata to include in response.
	 */
	createPagination(entities, limit = 10, offset = 0, sort) {
		return createPagination(entities, limit, offset, sort);
	}
};
|
|
1612
|
+
|
|
1613
|
+
//#endregion
|
|
1614
|
+
//#region ../../src/orm/services/Repository.ts
|
|
1615
|
+
var Repository = class {
|
|
1616
|
+
	// Entity descriptor (name + schema) this repository manages.
	entity;
	// Database provider instance resolved from the container in the constructor.
	provider;
	log = $logger();
	relationManager = $inject(PgRelationManager);
	queryManager = $inject(QueryManager);
	dateTimeProvider = $inject(DateTimeProvider);
	alepha = $inject(Alepha);
	/**
	 * @param entity - entity definition (name + schema) to manage.
	 * @param provider - database provider class to resolve via the container;
	 *   defaults to the generic DatabaseProvider binding.
	 */
	constructor(entity, provider = DatabaseProvider) {
		this.entity = entity;
		this.provider = this.alepha.inject(provider);
		this.provider.registerEntity(entity);
	}
|
|
1628
|
+
	/**
	 * Represents the primary key of the table.
	 * - Key is the name of the primary key column.
	 * - Type is the type (TypeBox) of the primary key column.
	 *
	 * ID is mandatory. If the table does not have a primary key, it will throw an error.
	 */
	get id() {
		return this.getPrimaryKey(this.entity.schema);
	}
|
|
1638
|
+
	/**
	 * Get Drizzle table object.
	 * Resolved through the provider so the table matches the active dialect.
	 */
	get table() {
		return this.provider.table(this.entity);
	}
|
|
1644
|
+
	/**
	 * Get SQL table name. (from Drizzle table object)
	 * Note: this returns the entity's declared name directly.
	 */
	get tableName() {
		return this.entity.name;
	}
|
|
1650
|
+
	/**
	 * Getter for the database connection from the database provider.
	 * Throws (via the provider) when the database has not been started.
	 */
	get db() {
		return this.provider.db;
	}
|
|
1656
|
+
/**
|
|
1657
|
+
* Execute a SQL query.
|
|
1658
|
+
*
|
|
1659
|
+
* This method allows executing raw SQL queries against the database.
|
|
1660
|
+
* This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
|
|
1661
|
+
*
|
|
1662
|
+
* You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
|
|
1663
|
+
*
|
|
1664
|
+
* @example
|
|
1665
|
+
* ```ts
|
|
1666
|
+
* class App {
|
|
1667
|
+
* repository = $repository({ ... });
|
|
1668
|
+
* async getAdults() {
|
|
1669
|
+
* const users = repository.table; // Drizzle table object
|
|
1670
|
+
* await repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
|
|
1671
|
+
* // or better
|
|
1672
|
+
* await repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
|
|
1673
|
+
* }
|
|
1674
|
+
* }
|
|
1675
|
+
* ```
|
|
1676
|
+
*/
|
|
1677
|
+
async query(query, schema$1) {
|
|
1678
|
+
const raw = typeof query === "function" ? query(this.table, this.db) : query;
|
|
1679
|
+
if (typeof raw === "string" && raw.includes("[object Object]")) throw new AlephaError("Invalid SQL query. Did you forget to call the 'sql' function?");
|
|
1680
|
+
return (await this.provider.execute(raw)).map((it) => {
|
|
1681
|
+
return this.clean(this.mapRawFieldsToEntity(it), schema$1 ?? this.entity.schema);
|
|
1682
|
+
});
|
|
1683
|
+
}
|
|
1684
|
+
/**
|
|
1685
|
+
* Map raw database fields to entity fields. (handles column name differences)
|
|
1686
|
+
*/
|
|
1687
|
+
mapRawFieldsToEntity(row) {
|
|
1688
|
+
const entity = {};
|
|
1689
|
+
for (const key of Object.keys(row)) {
|
|
1690
|
+
entity[key] = row[key];
|
|
1691
|
+
for (const colKey of Object.keys(this.table)) if (this.table[colKey].name === key) {
|
|
1692
|
+
entity[colKey] = row[key];
|
|
1693
|
+
break;
|
|
1694
|
+
}
|
|
1695
|
+
}
|
|
1696
|
+
return entity;
|
|
1697
|
+
}
|
|
1698
|
+
	/**
	 * Get a Drizzle column from the table by his name.
	 * Throws AlephaError for unknown column names instead of returning undefined.
	 */
	col(name) {
		const column = this.table[name];
		if (!column) throw new AlephaError(`Invalid access. Column ${String(name)} not found in table ${this.tableName}`);
		return column;
	}
|
|
1706
|
+
	/**
	 * Run a transaction.
	 *
	 * With the pglite driver transactions are not supported: the callback is
	 * invoked immediately with a null transaction handle and a warning is logged.
	 */
	async transaction(transaction, config) {
		if (this.provider.driver === "pglite") {
			this.log.warn("Transactions are not supported with pglite driver");
			return await transaction(null);
		}
		this.log.debug(`Starting transaction on table ${this.tableName}`);
		return await this.db.transaction(transaction, config);
	}
|
|
1717
|
+
/**
|
|
1718
|
+
* Start a SELECT query on the table.
|
|
1719
|
+
*/
|
|
1720
|
+
rawSelect(opts = {}) {
|
|
1721
|
+
return (opts.tx ?? this.db).select().from(this.table);
|
|
1722
|
+
}
|
|
1723
|
+
/**
|
|
1724
|
+
* Start a SELECT DISTINCT query on the table.
|
|
1725
|
+
*/
|
|
1726
|
+
rawSelectDistinct(opts = {}, columns = []) {
|
|
1727
|
+
const db$1 = opts.tx ?? this.db;
|
|
1728
|
+
const table = this.table;
|
|
1729
|
+
const fields = {};
|
|
1730
|
+
for (const column of columns) if (typeof column === "string") fields[column] = this.col(column);
|
|
1731
|
+
return db$1.selectDistinct(fields).from(table);
|
|
1732
|
+
}
|
|
1733
|
+
/**
|
|
1734
|
+
* Start an INSERT query on the table.
|
|
1735
|
+
*/
|
|
1736
|
+
rawInsert(opts = {}) {
|
|
1737
|
+
return (opts.tx ?? this.db).insert(this.table);
|
|
1738
|
+
}
|
|
1739
|
+
/**
|
|
1740
|
+
* Start an UPDATE query on the table.
|
|
1741
|
+
*/
|
|
1742
|
+
rawUpdate(opts = {}) {
|
|
1743
|
+
return (opts.tx ?? this.db).update(this.table);
|
|
1744
|
+
}
|
|
1745
|
+
/**
|
|
1746
|
+
* Start a DELETE query on the table.
|
|
1747
|
+
*/
|
|
1748
|
+
rawDelete(opts = {}) {
|
|
1749
|
+
return (opts.tx ?? this.db).delete(this.table);
|
|
1750
|
+
}
|
|
1751
|
+
	/**
	 * Create a Drizzle `select` query based on a JSON query object.
	 *
	 * > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
	 *
	 * Emits "repository:read:before" / "repository:read:after" events around the
	 * query, builds joins from `query.with`, applies soft-delete filtering via
	 * `withDeletedAt`, and decodes rows (with joined schemas when applicable).
	 * Wraps any failure in DbError.
	 */
	async findMany(query = {}, opts = {}) {
		await this.alepha.events.emit("repository:read:before", {
			tableName: this.tableName,
			query
		});
		const columns = query.columns ?? query.distinct;
		const builder = query.distinct ? this.rawSelectDistinct(opts, query.distinct) : this.rawSelect(opts);
		const joins = [];
		if (query.with) this.relationManager.buildJoins(this.provider, builder, joins, query.with, this.table);
		const where = this.withDeletedAt(query.where ?? {}, opts);
		builder.where(() => this.toSQL(where, joins));
		if (query.offset) {
			builder.offset(query.offset);
			// NOTE(review): mutates the caller's query object. SQLite is given a
			// default LIMIT (1000) whenever OFFSET is used without one.
			if (this.provider.dialect === "sqlite" && !query.limit) query.limit = 1e3;
		}
		if (query.limit) builder.limit(query.limit);
		if (query.orderBy) {
			const orderByClauses = this.queryManager.normalizeOrderBy(query.orderBy);
			builder.orderBy(...orderByClauses.map((clause) => clause.direction === "desc" ? desc(this.col(clause.column)) : asc(this.col(clause.column))));
		}
		if (query.groupBy) builder.groupBy(...query.groupBy.map((key) => this.col(key)));
		// Row locking (FOR UPDATE / FOR SHARE), either as a string or { strength, config }.
		if (opts.for) {
			if (typeof opts.for === "string") builder.for(opts.for);
			else if (opts.for) builder.for(opts.for.strength, opts.for.config);
		}
		try {
			let rows = await builder.execute();
			let schema$1 = this.entity.schema;
			// Column projection narrows the decoding schema accordingly.
			if (columns) schema$1 = t.pick(schema$1, columns);
			// With joins, drizzle nests each table's columns under its table name;
			// fold joined tables back into the root record.
			if (joins.length) rows = rows.map((row) => {
				const rowSchema = {
					...schema$1,
					properties: { ...schema$1.properties }
				};
				return this.relationManager.mapRowWithJoins(row[this.tableName], row, rowSchema, joins);
			});
			rows = rows.map((row) => {
				if (joins.length) {
					const joinedSchema = this.relationManager.buildSchemaWithJoins(schema$1, joins);
					return this.cleanWithJoins(row, joinedSchema, joins);
				}
				return this.clean(row, schema$1);
			});
			await this.alepha.events.emit("repository:read:after", {
				tableName: this.tableName,
				query,
				entities: rows
			});
			return rows;
		} catch (error) {
			throw new DbError("Query select has failed", error);
		}
	}
|
|
1809
|
+
/**
|
|
1810
|
+
* Find a single entity.
|
|
1811
|
+
*/
|
|
1812
|
+
async findOne(query, opts = {}) {
|
|
1813
|
+
const [entity] = await this.findMany({
|
|
1814
|
+
limit: 1,
|
|
1815
|
+
...query
|
|
1816
|
+
}, opts);
|
|
1817
|
+
if (!entity) throw new DbEntityNotFoundError(this.tableName);
|
|
1818
|
+
return entity;
|
|
1819
|
+
}
|
|
1820
|
+
/**
|
|
1821
|
+
* Find entities with pagination.
|
|
1822
|
+
*
|
|
1823
|
+
* It uses the same parameters as `find()`, but adds pagination metadata to the response.
|
|
1824
|
+
*
|
|
1825
|
+
* > Pagination CAN also do a count query to get the total number of elements.
|
|
1826
|
+
*/
|
|
1827
|
+
async paginate(pagination = {}, query = {}, opts = {}) {
|
|
1828
|
+
const limit = query.limit ?? pagination.size ?? 10;
|
|
1829
|
+
const page = pagination.page ?? 0;
|
|
1830
|
+
const offset = query.offset ?? page * limit;
|
|
1831
|
+
let orderBy = query.orderBy;
|
|
1832
|
+
if (!query.orderBy && pagination.sort) orderBy = this.queryManager.parsePaginationSort(pagination.sort);
|
|
1833
|
+
const now = Date.now();
|
|
1834
|
+
const timers = {
|
|
1835
|
+
query: now,
|
|
1836
|
+
count: now
|
|
1837
|
+
};
|
|
1838
|
+
const tasks = [];
|
|
1839
|
+
tasks.push(this.findMany({
|
|
1840
|
+
offset,
|
|
1841
|
+
limit: limit + 1,
|
|
1842
|
+
orderBy,
|
|
1843
|
+
...query
|
|
1844
|
+
}, opts).then((it) => {
|
|
1845
|
+
timers.query = Date.now() - timers.query;
|
|
1846
|
+
return it;
|
|
1847
|
+
}));
|
|
1848
|
+
if (opts.count) {
|
|
1849
|
+
const where = isSQLWrapper(query.where) ? query.where : query.where ? this.toSQL(query.where) : void 0;
|
|
1850
|
+
tasks.push(this.db.$count(this.table, where).then((it) => {
|
|
1851
|
+
timers.count = Date.now() - timers.count;
|
|
1852
|
+
return it;
|
|
1853
|
+
}));
|
|
1854
|
+
}
|
|
1855
|
+
const [entities, countResult] = await Promise.all(tasks);
|
|
1856
|
+
let sortMetadata;
|
|
1857
|
+
if (orderBy) sortMetadata = this.queryManager.normalizeOrderBy(orderBy);
|
|
1858
|
+
const response = this.queryManager.createPagination(entities, limit, offset, sortMetadata);
|
|
1859
|
+
response.page.totalElements = countResult;
|
|
1860
|
+
if (countResult != null) response.page.totalPages = Math.ceil(countResult / limit);
|
|
1861
|
+
return response;
|
|
1862
|
+
}
|
|
1863
|
+
/**
|
|
1864
|
+
* Find an entity by ID.
|
|
1865
|
+
*
|
|
1866
|
+
* This is a convenience method for `findOne` with a where clause on the primary key.
|
|
1867
|
+
* If you need more complex queries, use `findOne` instead.
|
|
1868
|
+
*/
|
|
1869
|
+
async findById(id, opts = {}) {
|
|
1870
|
+
return await this.findOne({ where: this.getWhereId(id) }, opts);
|
|
1871
|
+
}
|
|
1872
|
+
/**
|
|
1873
|
+
* Helper to create a type-safe query object.
|
|
1874
|
+
*/
|
|
1875
|
+
createQuery() {
|
|
1876
|
+
return {};
|
|
1877
|
+
}
|
|
1878
|
+
/**
|
|
1879
|
+
* Helper to create a type-safe where clause.
|
|
1880
|
+
*/
|
|
1881
|
+
createQueryWhere() {
|
|
1882
|
+
return {};
|
|
1883
|
+
}
|
|
1884
|
+
/**
|
|
1885
|
+
* Create an entity.
|
|
1886
|
+
*
|
|
1887
|
+
* @param data The entity to create.
|
|
1888
|
+
* @param opts The options for creating the entity.
|
|
1889
|
+
* @returns The ID of the created entity.
|
|
1890
|
+
*/
|
|
1891
|
+
async create(data, opts = {}) {
|
|
1892
|
+
await this.alepha.events.emit("repository:create:before", {
|
|
1893
|
+
tableName: this.tableName,
|
|
1894
|
+
data
|
|
1895
|
+
});
|
|
1896
|
+
try {
|
|
1897
|
+
const entity = await this.rawInsert(opts).values(this.cast(data ?? {}, true)).returning(this.table).then(([it]) => this.clean(it, this.entity.schema));
|
|
1898
|
+
await this.alepha.events.emit("repository:create:after", {
|
|
1899
|
+
tableName: this.tableName,
|
|
1900
|
+
data,
|
|
1901
|
+
entity
|
|
1902
|
+
});
|
|
1903
|
+
return entity;
|
|
1904
|
+
} catch (error) {
|
|
1905
|
+
throw this.handleError(error, "Insert query has failed");
|
|
1906
|
+
}
|
|
1907
|
+
}
|
|
1908
|
+
/**
|
|
1909
|
+
* Create many entities.
|
|
1910
|
+
*
|
|
1911
|
+
* Inserts are batched in chunks of 1000 to avoid hitting database limits.
|
|
1912
|
+
*
|
|
1913
|
+
* @param values The entities to create.
|
|
1914
|
+
* @param opts The statement options.
|
|
1915
|
+
* @returns The created entities.
|
|
1916
|
+
*/
|
|
1917
|
+
async createMany(values, opts = {}) {
|
|
1918
|
+
if (values.length === 0) return [];
|
|
1919
|
+
await this.alepha.events.emit("repository:create:before", {
|
|
1920
|
+
tableName: this.tableName,
|
|
1921
|
+
data: values
|
|
1922
|
+
});
|
|
1923
|
+
const batchSize = opts.batchSize ?? 1e3;
|
|
1924
|
+
const allEntities = [];
|
|
1925
|
+
try {
|
|
1926
|
+
for (let i = 0; i < values.length; i += batchSize) {
|
|
1927
|
+
const batch = values.slice(i, i + batchSize);
|
|
1928
|
+
const entities = await this.rawInsert(opts).values(batch.map((data) => this.cast(data, true))).returning(this.table).then((rows) => rows.map((it) => this.clean(it, this.entity.schema)));
|
|
1929
|
+
allEntities.push(...entities);
|
|
1930
|
+
}
|
|
1931
|
+
await this.alepha.events.emit("repository:create:after", {
|
|
1932
|
+
tableName: this.tableName,
|
|
1933
|
+
data: values,
|
|
1934
|
+
entity: allEntities
|
|
1935
|
+
});
|
|
1936
|
+
return allEntities;
|
|
1937
|
+
} catch (error) {
|
|
1938
|
+
throw this.handleError(error, "Insert query has failed");
|
|
1939
|
+
}
|
|
1940
|
+
}
|
|
1941
|
+
/**
|
|
1942
|
+
* Find an entity and update it.
|
|
1943
|
+
*/
|
|
1944
|
+
async updateOne(where, data, opts = {}) {
|
|
1945
|
+
await this.alepha.events.emit("repository:update:before", {
|
|
1946
|
+
tableName: this.tableName,
|
|
1947
|
+
where,
|
|
1948
|
+
data
|
|
1949
|
+
});
|
|
1950
|
+
let row = data;
|
|
1951
|
+
const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
|
|
1952
|
+
if (updatedAtField) row[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
|
|
1953
|
+
where = this.withDeletedAt(where, opts);
|
|
1954
|
+
row = this.cast(row, false);
|
|
1955
|
+
delete row[this.id.key];
|
|
1956
|
+
const response = await this.rawUpdate(opts).set(row).where(this.toSQL(where)).returning(this.table).catch((error) => {
|
|
1957
|
+
throw this.handleError(error, "Update query has failed");
|
|
1958
|
+
});
|
|
1959
|
+
if (!response[0]) throw new DbEntityNotFoundError(this.tableName);
|
|
1960
|
+
try {
|
|
1961
|
+
const entity = this.clean(response[0], this.entity.schema);
|
|
1962
|
+
await this.alepha.events.emit("repository:update:after", {
|
|
1963
|
+
tableName: this.tableName,
|
|
1964
|
+
where,
|
|
1965
|
+
data,
|
|
1966
|
+
entities: [entity]
|
|
1967
|
+
});
|
|
1968
|
+
return entity;
|
|
1969
|
+
} catch (error) {
|
|
1970
|
+
throw this.handleError(error, "Update query has failed");
|
|
1971
|
+
}
|
|
1972
|
+
}
|
|
1973
|
+
/**
|
|
1974
|
+
* Save a given entity.
|
|
1975
|
+
*
|
|
1976
|
+
* @example
|
|
1977
|
+
* ```ts
|
|
1978
|
+
* const entity = await repository.findById(1);
|
|
1979
|
+
* entity.name = "New Name"; // update a field
|
|
1980
|
+
* delete entity.description; // delete a field
|
|
1981
|
+
* await repository.save(entity);
|
|
1982
|
+
* ```
|
|
1983
|
+
*
|
|
1984
|
+
* Difference with `updateById/updateOne`:
|
|
1985
|
+
*
|
|
1986
|
+
* - requires the entity to be fetched first (whole object is expected)
|
|
1987
|
+
* - check pg.version() if present -> optimistic locking
|
|
1988
|
+
* - validate entity against schema
|
|
1989
|
+
* - undefined values will be set to null, not ignored!
|
|
1990
|
+
*
|
|
1991
|
+
* @see {@link DbVersionMismatchError}
|
|
1992
|
+
*/
|
|
1993
|
+
async save(entity, opts = {}) {
|
|
1994
|
+
const row = entity;
|
|
1995
|
+
const id = row[this.id.key];
|
|
1996
|
+
if (id == null) throw new AlephaError("Cannot save entity without ID - missing primary key in value");
|
|
1997
|
+
for (const key of Object.keys(this.entity.schema.properties)) if (row[key] === void 0) row[key] = null;
|
|
1998
|
+
let where = this.createQueryWhere();
|
|
1999
|
+
where.id = { eq: id };
|
|
2000
|
+
const versionField = getAttrFields(this.entity.schema, PG_VERSION)?.[0];
|
|
2001
|
+
if (versionField && typeof row[versionField.key] === "number") {
|
|
2002
|
+
where = { and: [where, { [versionField.key]: { eq: row[versionField.key] } }] };
|
|
2003
|
+
row[versionField.key] += 1;
|
|
2004
|
+
}
|
|
2005
|
+
try {
|
|
2006
|
+
const newValue = await this.updateOne(where, row, opts);
|
|
2007
|
+
for (const key of Object.keys(this.entity.schema.properties)) row[key] = void 0;
|
|
2008
|
+
Object.assign(row, newValue);
|
|
2009
|
+
} catch (error) {
|
|
2010
|
+
if (error instanceof DbEntityNotFoundError && versionField) try {
|
|
2011
|
+
await this.findById(id);
|
|
2012
|
+
throw new DbVersionMismatchError(this.tableName, id);
|
|
2013
|
+
} catch (lookupError) {
|
|
2014
|
+
if (lookupError instanceof DbEntityNotFoundError) throw error;
|
|
2015
|
+
if (lookupError instanceof DbVersionMismatchError) throw lookupError;
|
|
2016
|
+
throw lookupError;
|
|
2017
|
+
}
|
|
2018
|
+
throw error;
|
|
2019
|
+
}
|
|
2020
|
+
}
|
|
2021
|
+
/**
|
|
2022
|
+
* Find an entity by ID and update it.
|
|
2023
|
+
*/
|
|
2024
|
+
async updateById(id, data, opts = {}) {
|
|
2025
|
+
return await this.updateOne(this.getWhereId(id), data, opts);
|
|
2026
|
+
}
|
|
2027
|
+
/**
|
|
2028
|
+
* Find many entities and update all of them.
|
|
2029
|
+
*/
|
|
2030
|
+
async updateMany(where, data, opts = {}) {
|
|
2031
|
+
await this.alepha.events.emit("repository:update:before", {
|
|
2032
|
+
tableName: this.tableName,
|
|
2033
|
+
where,
|
|
2034
|
+
data
|
|
2035
|
+
});
|
|
2036
|
+
const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
|
|
2037
|
+
if (updatedAtField) data[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
|
|
2038
|
+
where = this.withDeletedAt(where, opts);
|
|
2039
|
+
data = this.cast(data, false);
|
|
2040
|
+
try {
|
|
2041
|
+
const entities = await this.rawUpdate(opts).set(data).where(this.toSQL(where)).returning();
|
|
2042
|
+
await this.alepha.events.emit("repository:update:after", {
|
|
2043
|
+
tableName: this.tableName,
|
|
2044
|
+
where,
|
|
2045
|
+
data,
|
|
2046
|
+
entities
|
|
2047
|
+
});
|
|
2048
|
+
return entities.map((it) => it[this.id.key]);
|
|
2049
|
+
} catch (error) {
|
|
2050
|
+
throw this.handleError(error, "Update query has failed");
|
|
2051
|
+
}
|
|
2052
|
+
}
|
|
2053
|
+
/**
|
|
2054
|
+
* Find many and delete all of them.
|
|
2055
|
+
* @returns Array of deleted entity IDs
|
|
2056
|
+
*/
|
|
2057
|
+
async deleteMany(where = {}, opts = {}) {
|
|
2058
|
+
const deletedAt = this.deletedAt();
|
|
2059
|
+
if (deletedAt && !opts.force) return await this.updateMany(where, { [deletedAt.key]: opts.now ?? this.dateTimeProvider.nowISOString() }, opts);
|
|
2060
|
+
await this.alepha.events.emit("repository:delete:before", {
|
|
2061
|
+
tableName: this.tableName,
|
|
2062
|
+
where
|
|
2063
|
+
});
|
|
2064
|
+
try {
|
|
2065
|
+
const ids = (await this.rawDelete(opts).where(this.toSQL(where)).returning({ id: this.table[this.id.key] })).map((row) => row.id);
|
|
2066
|
+
await this.alepha.events.emit("repository:delete:after", {
|
|
2067
|
+
tableName: this.tableName,
|
|
2068
|
+
where,
|
|
2069
|
+
ids
|
|
2070
|
+
});
|
|
2071
|
+
return ids;
|
|
2072
|
+
} catch (error) {
|
|
2073
|
+
throw new DbError("Delete query has failed", error);
|
|
2074
|
+
}
|
|
2075
|
+
}
|
|
2076
|
+
/**
|
|
2077
|
+
* Delete all entities.
|
|
2078
|
+
* @returns Array of deleted entity IDs
|
|
2079
|
+
*/
|
|
2080
|
+
clear(opts = {}) {
|
|
2081
|
+
return this.deleteMany({}, opts);
|
|
2082
|
+
}
|
|
2083
|
+
/**
|
|
2084
|
+
* Delete the given entity.
|
|
2085
|
+
*
|
|
2086
|
+
* You must fetch the entity first in order to delete it.
|
|
2087
|
+
* @returns Array containing the deleted entity ID
|
|
2088
|
+
*/
|
|
2089
|
+
async destroy(entity, opts = {}) {
|
|
2090
|
+
const id = entity[this.id.key];
|
|
2091
|
+
if (id == null) throw new AlephaError("Cannot destroy entity without ID");
|
|
2092
|
+
const deletedAt = this.deletedAt();
|
|
2093
|
+
if (deletedAt && !opts.force) {
|
|
2094
|
+
opts.now ??= this.dateTimeProvider.nowISOString();
|
|
2095
|
+
entity[deletedAt.key] = opts.now;
|
|
2096
|
+
}
|
|
2097
|
+
return await this.deleteById(id, opts);
|
|
2098
|
+
}
|
|
2099
|
+
/**
|
|
2100
|
+
* Find an entity and delete it.
|
|
2101
|
+
* @returns Array of deleted entity IDs (should contain at most one ID)
|
|
2102
|
+
*/
|
|
2103
|
+
async deleteOne(where = {}, opts = {}) {
|
|
2104
|
+
return await this.deleteMany(where, opts);
|
|
2105
|
+
}
|
|
2106
|
+
/**
|
|
2107
|
+
* Find an entity by ID and delete it.
|
|
2108
|
+
* @returns Array containing the deleted entity ID
|
|
2109
|
+
* @throws DbEntityNotFoundError if the entity is not found
|
|
2110
|
+
*/
|
|
2111
|
+
async deleteById(id, opts = {}) {
|
|
2112
|
+
const result = await this.deleteMany(this.getWhereId(id), opts);
|
|
2113
|
+
if (result.length === 0) throw new DbEntityNotFoundError(`Entity with ID ${id} not found in ${this.tableName}`);
|
|
2114
|
+
return result;
|
|
2115
|
+
}
|
|
2116
|
+
/**
|
|
2117
|
+
* Count entities.
|
|
2118
|
+
*/
|
|
2119
|
+
async count(where = {}, opts = {}) {
|
|
2120
|
+
where = this.withDeletedAt(where, opts);
|
|
2121
|
+
return (opts.tx ?? this.db).$count(this.table, this.toSQL(where));
|
|
2122
|
+
}
|
|
2123
|
+
conflictMessagePattern = "duplicate key value violates unique constraint";
|
|
2124
|
+
handleError(error, message) {
|
|
2125
|
+
if (!(error instanceof Error)) return new DbError(message);
|
|
2126
|
+
if (error.cause?.message.includes(this.conflictMessagePattern) || error.message.includes(this.conflictMessagePattern)) return new DbConflictError(message, error);
|
|
2127
|
+
return new DbError(message, error);
|
|
2128
|
+
}
|
|
2129
|
+
withDeletedAt(where, opts = {}) {
|
|
2130
|
+
if (opts.force) return where;
|
|
2131
|
+
const deletedAt = this.deletedAt();
|
|
2132
|
+
if (!deletedAt) return where;
|
|
2133
|
+
return { and: [where, { [deletedAt.key]: { isNull: true } }] };
|
|
2134
|
+
}
|
|
2135
|
+
deletedAt() {
|
|
2136
|
+
const deletedAtFields = getAttrFields(this.entity.schema, PG_DELETED_AT);
|
|
2137
|
+
if (deletedAtFields.length > 0) return deletedAtFields[0];
|
|
2138
|
+
}
|
|
2139
|
+
/**
|
|
2140
|
+
* Convert something to valid Pg Insert Value.
|
|
2141
|
+
*/
|
|
2142
|
+
cast(data, insert) {
|
|
2143
|
+
const schema$1 = insert ? this.entity.insertSchema : t.partial(this.entity.updateSchema);
|
|
2144
|
+
return this.alepha.codec.encode(schema$1, data);
|
|
2145
|
+
}
|
|
2146
|
+
/**
|
|
2147
|
+
* Transform a row from the database into a clean entity.
|
|
2148
|
+
*/
|
|
2149
|
+
clean(row, schema$1) {
|
|
2150
|
+
for (const key of Object.keys(schema$1.properties)) {
|
|
2151
|
+
const value = schema$1.properties[key];
|
|
2152
|
+
if (typeof row[key] === "string") {
|
|
2153
|
+
if (t.schema.isDateTime(value)) row[key] = this.dateTimeProvider.of(row[key]).toISOString();
|
|
2154
|
+
else if (t.schema.isDate(value)) row[key] = this.dateTimeProvider.of(`${row[key]}T00:00:00Z`).toISOString().split("T")[0];
|
|
2155
|
+
}
|
|
2156
|
+
if (typeof row[key] === "bigint" && t.schema.isBigInt(value)) row[key] = row[key].toString();
|
|
2157
|
+
}
|
|
2158
|
+
return this.alepha.codec.decode(schema$1, row);
|
|
2159
|
+
}
|
|
2160
|
+
/**
|
|
2161
|
+
* Clean a row with joins recursively
|
|
2162
|
+
*/
|
|
2163
|
+
cleanWithJoins(row, schema$1, joins, parentPath) {
|
|
2164
|
+
const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
|
|
2165
|
+
const cleanRow = { ...row };
|
|
2166
|
+
const joinedData = {};
|
|
2167
|
+
for (const join of joinsAtThisLevel) {
|
|
2168
|
+
joinedData[join.key] = cleanRow[join.key];
|
|
2169
|
+
delete cleanRow[join.key];
|
|
2170
|
+
}
|
|
2171
|
+
const entity = this.clean(cleanRow, schema$1);
|
|
2172
|
+
for (const join of joinsAtThisLevel) {
|
|
2173
|
+
const joinedValue = joinedData[join.key];
|
|
2174
|
+
if (joinedValue != null) {
|
|
2175
|
+
const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
|
|
2176
|
+
if (joins.filter((j) => j.parent === joinPath).length > 0) entity[join.key] = this.cleanWithJoins(joinedValue, join.schema, joins, joinPath);
|
|
2177
|
+
else entity[join.key] = this.clean(joinedValue, join.schema);
|
|
2178
|
+
} else entity[join.key] = void 0;
|
|
2179
|
+
}
|
|
2180
|
+
return entity;
|
|
2181
|
+
}
|
|
2182
|
+
/**
|
|
2183
|
+
* Convert a where clause to SQL.
|
|
2184
|
+
*/
|
|
2185
|
+
toSQL(where, joins) {
|
|
2186
|
+
return this.queryManager.toSQL(where, {
|
|
2187
|
+
schema: this.entity.schema,
|
|
2188
|
+
col: (name) => {
|
|
2189
|
+
return this.col(name);
|
|
2190
|
+
},
|
|
2191
|
+
joins,
|
|
2192
|
+
dialect: this.provider.dialect
|
|
2193
|
+
});
|
|
2194
|
+
}
|
|
2195
|
+
/**
|
|
2196
|
+
* Get the where clause for an ID.
|
|
2197
|
+
*
|
|
2198
|
+
* @param id The ID to get the where clause for.
|
|
2199
|
+
* @returns The where clause for the ID.
|
|
2200
|
+
*/
|
|
2201
|
+
getWhereId(id) {
|
|
2202
|
+
return { [this.id.key]: { eq: t.schema.isString(this.id.type) ? String(id) : Number(id) } };
|
|
2203
|
+
}
|
|
2204
|
+
/**
|
|
2205
|
+
* Find a primary key in the schema.
|
|
2206
|
+
*/
|
|
2207
|
+
getPrimaryKey(schema$1) {
|
|
2208
|
+
const primaryKeys = getAttrFields(schema$1, PG_PRIMARY_KEY);
|
|
2209
|
+
if (primaryKeys.length === 0) throw new AlephaError("Primary key not found in schema");
|
|
2210
|
+
if (primaryKeys.length > 1) throw new AlephaError(`Multiple primary keys (${primaryKeys.length}) are not supported`);
|
|
2211
|
+
return {
|
|
2212
|
+
key: primaryKeys[0].key,
|
|
2213
|
+
col: this.col(primaryKeys[0].key),
|
|
2214
|
+
type: primaryKeys[0].type
|
|
2215
|
+
};
|
|
2216
|
+
}
|
|
2217
|
+
};
|
|
2218
|
+
|
|
2219
|
+
//#endregion
|
|
2220
|
+
//#region ../../src/orm/providers/RepositoryProvider.ts
|
|
2221
|
+
var RepositoryProvider = class {
|
|
2222
|
+
alepha = $inject(Alepha);
|
|
2223
|
+
registry = /* @__PURE__ */ new Map();
|
|
2224
|
+
getRepositories(provider) {
|
|
2225
|
+
const repositories = this.alepha.services(Repository);
|
|
2226
|
+
if (provider) return repositories.filter((it) => it.provider === provider);
|
|
2227
|
+
return repositories;
|
|
2228
|
+
}
|
|
2229
|
+
getRepository(entity) {
|
|
2230
|
+
const RepositoryClass = this.createClassRepository(entity);
|
|
2231
|
+
return this.alepha.inject(RepositoryClass);
|
|
2232
|
+
}
|
|
2233
|
+
createClassRepository(entity) {
|
|
2234
|
+
let name = entity.name.charAt(0).toUpperCase() + entity.name.slice(1);
|
|
2235
|
+
if (name.endsWith("s")) name = name.slice(0, -1);
|
|
2236
|
+
name = `${name}Repository`;
|
|
2237
|
+
if (this.registry.has(entity)) return this.registry.get(entity);
|
|
2238
|
+
class GenericRepository extends Repository {
|
|
2239
|
+
constructor() {
|
|
2240
|
+
super(entity);
|
|
2241
|
+
}
|
|
2242
|
+
}
|
|
2243
|
+
Object.defineProperty(GenericRepository, "name", { value: name });
|
|
2244
|
+
this.registry.set(entity, GenericRepository);
|
|
2245
|
+
return GenericRepository;
|
|
2246
|
+
}
|
|
2247
|
+
};
|
|
2248
|
+
|
|
2249
|
+
//#endregion
|
|
2250
|
+
//#region ../../src/orm/helpers/parseQueryString.ts
|
|
2251
|
+
/**
|
|
2252
|
+
* Parse a string query into a PgQueryWhere object.
|
|
2253
|
+
*
|
|
2254
|
+
* Supported syntax:
|
|
2255
|
+
* - Simple equality: "name=John"
|
|
2256
|
+
* - Wildcard patterns: "name=John*" (startsWith), "name=*John" (endsWith), "name=*John*" (contains)
|
|
2257
|
+
* - Operators: "age>18", "age>=18", "age<65", "age<=65", "status!=active"
|
|
2258
|
+
* - NULL checks: "deletedAt=null", "email!=null"
|
|
2259
|
+
* - IN arrays: "status=[pending,active]"
|
|
2260
|
+
* - AND conditions: "name=John&age>18"
|
|
2261
|
+
* - OR conditions: "name=John|email=john@example.com"
|
|
2262
|
+
* - Nested AND/OR: "(name=John|name=Jane)&age>18"
|
|
2263
|
+
* - JSONB nested: "profile.city=Paris"
|
|
2264
|
+
*
|
|
2265
|
+
* @example
|
|
2266
|
+
* ```ts
|
|
2267
|
+
* // Simple equality
|
|
2268
|
+
* parseQueryString("name=John")
|
|
2269
|
+
* // => { name: { eq: "John" } }
|
|
2270
|
+
*
|
|
2271
|
+
* // Wildcard patterns
|
|
2272
|
+
* parseQueryString("name=John*") // startsWith
|
|
2273
|
+
* // => { name: { startsWith: "John" } }
|
|
2274
|
+
* parseQueryString("name=*Smith") // endsWith
|
|
2275
|
+
* // => { name: { endsWith: "Smith" } }
|
|
2276
|
+
* parseQueryString("name=*oh*") // contains
|
|
2277
|
+
* // => { name: { contains: "oh" } }
|
|
2278
|
+
*
|
|
2279
|
+
* // Multiple conditions
|
|
2280
|
+
* parseQueryString("name=John&age>18")
|
|
2281
|
+
* // => { and: [{ name: { eq: "John" } }, { age: { gt: 18 } }] }
|
|
2282
|
+
*
|
|
2283
|
+
* // OR conditions
|
|
2284
|
+
* parseQueryString("status=active|status=pending")
|
|
2285
|
+
* // => { or: [{ status: { eq: "active" } }, { status: { eq: "pending" } }] }
|
|
2286
|
+
*
|
|
2287
|
+
* // Complex nested
|
|
2288
|
+
* parseQueryString("(name=John|name=Jane)&age>18&status!=archived")
|
|
2289
|
+
* // => { and: [
|
|
2290
|
+
* // { or: [{ name: { eq: "John" } }, { name: { eq: "Jane" } }] },
|
|
2291
|
+
* // { age: { gt: 18 } },
|
|
2292
|
+
* // { status: { ne: "archived" } }
|
|
2293
|
+
* // ] }
|
|
2294
|
+
*
|
|
2295
|
+
* // JSONB nested query
|
|
2296
|
+
* parseQueryString("profile.city=Paris&profile.age>25")
|
|
2297
|
+
* // => { profile: { city: { eq: "Paris" }, age: { gt: 25 } } }
|
|
2298
|
+
* ```
|
|
2299
|
+
*/
|
|
2300
|
+
function parseQueryString(query) {
|
|
2301
|
+
if (!query || query.trim() === "") return {};
|
|
2302
|
+
return new QueryStringParser(query).parse();
|
|
2303
|
+
}
|
|
2304
|
+
var QueryStringParser = class {
|
|
2305
|
+
pos = 0;
|
|
2306
|
+
query;
|
|
2307
|
+
constructor(query) {
|
|
2308
|
+
this.query = query.trim();
|
|
2309
|
+
}
|
|
2310
|
+
parse() {
|
|
2311
|
+
return this.parseExpression();
|
|
2312
|
+
}
|
|
2313
|
+
parseExpression() {
|
|
2314
|
+
return this.parseOr();
|
|
2315
|
+
}
|
|
2316
|
+
parseOr() {
|
|
2317
|
+
const left = this.parseAnd();
|
|
2318
|
+
if (this.peek() === "|") {
|
|
2319
|
+
const conditions = [left];
|
|
2320
|
+
while (this.peek() === "|") {
|
|
2321
|
+
this.consume("|");
|
|
2322
|
+
conditions.push(this.parseAnd());
|
|
2323
|
+
}
|
|
2324
|
+
return { or: conditions };
|
|
2325
|
+
}
|
|
2326
|
+
return left;
|
|
2327
|
+
}
|
|
2328
|
+
parseAnd() {
|
|
2329
|
+
const left = this.parsePrimary();
|
|
2330
|
+
if (this.peek() === "&") {
|
|
2331
|
+
const conditions = [left];
|
|
2332
|
+
while (this.peek() === "&") {
|
|
2333
|
+
this.consume("&");
|
|
2334
|
+
conditions.push(this.parsePrimary());
|
|
2335
|
+
}
|
|
2336
|
+
return { and: conditions };
|
|
2337
|
+
}
|
|
2338
|
+
return left;
|
|
2339
|
+
}
|
|
2340
|
+
parsePrimary() {
|
|
2341
|
+
this.skipWhitespace();
|
|
2342
|
+
if (this.peek() === "(") {
|
|
2343
|
+
this.consume("(");
|
|
2344
|
+
const expr = this.parseExpression();
|
|
2345
|
+
this.consume(")");
|
|
2346
|
+
return expr;
|
|
2347
|
+
}
|
|
2348
|
+
return this.parseCondition();
|
|
2349
|
+
}
|
|
2350
|
+
parseCondition() {
|
|
2351
|
+
const field = this.parseFieldPath();
|
|
2352
|
+
this.skipWhitespace();
|
|
2353
|
+
const operator = this.parseOperator();
|
|
2354
|
+
this.skipWhitespace();
|
|
2355
|
+
const value = this.parseValue();
|
|
2356
|
+
if (value === "") throw new AlephaError(`Expected value for field '${field.join(".")}'`);
|
|
2357
|
+
return this.buildCondition(field, operator, value);
|
|
2358
|
+
}
|
|
2359
|
+
parseFieldPath() {
|
|
2360
|
+
const path = [];
|
|
2361
|
+
let current = "";
|
|
2362
|
+
while (this.pos < this.query.length) {
|
|
2363
|
+
const ch = this.query[this.pos];
|
|
2364
|
+
if (ch === "." && current) {
|
|
2365
|
+
path.push(current);
|
|
2366
|
+
current = "";
|
|
2367
|
+
this.pos++;
|
|
2368
|
+
continue;
|
|
2369
|
+
}
|
|
2370
|
+
if (ch === "=" || ch === "!" || ch === ">" || ch === "<" || ch === " ") break;
|
|
2371
|
+
current += ch;
|
|
2372
|
+
this.pos++;
|
|
2373
|
+
}
|
|
2374
|
+
if (current) path.push(current);
|
|
2375
|
+
return path;
|
|
2376
|
+
}
|
|
2377
|
+
parseOperator() {
|
|
2378
|
+
this.skipWhitespace();
|
|
2379
|
+
const remaining = this.query.slice(this.pos);
|
|
2380
|
+
if (remaining.startsWith(">=")) {
|
|
2381
|
+
this.pos += 2;
|
|
2382
|
+
return ">=";
|
|
2383
|
+
}
|
|
2384
|
+
if (remaining.startsWith("<=")) {
|
|
2385
|
+
this.pos += 2;
|
|
2386
|
+
return "<=";
|
|
2387
|
+
}
|
|
2388
|
+
if (remaining.startsWith("!=")) {
|
|
2389
|
+
this.pos += 2;
|
|
2390
|
+
return "!=";
|
|
2391
|
+
}
|
|
2392
|
+
const ch = this.query[this.pos];
|
|
2393
|
+
if (ch === "=" || ch === ">" || ch === "<") {
|
|
2394
|
+
this.pos++;
|
|
2395
|
+
return ch;
|
|
2396
|
+
}
|
|
2397
|
+
throw new Error(`Expected operator at position ${this.pos}`);
|
|
2398
|
+
}
|
|
2399
|
+
parseValue() {
|
|
2400
|
+
this.skipWhitespace();
|
|
2401
|
+
if (this.query.slice(this.pos, this.pos + 4).toLowerCase() === "null") {
|
|
2402
|
+
this.pos += 4;
|
|
2403
|
+
return null;
|
|
2404
|
+
}
|
|
2405
|
+
if (this.query[this.pos] === "[") return this.parseArray();
|
|
2406
|
+
if (this.query[this.pos] === "\"" || this.query[this.pos] === "'") return this.parseQuotedString();
|
|
2407
|
+
let value = "";
|
|
2408
|
+
while (this.pos < this.query.length) {
|
|
2409
|
+
const ch = this.query[this.pos];
|
|
2410
|
+
if (ch === "&" || ch === "|" || ch === ")") break;
|
|
2411
|
+
value += ch;
|
|
2412
|
+
this.pos++;
|
|
2413
|
+
}
|
|
2414
|
+
return this.coerceValue(value.trim());
|
|
2415
|
+
}
|
|
2416
|
+
parseArray() {
|
|
2417
|
+
this.consume("[");
|
|
2418
|
+
const values = [];
|
|
2419
|
+
while (this.pos < this.query.length && this.query[this.pos] !== "]") {
|
|
2420
|
+
this.skipWhitespace();
|
|
2421
|
+
if (this.query[this.pos] === "\"" || this.query[this.pos] === "'") values.push(this.parseQuotedString());
|
|
2422
|
+
else {
|
|
2423
|
+
let value = "";
|
|
2424
|
+
while (this.pos < this.query.length && this.query[this.pos] !== "," && this.query[this.pos] !== "]") {
|
|
2425
|
+
value += this.query[this.pos];
|
|
2426
|
+
this.pos++;
|
|
2427
|
+
}
|
|
2428
|
+
values.push(this.coerceValue(value.trim()));
|
|
2429
|
+
}
|
|
2430
|
+
this.skipWhitespace();
|
|
2431
|
+
if (this.query[this.pos] === ",") this.pos++;
|
|
2432
|
+
}
|
|
2433
|
+
this.consume("]");
|
|
2434
|
+
return values;
|
|
2435
|
+
}
|
|
2436
|
+
parseQuotedString() {
|
|
2437
|
+
const quote = this.query[this.pos];
|
|
2438
|
+
this.pos++;
|
|
2439
|
+
let value = "";
|
|
2440
|
+
let escaped = false;
|
|
2441
|
+
while (this.pos < this.query.length) {
|
|
2442
|
+
const ch = this.query[this.pos];
|
|
2443
|
+
if (escaped) {
|
|
2444
|
+
value += ch;
|
|
2445
|
+
escaped = false;
|
|
2446
|
+
this.pos++;
|
|
2447
|
+
continue;
|
|
2448
|
+
}
|
|
2449
|
+
if (ch === "\\") {
|
|
2450
|
+
escaped = true;
|
|
2451
|
+
this.pos++;
|
|
2452
|
+
continue;
|
|
2453
|
+
}
|
|
2454
|
+
if (ch === quote) {
|
|
2455
|
+
this.pos++;
|
|
2456
|
+
break;
|
|
2457
|
+
}
|
|
2458
|
+
value += ch;
|
|
2459
|
+
this.pos++;
|
|
2460
|
+
}
|
|
2461
|
+
return value;
|
|
2462
|
+
}
|
|
2463
|
+
coerceValue(value) {
|
|
2464
|
+
if (/^-?\d+$/.test(value)) return parseInt(value, 10);
|
|
2465
|
+
if (/^-?\d+\.\d+$/.test(value)) return parseFloat(value);
|
|
2466
|
+
if (value.toLowerCase() === "true") return true;
|
|
2467
|
+
if (value.toLowerCase() === "false") return false;
|
|
2468
|
+
return value;
|
|
2469
|
+
}
|
|
2470
|
+
buildCondition(path, operator, value) {
|
|
2471
|
+
let filterOp;
|
|
2472
|
+
if (operator === "=") if (value === null) filterOp = { isNull: true };
|
|
2473
|
+
else if (Array.isArray(value)) filterOp = { inArray: value };
|
|
2474
|
+
else if (typeof value === "string" && value.includes("*")) {
|
|
2475
|
+
const startsWithAsterisk = value.startsWith("*");
|
|
2476
|
+
const endsWithAsterisk = value.endsWith("*");
|
|
2477
|
+
const cleanValue = value.replace(/^\*|\*$/g, "");
|
|
2478
|
+
if (startsWithAsterisk && endsWithAsterisk) filterOp = { contains: cleanValue };
|
|
2479
|
+
else if (startsWithAsterisk) filterOp = { endsWith: cleanValue };
|
|
2480
|
+
else if (endsWithAsterisk) filterOp = { startsWith: cleanValue };
|
|
2481
|
+
else filterOp = { eq: value };
|
|
2482
|
+
} else filterOp = { eq: value };
|
|
2483
|
+
else if (operator === "!=") if (value === null) filterOp = { isNotNull: true };
|
|
2484
|
+
else filterOp = { ne: value };
|
|
2485
|
+
else if (operator === ">") filterOp = { gt: value };
|
|
2486
|
+
else if (operator === ">=") filterOp = { gte: value };
|
|
2487
|
+
else if (operator === "<") filterOp = { lt: value };
|
|
2488
|
+
else if (operator === "<=") filterOp = { lte: value };
|
|
2489
|
+
else throw new Error(`Unsupported operator: ${operator}`);
|
|
2490
|
+
if (path.length === 1) return { [path[0]]: filterOp };
|
|
2491
|
+
let result = filterOp;
|
|
2492
|
+
for (let i = path.length - 1; i >= 0; i--) result = { [path[i]]: result };
|
|
2493
|
+
return result;
|
|
2494
|
+
}
|
|
2495
|
+
peek() {
|
|
2496
|
+
this.skipWhitespace();
|
|
2497
|
+
return this.query[this.pos] || "";
|
|
2498
|
+
}
|
|
2499
|
+
consume(expected) {
|
|
2500
|
+
this.skipWhitespace();
|
|
2501
|
+
if (this.query[this.pos] !== expected) throw new Error(`Expected '${expected}' at position ${this.pos}, got '${this.query[this.pos]}'`);
|
|
2502
|
+
this.pos++;
|
|
2503
|
+
}
|
|
2504
|
+
skipWhitespace() {
|
|
2505
|
+
while (this.pos < this.query.length && /\s/.test(this.query[this.pos])) this.pos++;
|
|
2506
|
+
}
|
|
2507
|
+
};
|
|
2508
|
+
/**
|
|
2509
|
+
* Helper function to build query strings programmatically
|
|
2510
|
+
*
|
|
2511
|
+
* @example
|
|
2512
|
+
* ```ts
|
|
2513
|
+
* buildQueryString({
|
|
2514
|
+
* and: [
|
|
2515
|
+
* { name: "eq:John" },
|
|
2516
|
+
* { age: "gt:18" }
|
|
2517
|
+
* ]
|
|
2518
|
+
* })
|
|
2519
|
+
* // => "name=John&age>18"
|
|
2520
|
+
* ```
|
|
2521
|
+
*/
|
|
2522
|
+
function buildQueryString(where) {
|
|
2523
|
+
if (!where || typeof where !== "object") return "";
|
|
2524
|
+
if ("and" in where && Array.isArray(where.and)) return where.and.map((w) => buildQueryString(w)).join("&");
|
|
2525
|
+
if ("or" in where && Array.isArray(where.or)) {
|
|
2526
|
+
const parts$1 = where.or.map((w) => buildQueryString(w));
|
|
2527
|
+
return parts$1.length > 1 ? `(${parts$1.join("|")})` : parts$1[0];
|
|
2528
|
+
}
|
|
2529
|
+
if ("not" in where) return "";
|
|
2530
|
+
const parts = [];
|
|
2531
|
+
for (const [field, condition] of Object.entries(where)) {
|
|
2532
|
+
if (typeof condition !== "object" || condition === null) {
|
|
2533
|
+
parts.push(`${field}=${condition}`);
|
|
2534
|
+
continue;
|
|
2535
|
+
}
|
|
2536
|
+
if ("eq" in condition) parts.push(`${field}=${condition.eq}`);
|
|
2537
|
+
else if ("ne" in condition) parts.push(`${field}!=${condition.ne}`);
|
|
2538
|
+
else if ("gt" in condition) parts.push(`${field}>${condition.gt}`);
|
|
2539
|
+
else if ("gte" in condition) parts.push(`${field}>=${condition.gte}`);
|
|
2540
|
+
else if ("lt" in condition) parts.push(`${field}<${condition.lt}`);
|
|
2541
|
+
else if ("lte" in condition) parts.push(`${field}<=${condition.lte}`);
|
|
2542
|
+
else if ("contains" in condition) parts.push(`${field}=*${condition.contains}*`);
|
|
2543
|
+
else if ("startsWith" in condition) parts.push(`${field}=${condition.startsWith}*`);
|
|
2544
|
+
else if ("endsWith" in condition) parts.push(`${field}=*${condition.endsWith}`);
|
|
2545
|
+
else if ("isNull" in condition && condition.isNull) parts.push(`${field}=null`);
|
|
2546
|
+
else if ("isNotNull" in condition && condition.isNotNull) parts.push(`${field}!=null`);
|
|
2547
|
+
else if ("inArray" in condition && Array.isArray(condition.inArray)) {
|
|
2548
|
+
const values = condition.inArray.map((v) => typeof v === "string" ? `"${v}"` : v);
|
|
2549
|
+
parts.push(`${field}=[${values.join(",")}]`);
|
|
2550
|
+
} else {
|
|
2551
|
+
const nested = buildQueryString(condition);
|
|
2552
|
+
if (nested) parts.push(`${field}.${nested}`);
|
|
2553
|
+
}
|
|
2554
|
+
}
|
|
2555
|
+
return parts.join("&");
|
|
2556
|
+
}
|
|
2557
|
+
|
|
2558
|
+
//#endregion
|
|
2559
|
+
//#region ../../src/orm/providers/DatabaseTypeProvider.ts
|
|
2560
|
+
var DatabaseTypeProvider = class {
|
|
2561
|
+
attr = pgAttr;
|
|
2562
|
+
/**
|
|
2563
|
+
* Creates a primary key with an identity column.
|
|
2564
|
+
*/
|
|
2565
|
+
identityPrimaryKey = (identity, options) => pgAttr(pgAttr(pgAttr(t.integer(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
|
|
2566
|
+
/**
|
|
2567
|
+
* Creates a primary key with a big identity column. (default)
|
|
2568
|
+
*/
|
|
2569
|
+
bigIdentityPrimaryKey = (identity, options) => pgAttr(pgAttr(pgAttr(t.int64(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
|
|
2570
|
+
/**
|
|
2571
|
+
* Creates a primary key with a UUID column.
|
|
2572
|
+
*/
|
|
2573
|
+
uuidPrimaryKey = () => pgAttr(pgAttr(t.uuid(), PG_PRIMARY_KEY), PG_DEFAULT);
|
|
2574
|
+
primaryKey(type, options, identity) {
|
|
2575
|
+
if (!type || t.schema.isInteger(type)) return pgAttr(pgAttr(pgAttr(t.integer(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
|
|
2576
|
+
if (t.schema.isString(type) && type.format === "uuid") return pgAttr(pgAttr(t.uuid(), PG_PRIMARY_KEY), PG_DEFAULT);
|
|
2577
|
+
if (t.schema.isNumber(type) && type.format === "int64") return pgAttr(pgAttr(pgAttr(t.number(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
|
|
2578
|
+
if (t.schema.isBigInt(type)) return pgAttr(pgAttr(pgAttr(t.bigint(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
|
|
2579
|
+
throw new AlephaError(`Unsupported type for primary key: ${type}`);
|
|
2580
|
+
}
|
|
2581
|
+
/**
|
|
2582
|
+
* Wrap a schema with "default" attribute.
|
|
2583
|
+
* This is used to set a default value for a column in the database.
|
|
2584
|
+
*/
|
|
2585
|
+
default = (type, value) => {
|
|
2586
|
+
if (value != null) Object.assign(type, { default: value });
|
|
2587
|
+
return this.attr(type, PG_DEFAULT);
|
|
2588
|
+
};
|
|
2589
|
+
/**
|
|
2590
|
+
* Creates a column 'version'.
|
|
2591
|
+
*
|
|
2592
|
+
* This is used to track the version of a row in the database.
|
|
2593
|
+
*
|
|
2594
|
+
* You can use it for optimistic concurrency control (OCC) with {@link RepositoryPrimitive#save}.
|
|
2595
|
+
*
|
|
2596
|
+
* @see {@link RepositoryPrimitive#save}
|
|
2597
|
+
* @see {@link PgVersionMismatchError}
|
|
2598
|
+
*/
|
|
2599
|
+
version = (options = {}) => this.default(pgAttr(t.integer(options), PG_VERSION), 0);
|
|
2600
|
+
/**
|
|
2601
|
+
* Creates a column Created At. So just a datetime column with a default value of the current timestamp.
|
|
2602
|
+
*/
|
|
2603
|
+
createdAt = (options) => pgAttr(pgAttr(t.datetime(options), PG_CREATED_AT), PG_DEFAULT);
|
|
2604
|
+
/**
|
|
2605
|
+
* Creates a column Updated At. Like createdAt, but it is updated on every update of the row.
|
|
2606
|
+
*/
|
|
2607
|
+
updatedAt = (options) => pgAttr(pgAttr(t.datetime(options), PG_UPDATED_AT), PG_DEFAULT);
|
|
2608
|
+
/**
|
|
2609
|
+
* Creates a column Deleted At for soft delete functionality.
|
|
2610
|
+
* This is used to mark rows as deleted without actually removing them from the database.
|
|
2611
|
+
* The column is nullable - NULL means not deleted, timestamp means deleted.
|
|
2612
|
+
*/
|
|
2613
|
+
deletedAt = (options) => pgAttr(t.optional(t.datetime(options)), PG_DELETED_AT);
|
|
2614
|
+
/**
|
|
2615
|
+
* Creates a Postgres ENUM type.
|
|
2616
|
+
*
|
|
2617
|
+
* > By default, `t.enum()` is mapped to a TEXT column in Postgres.
|
|
2618
|
+
* > Using this method, you can create a real ENUM type in the database.
|
|
2619
|
+
*
|
|
2620
|
+
* @example
|
|
2621
|
+
* ```ts
|
|
2622
|
+
* const statusEnum = pg.enum(["pending", "active", "archived"], { name: "status_enum" });
|
|
2623
|
+
* ```
|
|
2624
|
+
*/
|
|
2625
|
+
enum = (values, pgEnumOptions, typeOptions) => {
|
|
2626
|
+
return pgAttr(t.enum(values, {
|
|
2627
|
+
description: pgEnumOptions?.description,
|
|
2628
|
+
...typeOptions
|
|
2629
|
+
}), PG_ENUM, pgEnumOptions);
|
|
2630
|
+
};
|
|
2631
|
+
/**
|
|
2632
|
+
* Creates a reference to another table or schema. Basically a foreign key.
|
|
2633
|
+
*/
|
|
2634
|
+
ref = (type, ref, actions) => {
|
|
2635
|
+
const finalActions = actions ?? { onDelete: t.schema.isOptional(type) ? "set null" : "cascade" };
|
|
2636
|
+
return this.attr(type, PG_REF, {
|
|
2637
|
+
ref,
|
|
2638
|
+
actions: finalActions
|
|
2639
|
+
});
|
|
2640
|
+
};
|
|
2641
|
+
/**
|
|
2642
|
+
* Creates a page schema for a given object schema.
|
|
2643
|
+
* It's used by {@link Repository#paginate} method.
|
|
2644
|
+
*/
|
|
2645
|
+
page = (resource, options) => {
|
|
2646
|
+
return pageSchema$1(resource, options);
|
|
2647
|
+
};
|
|
2648
|
+
};
|
|
2649
|
+
/**
|
|
2650
|
+
* Wrapper of TypeProvider (`t`) for database types.
|
|
2651
|
+
*
|
|
2652
|
+
* Use `db` for improve TypeBox schema definitions with database-specific attributes.
|
|
2653
|
+
*
|
|
2654
|
+
* @example
|
|
2655
|
+
* ```ts
|
|
2656
|
+
* import { t } from "alepha";
|
|
2657
|
+
* import { db } from "alepha/orm";
|
|
2658
|
+
*
|
|
2659
|
+
* const userSchema = t.object({
|
|
2660
|
+
* id: db.primaryKey(t.uuid()),
|
|
2661
|
+
* email: t.email(),
|
|
2662
|
+
* createdAt: db.createdAt(),
|
|
2663
|
+
* });
|
|
2664
|
+
* ```
|
|
2665
|
+
*/
|
|
2666
|
+
const db = new DatabaseTypeProvider();
|
|
2667
|
+
/**
|
|
2668
|
+
* @deprecated Use `db` instead.
|
|
2669
|
+
*/
|
|
2670
|
+
const pg = db;
|
|
2671
|
+
|
|
2672
|
+
//#endregion
|
|
2673
|
+
//#region ../../src/orm/schemas/legacyIdSchema.ts
|
|
2674
|
+
/**
|
|
2675
|
+
* @deprecated Use `pg.primaryKey()` instead.
|
|
2676
|
+
*/
|
|
2677
|
+
const legacyIdSchema = pgAttr(pgAttr(pgAttr(t.integer(), PG_PRIMARY_KEY), PG_SERIAL), PG_DEFAULT);
|
|
2678
|
+
|
|
2679
|
+
//#endregion
|
|
2680
|
+
//#region ../../src/orm/primitives/$repository.ts
|
|
2681
|
+
/**
|
|
2682
|
+
* Get the repository for the given entity.
|
|
2683
|
+
*/
|
|
2684
|
+
const $repository = (entity) => {
|
|
2685
|
+
const { alepha } = $context();
|
|
2686
|
+
return $inject(alepha.inject(RepositoryProvider).createClassRepository(entity));
|
|
2687
|
+
};
|
|
2688
|
+
|
|
2689
|
+
//#endregion
|
|
2690
|
+
//#region ../../src/orm/primitives/$transaction.ts
|
|
2691
|
+
/**
|
|
2692
|
+
* Creates a transaction primitive for database operations requiring atomicity and consistency.
|
|
2693
|
+
*
|
|
2694
|
+
* This primitive provides a convenient way to wrap database operations in PostgreSQL
|
|
2695
|
+
* transactions, ensuring ACID properties and automatic retry logic for version conflicts.
|
|
2696
|
+
* It integrates seamlessly with the repository pattern and provides built-in handling
|
|
2697
|
+
* for optimistic locking scenarios with automatic retry on version mismatches.
|
|
2698
|
+
*
|
|
2699
|
+
* **Important Notes**:
|
|
2700
|
+
* - All operations within the transaction handler are atomic
|
|
2701
|
+
* - Automatic retry on `PgVersionMismatchError` for optimistic locking
|
|
2702
|
+
* - Pass `{ tx }` option to all repository operations within the transaction
|
|
2703
|
+
* - Transactions are automatically rolled back on any unhandled error
|
|
2704
|
+
* - Use appropriate isolation levels based on your consistency requirements
|
|
2705
|
+
*/
|
|
2706
|
+
const $transaction = (opts) => {
|
|
2707
|
+
const { alepha } = $context();
|
|
2708
|
+
const provider = alepha.inject(DatabaseProvider);
|
|
2709
|
+
return $retry({
|
|
2710
|
+
when: (err) => err instanceof DbVersionMismatchError,
|
|
2711
|
+
handler: (...args) => provider.db.transaction(async (tx) => opts.handler(tx, ...args), opts.config)
|
|
2712
|
+
});
|
|
2713
|
+
};
|
|
2714
|
+
|
|
2715
|
+
//#endregion
|
|
2716
|
+
//#region ../../src/orm/index.bun.ts
|
|
2717
|
+
var index_bun_exports = /* @__PURE__ */ __exportAll({
|
|
2718
|
+
$entity: () => $entity,
|
|
2719
|
+
$repository: () => $repository,
|
|
2720
|
+
$sequence: () => $sequence,
|
|
2721
|
+
$transaction: () => $transaction,
|
|
2722
|
+
AlephaPostgres: () => AlephaPostgres,
|
|
2723
|
+
BunPostgresProvider: () => BunPostgresProvider,
|
|
2724
|
+
BunSqliteProvider: () => BunSqliteProvider,
|
|
2725
|
+
CloudflareD1Provider: () => CloudflareD1Provider,
|
|
2726
|
+
DatabaseProvider: () => DatabaseProvider,
|
|
2727
|
+
DatabaseTypeProvider: () => DatabaseTypeProvider,
|
|
2728
|
+
DbConflictError: () => DbConflictError,
|
|
2729
|
+
DbEntityNotFoundError: () => DbEntityNotFoundError,
|
|
2730
|
+
DbError: () => DbError,
|
|
2731
|
+
DbMigrationError: () => DbMigrationError,
|
|
2732
|
+
DbVersionMismatchError: () => DbVersionMismatchError,
|
|
2733
|
+
DrizzleKitProvider: () => DrizzleKitProvider,
|
|
2734
|
+
EntityPrimitive: () => EntityPrimitive,
|
|
2735
|
+
PG_CREATED_AT: () => PG_CREATED_AT,
|
|
2736
|
+
PG_DEFAULT: () => PG_DEFAULT,
|
|
2737
|
+
PG_DELETED_AT: () => PG_DELETED_AT,
|
|
2738
|
+
PG_ENUM: () => PG_ENUM,
|
|
2739
|
+
PG_IDENTITY: () => PG_IDENTITY,
|
|
2740
|
+
PG_PRIMARY_KEY: () => PG_PRIMARY_KEY,
|
|
2741
|
+
PG_REF: () => PG_REF,
|
|
2742
|
+
PG_SERIAL: () => PG_SERIAL,
|
|
2743
|
+
PG_UPDATED_AT: () => PG_UPDATED_AT,
|
|
2744
|
+
PG_VERSION: () => PG_VERSION,
|
|
2745
|
+
Repository: () => Repository,
|
|
2746
|
+
RepositoryProvider: () => RepositoryProvider,
|
|
2747
|
+
SequencePrimitive: () => SequencePrimitive,
|
|
2748
|
+
buildQueryString: () => buildQueryString,
|
|
2749
|
+
bunSqliteOptions: () => bunSqliteOptions,
|
|
2750
|
+
db: () => db,
|
|
2751
|
+
drizzle: () => drizzle,
|
|
2752
|
+
getAttrFields: () => getAttrFields,
|
|
2753
|
+
insertSchema: () => insertSchema,
|
|
2754
|
+
legacyIdSchema: () => legacyIdSchema,
|
|
2755
|
+
pageQuerySchema: () => pageQuerySchema,
|
|
2756
|
+
pageSchema: () => pageSchema,
|
|
2757
|
+
parseQueryString: () => parseQueryString,
|
|
2758
|
+
pg: () => pg,
|
|
2759
|
+
pgAttr: () => pgAttr,
|
|
2760
|
+
schema: () => schema,
|
|
2761
|
+
sql: () => sql,
|
|
2762
|
+
updateSchema: () => updateSchema
|
|
2763
|
+
});
|
|
2764
|
+
const AlephaPostgres = $module({
|
|
2765
|
+
name: "alepha.postgres",
|
|
2766
|
+
primitives: [$sequence, $entity],
|
|
2767
|
+
services: [
|
|
2768
|
+
AlephaDateTime,
|
|
2769
|
+
DatabaseProvider,
|
|
2770
|
+
BunPostgresProvider,
|
|
2771
|
+
BunSqliteProvider,
|
|
2772
|
+
PglitePostgresProvider,
|
|
2773
|
+
CloudflareD1Provider,
|
|
2774
|
+
SqliteModelBuilder,
|
|
2775
|
+
PostgresModelBuilder,
|
|
2776
|
+
DrizzleKitProvider,
|
|
2777
|
+
RepositoryProvider,
|
|
2778
|
+
Repository,
|
|
2779
|
+
PgRelationManager,
|
|
2780
|
+
QueryManager
|
|
2781
|
+
],
|
|
2782
|
+
register: (alepha) => {
|
|
2783
|
+
const env = alepha.parseEnv(t.object({ DATABASE_URL: t.optional(t.text()) }));
|
|
2784
|
+
alepha.with(DrizzleKitProvider);
|
|
2785
|
+
alepha.with(RepositoryProvider);
|
|
2786
|
+
const url = env.DATABASE_URL;
|
|
2787
|
+
const isPostgres = url?.startsWith("postgres:");
|
|
2788
|
+
if (url?.startsWith("cloudflare-d1:")) {
|
|
2789
|
+
alepha.with({
|
|
2790
|
+
optional: true,
|
|
2791
|
+
provide: DatabaseProvider,
|
|
2792
|
+
use: CloudflareD1Provider
|
|
2793
|
+
});
|
|
2794
|
+
return;
|
|
2795
|
+
}
|
|
2796
|
+
if (isPostgres) {
|
|
2797
|
+
alepha.with({
|
|
2798
|
+
optional: true,
|
|
2799
|
+
provide: DatabaseProvider,
|
|
2800
|
+
use: BunPostgresProvider
|
|
2801
|
+
});
|
|
2802
|
+
return;
|
|
2803
|
+
}
|
|
2804
|
+
alepha.with({
|
|
2805
|
+
optional: true,
|
|
2806
|
+
provide: DatabaseProvider,
|
|
2807
|
+
use: BunSqliteProvider
|
|
2808
|
+
});
|
|
2809
|
+
}
|
|
2810
|
+
});
|
|
2811
|
+
|
|
2812
|
+
//#endregion
|
|
2813
|
+
export { $entity, $repository, $sequence, $transaction, AlephaPostgres, BunPostgresProvider, BunSqliteProvider, CloudflareD1Provider, DatabaseProvider, DatabaseTypeProvider, DbConflictError, DbEntityNotFoundError, DbError, DbMigrationError, DbVersionMismatchError, DrizzleKitProvider, EntityPrimitive, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_ENUM, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, Repository, RepositoryProvider, SequencePrimitive, buildQueryString, bunSqliteOptions, db, drizzle, getAttrFields, insertSchema, legacyIdSchema, pageQuerySchema, pageSchema, parseQueryString, pg, pgAttr, schema, sql, updateSchema };
|
|
2814
|
+
//# sourceMappingURL=index.bun.js.map
|