alepha 0.12.1 → 0.13.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api-notifications/index.d.ts +111 -111
- package/dist/api-users/index.d.ts +1240 -1240
- package/dist/api-verifications/index.d.ts +94 -94
- package/dist/cli/{dist-Sz2EXvQX.cjs → dist-Dl9Vl7Ur.js} +17 -13
- package/dist/cli/{dist-BBPjuQ56.js.map → dist-Dl9Vl7Ur.js.map} +1 -1
- package/dist/cli/index.d.ts +3 -11
- package/dist/cli/index.js +106 -74
- package/dist/cli/index.js.map +1 -1
- package/dist/email/index.js +71 -73
- package/dist/email/index.js.map +1 -1
- package/dist/orm/index.d.ts +1 -1
- package/dist/orm/index.js.map +1 -1
- package/dist/queue/index.d.ts +4 -4
- package/dist/redis/index.d.ts +10 -10
- package/dist/retry/index.d.ts +1 -1
- package/dist/retry/index.js +2 -2
- package/dist/retry/index.js.map +1 -1
- package/dist/scheduler/index.d.ts +6 -6
- package/dist/server/index.js +1 -1
- package/dist/server/index.js.map +1 -1
- package/dist/server-auth/index.d.ts +193 -193
- package/dist/server-health/index.d.ts +17 -17
- package/dist/server-links/index.d.ts +34 -34
- package/dist/server-metrics/index.js +170 -174
- package/dist/server-metrics/index.js.map +1 -1
- package/dist/server-security/index.d.ts +9 -9
- package/dist/vite/index.js +4 -5
- package/dist/vite/index.js.map +1 -1
- package/dist/websocket/index.d.ts +7 -7
- package/package.json +52 -103
- package/src/cli/apps/AlephaPackageBuilderCli.ts +7 -2
- package/src/cli/assets/appRouterTs.ts +9 -0
- package/src/cli/assets/indexHtml.ts +2 -1
- package/src/cli/assets/mainBrowserTs.ts +10 -0
- package/src/cli/commands/CoreCommands.ts +6 -5
- package/src/cli/commands/DrizzleCommands.ts +65 -57
- package/src/cli/commands/VerifyCommands.ts +1 -1
- package/src/cli/services/ProjectUtils.ts +44 -38
- package/src/orm/providers/DrizzleKitProvider.ts +1 -1
- package/src/retry/descriptors/$retry.ts +5 -3
- package/src/server/providers/NodeHttpServerProvider.ts +1 -1
- package/src/vite/helpers/boot.ts +3 -3
- package/dist/api-files/index.cjs +0 -1293
- package/dist/api-files/index.cjs.map +0 -1
- package/dist/api-files/index.d.cts +0 -829
- package/dist/api-jobs/index.cjs +0 -274
- package/dist/api-jobs/index.cjs.map +0 -1
- package/dist/api-jobs/index.d.cts +0 -654
- package/dist/api-notifications/index.cjs +0 -380
- package/dist/api-notifications/index.cjs.map +0 -1
- package/dist/api-notifications/index.d.cts +0 -289
- package/dist/api-parameters/index.cjs +0 -66
- package/dist/api-parameters/index.cjs.map +0 -1
- package/dist/api-parameters/index.d.cts +0 -84
- package/dist/api-users/index.cjs +0 -6009
- package/dist/api-users/index.cjs.map +0 -1
- package/dist/api-users/index.d.cts +0 -4740
- package/dist/api-verifications/index.cjs +0 -407
- package/dist/api-verifications/index.cjs.map +0 -1
- package/dist/api-verifications/index.d.cts +0 -207
- package/dist/batch/index.cjs +0 -408
- package/dist/batch/index.cjs.map +0 -1
- package/dist/batch/index.d.cts +0 -330
- package/dist/bin/index.cjs +0 -17
- package/dist/bin/index.cjs.map +0 -1
- package/dist/bin/index.d.cts +0 -1
- package/dist/bucket/index.cjs +0 -303
- package/dist/bucket/index.cjs.map +0 -1
- package/dist/bucket/index.d.cts +0 -355
- package/dist/cache/index.cjs +0 -241
- package/dist/cache/index.cjs.map +0 -1
- package/dist/cache/index.d.cts +0 -202
- package/dist/cache-redis/index.cjs +0 -84
- package/dist/cache-redis/index.cjs.map +0 -1
- package/dist/cache-redis/index.d.cts +0 -40
- package/dist/cli/chunk-DSlc6foC.cjs +0 -43
- package/dist/cli/dist-BBPjuQ56.js +0 -2778
- package/dist/cli/dist-Sz2EXvQX.cjs.map +0 -1
- package/dist/cli/index.cjs +0 -1241
- package/dist/cli/index.cjs.map +0 -1
- package/dist/cli/index.d.cts +0 -422
- package/dist/command/index.cjs +0 -693
- package/dist/command/index.cjs.map +0 -1
- package/dist/command/index.d.cts +0 -340
- package/dist/core/index.cjs +0 -2264
- package/dist/core/index.cjs.map +0 -1
- package/dist/core/index.d.cts +0 -1927
- package/dist/datetime/index.cjs +0 -318
- package/dist/datetime/index.cjs.map +0 -1
- package/dist/datetime/index.d.cts +0 -145
- package/dist/email/index.cjs +0 -10874
- package/dist/email/index.cjs.map +0 -1
- package/dist/email/index.d.cts +0 -186
- package/dist/fake/index.cjs +0 -34641
- package/dist/fake/index.cjs.map +0 -1
- package/dist/fake/index.d.cts +0 -74
- package/dist/file/index.cjs +0 -1212
- package/dist/file/index.cjs.map +0 -1
- package/dist/file/index.d.cts +0 -698
- package/dist/lock/index.cjs +0 -226
- package/dist/lock/index.cjs.map +0 -1
- package/dist/lock/index.d.cts +0 -361
- package/dist/lock-redis/index.cjs +0 -113
- package/dist/lock-redis/index.cjs.map +0 -1
- package/dist/lock-redis/index.d.cts +0 -24
- package/dist/logger/index.cjs +0 -521
- package/dist/logger/index.cjs.map +0 -1
- package/dist/logger/index.d.cts +0 -281
- package/dist/orm/index.cjs +0 -2986
- package/dist/orm/index.cjs.map +0 -1
- package/dist/orm/index.d.cts +0 -2213
- package/dist/queue/index.cjs +0 -1044
- package/dist/queue/index.cjs.map +0 -1
- package/dist/queue/index.d.cts +0 -1265
- package/dist/queue-redis/index.cjs +0 -873
- package/dist/queue-redis/index.cjs.map +0 -1
- package/dist/queue-redis/index.d.cts +0 -82
- package/dist/redis/index.cjs +0 -153
- package/dist/redis/index.cjs.map +0 -1
- package/dist/redis/index.d.cts +0 -82
- package/dist/retry/index.cjs +0 -146
- package/dist/retry/index.cjs.map +0 -1
- package/dist/retry/index.d.cts +0 -172
- package/dist/router/index.cjs +0 -111
- package/dist/router/index.cjs.map +0 -1
- package/dist/router/index.d.cts +0 -46
- package/dist/scheduler/index.cjs +0 -576
- package/dist/scheduler/index.cjs.map +0 -1
- package/dist/scheduler/index.d.cts +0 -145
- package/dist/security/index.cjs +0 -2402
- package/dist/security/index.cjs.map +0 -1
- package/dist/security/index.d.cts +0 -598
- package/dist/server/index.cjs +0 -1680
- package/dist/server/index.cjs.map +0 -1
- package/dist/server/index.d.cts +0 -810
- package/dist/server-auth/index.cjs +0 -3146
- package/dist/server-auth/index.cjs.map +0 -1
- package/dist/server-auth/index.d.cts +0 -1164
- package/dist/server-cache/index.cjs +0 -252
- package/dist/server-cache/index.cjs.map +0 -1
- package/dist/server-cache/index.d.cts +0 -164
- package/dist/server-compress/index.cjs +0 -141
- package/dist/server-compress/index.cjs.map +0 -1
- package/dist/server-compress/index.d.cts +0 -38
- package/dist/server-cookies/index.cjs +0 -234
- package/dist/server-cookies/index.cjs.map +0 -1
- package/dist/server-cookies/index.d.cts +0 -144
- package/dist/server-cors/index.cjs +0 -201
- package/dist/server-cors/index.cjs.map +0 -1
- package/dist/server-cors/index.d.cts +0 -140
- package/dist/server-health/index.cjs +0 -62
- package/dist/server-health/index.cjs.map +0 -1
- package/dist/server-health/index.d.cts +0 -58
- package/dist/server-helmet/index.cjs +0 -131
- package/dist/server-helmet/index.cjs.map +0 -1
- package/dist/server-helmet/index.d.cts +0 -97
- package/dist/server-links/index.cjs +0 -992
- package/dist/server-links/index.cjs.map +0 -1
- package/dist/server-links/index.d.cts +0 -513
- package/dist/server-metrics/index.cjs +0 -4535
- package/dist/server-metrics/index.cjs.map +0 -1
- package/dist/server-metrics/index.d.cts +0 -35
- package/dist/server-multipart/index.cjs +0 -237
- package/dist/server-multipart/index.cjs.map +0 -1
- package/dist/server-multipart/index.d.cts +0 -50
- package/dist/server-proxy/index.cjs +0 -186
- package/dist/server-proxy/index.cjs.map +0 -1
- package/dist/server-proxy/index.d.cts +0 -234
- package/dist/server-rate-limit/index.cjs +0 -241
- package/dist/server-rate-limit/index.cjs.map +0 -1
- package/dist/server-rate-limit/index.d.cts +0 -183
- package/dist/server-security/index.cjs +0 -316
- package/dist/server-security/index.cjs.map +0 -1
- package/dist/server-security/index.d.cts +0 -173
- package/dist/server-static/index.cjs +0 -170
- package/dist/server-static/index.cjs.map +0 -1
- package/dist/server-static/index.d.cts +0 -121
- package/dist/server-swagger/index.cjs +0 -1021
- package/dist/server-swagger/index.cjs.map +0 -1
- package/dist/server-swagger/index.d.cts +0 -382
- package/dist/sms/index.cjs +0 -221
- package/dist/sms/index.cjs.map +0 -1
- package/dist/sms/index.d.cts +0 -130
- package/dist/thread/index.cjs +0 -350
- package/dist/thread/index.cjs.map +0 -1
- package/dist/thread/index.d.cts +0 -260
- package/dist/topic/index.cjs +0 -282
- package/dist/topic/index.cjs.map +0 -1
- package/dist/topic/index.d.cts +0 -523
- package/dist/topic-redis/index.cjs +0 -71
- package/dist/topic-redis/index.cjs.map +0 -1
- package/dist/topic-redis/index.d.cts +0 -42
- package/dist/vite/index.cjs +0 -1077
- package/dist/vite/index.cjs.map +0 -1
- package/dist/vite/index.d.cts +0 -542
- package/dist/websocket/index.cjs +0 -1117
- package/dist/websocket/index.cjs.map +0 -1
- package/dist/websocket/index.d.cts +0 -861
package/dist/orm/index.cjs
DELETED
|
@@ -1,2986 +0,0 @@
|
|
|
1
|
-
//#region rolldown:runtime
|
|
2
|
-
var __create = Object.create;
|
|
3
|
-
var __defProp = Object.defineProperty;
|
|
4
|
-
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
-
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
-
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
-
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
-
var __copyProps = (to, from, except, desc$1) => {
|
|
9
|
-
if (from && typeof from === "object" || typeof from === "function") {
|
|
10
|
-
for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
|
|
11
|
-
key = keys[i];
|
|
12
|
-
if (!__hasOwnProp.call(to, key) && key !== except) {
|
|
13
|
-
__defProp(to, key, {
|
|
14
|
-
get: ((k) => from[k]).bind(null, key),
|
|
15
|
-
enumerable: !(desc$1 = __getOwnPropDesc(from, key)) || desc$1.enumerable
|
|
16
|
-
});
|
|
17
|
-
}
|
|
18
|
-
}
|
|
19
|
-
}
|
|
20
|
-
return to;
|
|
21
|
-
};
|
|
22
|
-
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
|
|
23
|
-
value: mod,
|
|
24
|
-
enumerable: true
|
|
25
|
-
}) : target, mod));
|
|
26
|
-
|
|
27
|
-
//#endregion
|
|
28
|
-
let alepha = require("alepha");
|
|
29
|
-
let alepha_datetime = require("alepha/datetime");
|
|
30
|
-
let drizzle_orm = require("drizzle-orm");
|
|
31
|
-
drizzle_orm = __toESM(drizzle_orm);
|
|
32
|
-
let drizzle_orm_pg_core = require("drizzle-orm/pg-core");
|
|
33
|
-
drizzle_orm_pg_core = __toESM(drizzle_orm_pg_core);
|
|
34
|
-
let node_fs_promises = require("node:fs/promises");
|
|
35
|
-
let alepha_logger = require("alepha/logger");
|
|
36
|
-
let drizzle_orm_sql_sql = require("drizzle-orm/sql/sql");
|
|
37
|
-
let node_module = require("node:module");
|
|
38
|
-
let alepha_lock = require("alepha/lock");
|
|
39
|
-
let drizzle_orm_postgres_js = require("drizzle-orm/postgres-js");
|
|
40
|
-
let drizzle_orm_postgres_js_migrator = require("drizzle-orm/postgres-js/migrator");
|
|
41
|
-
let postgres = require("postgres");
|
|
42
|
-
postgres = __toESM(postgres);
|
|
43
|
-
let drizzle_orm_sqlite_proxy = require("drizzle-orm/sqlite-proxy");
|
|
44
|
-
let drizzle_orm_sqlite_proxy_migrator = require("drizzle-orm/sqlite-proxy/migrator");
|
|
45
|
-
let node_crypto = require("node:crypto");
|
|
46
|
-
let drizzle_orm_sqlite_core = require("drizzle-orm/sqlite-core");
|
|
47
|
-
drizzle_orm_sqlite_core = __toESM(drizzle_orm_sqlite_core);
|
|
48
|
-
let drizzle_orm_pglite_migrator = require("drizzle-orm/pglite/migrator");
|
|
49
|
-
let alepha_retry = require("alepha/retry");
|
|
50
|
-
|
|
51
|
-
//#region src/orm/constants/PG_SYMBOLS.ts
|
|
52
|
-
const PG_DEFAULT = Symbol.for("Alepha.Postgres.Default");
|
|
53
|
-
const PG_PRIMARY_KEY = Symbol.for("Alepha.Postgres.PrimaryKey");
|
|
54
|
-
const PG_CREATED_AT = Symbol.for("Alepha.Postgres.CreatedAt");
|
|
55
|
-
const PG_UPDATED_AT = Symbol.for("Alepha.Postgres.UpdatedAt");
|
|
56
|
-
const PG_DELETED_AT = Symbol.for("Alepha.Postgres.DeletedAt");
|
|
57
|
-
const PG_VERSION = Symbol.for("Alepha.Postgres.Version");
|
|
58
|
-
const PG_IDENTITY = Symbol.for("Alepha.Postgres.Identity");
|
|
59
|
-
const PG_ENUM = Symbol.for("Alepha.Postgres.Enum");
|
|
60
|
-
const PG_REF = Symbol.for("Alepha.Postgres.Ref");
|
|
61
|
-
/**
|
|
62
|
-
* @deprecated Use `PG_IDENTITY` instead.
|
|
63
|
-
*/
|
|
64
|
-
const PG_SERIAL = Symbol.for("Alepha.Postgres.Serial");
|
|
65
|
-
|
|
66
|
-
//#endregion
|
|
67
|
-
//#region src/orm/types/schema.ts
|
|
68
|
-
/**
|
|
69
|
-
* Postgres schema type.
|
|
70
|
-
*/
|
|
71
|
-
const schema = (name, document) => (0, drizzle_orm_pg_core.customType)({
|
|
72
|
-
dataType: () => "jsonb",
|
|
73
|
-
toDriver: (value) => JSON.stringify(value),
|
|
74
|
-
fromDriver: (value) => value && typeof value === "string" ? JSON.parse(value) : value
|
|
75
|
-
})(name, { document }).$type();
|
|
76
|
-
|
|
77
|
-
//#endregion
|
|
78
|
-
//#region src/orm/schemas/insertSchema.ts
|
|
79
|
-
const insertSchema = (obj) => {
|
|
80
|
-
const newProperties = {};
|
|
81
|
-
for (const key in obj.properties) {
|
|
82
|
-
const prop = obj.properties[key];
|
|
83
|
-
if (PG_DEFAULT in prop) newProperties[key] = alepha.t.optional(prop);
|
|
84
|
-
else newProperties[key] = prop;
|
|
85
|
-
}
|
|
86
|
-
return alepha.t.object(newProperties, "options" in schema && typeof schema.options === "object" ? { ...schema.options } : {});
|
|
87
|
-
};
|
|
88
|
-
|
|
89
|
-
//#endregion
|
|
90
|
-
//#region src/orm/schemas/updateSchema.ts
|
|
91
|
-
const updateSchema = (schema$1) => {
|
|
92
|
-
const newProperties = {};
|
|
93
|
-
for (const key in schema$1.properties) {
|
|
94
|
-
const prop = schema$1.properties[key];
|
|
95
|
-
if (alepha.t.schema.isOptional(prop)) newProperties[key] = alepha.t.optional(alepha.t.union([prop, alepha.t.raw.Null()]));
|
|
96
|
-
else newProperties[key] = prop;
|
|
97
|
-
}
|
|
98
|
-
return alepha.t.object(newProperties, "options" in schema$1 && typeof schema$1.options === "object" ? { ...schema$1.options } : {});
|
|
99
|
-
};
|
|
100
|
-
|
|
101
|
-
//#endregion
|
|
102
|
-
//#region src/orm/descriptors/$entity.ts
|
|
103
|
-
/**
|
|
104
|
-
* Creates a database entity descriptor that defines table structure using TypeBox schemas.
|
|
105
|
-
*
|
|
106
|
-
* @example
|
|
107
|
-
* ```ts
|
|
108
|
-
* import { t } from "alepha";
|
|
109
|
-
* import { $entity } from "alepha/orm";
|
|
110
|
-
*
|
|
111
|
-
* const userEntity = $entity({
|
|
112
|
-
* name: "users",
|
|
113
|
-
* schema: t.object({
|
|
114
|
-
* id: pg.primaryKey(),
|
|
115
|
-
* name: t.text(),
|
|
116
|
-
* email: t.email(),
|
|
117
|
-
* }),
|
|
118
|
-
* });
|
|
119
|
-
* ```
|
|
120
|
-
*/
|
|
121
|
-
const $entity = (options) => {
|
|
122
|
-
return new EntityDescriptor(options);
|
|
123
|
-
};
|
|
124
|
-
var EntityDescriptor = class EntityDescriptor {
|
|
125
|
-
options;
|
|
126
|
-
constructor(options) {
|
|
127
|
-
this.options = options;
|
|
128
|
-
}
|
|
129
|
-
alias(alias$1) {
|
|
130
|
-
const aliased = new EntityDescriptor(this.options);
|
|
131
|
-
return new Proxy(aliased, { get(target, prop, receiver) {
|
|
132
|
-
if (prop === "$alias") return alias$1;
|
|
133
|
-
return Reflect.get(target, prop, receiver);
|
|
134
|
-
} });
|
|
135
|
-
}
|
|
136
|
-
get cols() {
|
|
137
|
-
const cols = {};
|
|
138
|
-
for (const key of Object.keys(this.schema.properties)) cols[key] = {
|
|
139
|
-
name: key,
|
|
140
|
-
entity: this
|
|
141
|
-
};
|
|
142
|
-
return cols;
|
|
143
|
-
}
|
|
144
|
-
get name() {
|
|
145
|
-
return this.options.name;
|
|
146
|
-
}
|
|
147
|
-
get schema() {
|
|
148
|
-
return this.options.schema;
|
|
149
|
-
}
|
|
150
|
-
get insertSchema() {
|
|
151
|
-
return insertSchema(this.options.schema);
|
|
152
|
-
}
|
|
153
|
-
get updateSchema() {
|
|
154
|
-
return updateSchema(this.options.schema);
|
|
155
|
-
}
|
|
156
|
-
};
|
|
157
|
-
$entity[alepha.KIND] = EntityDescriptor;
|
|
158
|
-
|
|
159
|
-
//#endregion
|
|
160
|
-
//#region src/orm/errors/DbError.ts
|
|
161
|
-
var DbError = class extends alepha.AlephaError {
|
|
162
|
-
name = "DbError";
|
|
163
|
-
constructor(message, cause) {
|
|
164
|
-
super(message, { cause });
|
|
165
|
-
}
|
|
166
|
-
};
|
|
167
|
-
|
|
168
|
-
//#endregion
|
|
169
|
-
//#region src/orm/errors/DbConflictError.ts
|
|
170
|
-
var DbConflictError = class extends DbError {
|
|
171
|
-
name = "DbConflictError";
|
|
172
|
-
status = 409;
|
|
173
|
-
};
|
|
174
|
-
|
|
175
|
-
//#endregion
|
|
176
|
-
//#region src/orm/errors/DbEntityNotFoundError.ts
|
|
177
|
-
var DbEntityNotFoundError = class extends DbError {
|
|
178
|
-
name = "DbEntityNotFoundError";
|
|
179
|
-
status = 404;
|
|
180
|
-
constructor(entityName) {
|
|
181
|
-
super(`Entity from '${entityName}' was not found`);
|
|
182
|
-
}
|
|
183
|
-
};
|
|
184
|
-
|
|
185
|
-
//#endregion
|
|
186
|
-
//#region src/orm/errors/DbVersionMismatchError.ts
|
|
187
|
-
/**
|
|
188
|
-
* Error thrown when there is a version mismatch.
|
|
189
|
-
* It's thrown by {@link Repository#save} when the updated entity version does not match the one in the database.
|
|
190
|
-
* This is used for optimistic concurrency control.
|
|
191
|
-
*/
|
|
192
|
-
var DbVersionMismatchError = class extends DbError {
|
|
193
|
-
name = "DbVersionMismatchError";
|
|
194
|
-
constructor(table, id) {
|
|
195
|
-
super(`Version mismatch for table '${table}' and id '${id}'`);
|
|
196
|
-
}
|
|
197
|
-
};
|
|
198
|
-
|
|
199
|
-
//#endregion
|
|
200
|
-
//#region src/orm/helpers/pgAttr.ts
|
|
201
|
-
/**
|
|
202
|
-
* Decorates a typebox schema with a Postgres attribute.
|
|
203
|
-
*
|
|
204
|
-
* > It's just a fancy way to add Symbols to a field.
|
|
205
|
-
*
|
|
206
|
-
* @example
|
|
207
|
-
* ```ts
|
|
208
|
-
* import { t } from "alepha";
|
|
209
|
-
* import { PG_UPDATED_AT } from "../constants/PG_SYMBOLS";
|
|
210
|
-
*
|
|
211
|
-
* export const updatedAtSchema = pgAttr(
|
|
212
|
-
* t.datetime(), PG_UPDATED_AT,
|
|
213
|
-
* );
|
|
214
|
-
* ```
|
|
215
|
-
*/
|
|
216
|
-
const pgAttr = (type, attr, value) => {
|
|
217
|
-
Object.assign(type, { [attr]: value ?? {} });
|
|
218
|
-
return type;
|
|
219
|
-
};
|
|
220
|
-
/**
|
|
221
|
-
* Retrieves the fields of a schema that have a specific attribute.
|
|
222
|
-
*/
|
|
223
|
-
const getAttrFields = (schema$1, name) => {
|
|
224
|
-
const fields = [];
|
|
225
|
-
for (const key of Object.keys(schema$1.properties)) {
|
|
226
|
-
const value = schema$1.properties[key];
|
|
227
|
-
if (name in value) fields.push({
|
|
228
|
-
type: value,
|
|
229
|
-
key,
|
|
230
|
-
data: value[name]
|
|
231
|
-
});
|
|
232
|
-
}
|
|
233
|
-
return fields;
|
|
234
|
-
};
|
|
235
|
-
|
|
236
|
-
//#endregion
|
|
237
|
-
//#region src/orm/providers/drivers/DatabaseProvider.ts
|
|
238
|
-
var DatabaseProvider = class {
|
|
239
|
-
alepha = (0, alepha.$inject)(alepha.Alepha);
|
|
240
|
-
log = (0, alepha_logger.$logger)();
|
|
241
|
-
enums = /* @__PURE__ */ new Map();
|
|
242
|
-
tables = /* @__PURE__ */ new Map();
|
|
243
|
-
sequences = /* @__PURE__ */ new Map();
|
|
244
|
-
get name() {
|
|
245
|
-
return "default";
|
|
246
|
-
}
|
|
247
|
-
get schema() {
|
|
248
|
-
return "public";
|
|
249
|
-
}
|
|
250
|
-
table(entity) {
|
|
251
|
-
const table = this.tables.get(entity.name);
|
|
252
|
-
if (!table) throw new alepha.AlephaError(`Table '${entity.name}' is not registered`);
|
|
253
|
-
const hasAlias = entity.$alias;
|
|
254
|
-
if (hasAlias) return (0, drizzle_orm_pg_core.alias)(table, hasAlias);
|
|
255
|
-
return table;
|
|
256
|
-
}
|
|
257
|
-
registerEntity(entity) {
|
|
258
|
-
this.builder.buildTable(entity, this);
|
|
259
|
-
}
|
|
260
|
-
registerSequence(sequence) {
|
|
261
|
-
this.builder.buildSequence(sequence, this);
|
|
262
|
-
}
|
|
263
|
-
async run(statement, schema$1) {
|
|
264
|
-
return (await this.execute(statement)).map((row) => this.alepha.codec.decode(schema$1, row));
|
|
265
|
-
}
|
|
266
|
-
/**
|
|
267
|
-
* Get migrations folder path - can be overridden
|
|
268
|
-
*/
|
|
269
|
-
getMigrationsFolder() {
|
|
270
|
-
return `migrations/${this.name}`;
|
|
271
|
-
}
|
|
272
|
-
/**
|
|
273
|
-
* Base migration orchestration - handles environment logic
|
|
274
|
-
*/
|
|
275
|
-
async migrateDatabase() {
|
|
276
|
-
const migrationsFolder = this.getMigrationsFolder();
|
|
277
|
-
if (this.alepha.isProduction()) await this.runProductionMigration(migrationsFolder);
|
|
278
|
-
else if (this.alepha.isTest()) await this.runTestMigration();
|
|
279
|
-
else await this.runDevelopmentMigration(migrationsFolder);
|
|
280
|
-
}
|
|
281
|
-
/**
|
|
282
|
-
* Production: run migrations from folder
|
|
283
|
-
*/
|
|
284
|
-
async runProductionMigration(migrationsFolder) {
|
|
285
|
-
if (!await (0, node_fs_promises.stat)(migrationsFolder).catch(() => false)) {
|
|
286
|
-
this.log.warn("Migration SKIPPED - no migrations found");
|
|
287
|
-
return;
|
|
288
|
-
}
|
|
289
|
-
this.log.debug(`Migrate from '${migrationsFolder}' directory ...`);
|
|
290
|
-
await this.executeMigrations(migrationsFolder);
|
|
291
|
-
this.log.info("Migration OK");
|
|
292
|
-
}
|
|
293
|
-
/**
|
|
294
|
-
* Test: always synchronize
|
|
295
|
-
*/
|
|
296
|
-
async runTestMigration() {
|
|
297
|
-
await this.synchronizeSchema();
|
|
298
|
-
}
|
|
299
|
-
/**
|
|
300
|
-
* Development: default to synchronize (can be overridden)
|
|
301
|
-
*/
|
|
302
|
-
async runDevelopmentMigration(migrationsFolder) {
|
|
303
|
-
try {
|
|
304
|
-
if (!this.url.includes(":memory:")) await this.executeMigrations(migrationsFolder);
|
|
305
|
-
} catch {}
|
|
306
|
-
await this.synchronizeSchema();
|
|
307
|
-
}
|
|
308
|
-
/**
|
|
309
|
-
* Common synchronization with error handling
|
|
310
|
-
*/
|
|
311
|
-
async synchronizeSchema() {
|
|
312
|
-
try {
|
|
313
|
-
await this.kit.synchronize(this);
|
|
314
|
-
} catch (error) {
|
|
315
|
-
throw new DbError(`Failed to synchronize ${this.dialect} database schema`, error);
|
|
316
|
-
}
|
|
317
|
-
}
|
|
318
|
-
};
|
|
319
|
-
|
|
320
|
-
//#endregion
|
|
321
|
-
//#region src/orm/services/PgRelationManager.ts
|
|
322
|
-
var PgRelationManager = class {
|
|
323
|
-
/**
|
|
324
|
-
* Recursively build joins for the query builder based on the relations map
|
|
325
|
-
*/
|
|
326
|
-
buildJoins(provider, builder, joins, withRelations, table, parentKey) {
|
|
327
|
-
for (const [key, join] of Object.entries(withRelations)) {
|
|
328
|
-
const from = provider.table(join.join);
|
|
329
|
-
const on = (0, drizzle_orm_sql_sql.isSQLWrapper)(join.on) ? join.on : drizzle_orm.sql`${table[join.on[0]]} = ${from[join.on[1].name]}`;
|
|
330
|
-
if (join.type === "right") builder.rightJoin(from, on);
|
|
331
|
-
else if (join.type === "inner") builder.innerJoin(from, on);
|
|
332
|
-
else builder.leftJoin(from, on);
|
|
333
|
-
joins.push({
|
|
334
|
-
key,
|
|
335
|
-
table: (0, drizzle_orm.getTableName)(from),
|
|
336
|
-
schema: join.join.schema,
|
|
337
|
-
col: (name) => from[name],
|
|
338
|
-
parent: parentKey
|
|
339
|
-
});
|
|
340
|
-
if (join.with) this.buildJoins(provider, builder, joins, join.with, from, parentKey ? `${parentKey}.${key}` : key);
|
|
341
|
-
}
|
|
342
|
-
}
|
|
343
|
-
/**
|
|
344
|
-
* Map a row with its joined relations based on the joins definition
|
|
345
|
-
*/
|
|
346
|
-
mapRowWithJoins(record, row, schema$1, joins, parentKey) {
|
|
347
|
-
for (const join of joins) if (join.parent === parentKey) {
|
|
348
|
-
const joinedData = row[join.table];
|
|
349
|
-
if (this.isAllNull(joinedData)) record[join.key] = void 0;
|
|
350
|
-
else {
|
|
351
|
-
record[join.key] = joinedData;
|
|
352
|
-
this.mapRowWithJoins(record[join.key], row, schema$1, joins, parentKey ? `${parentKey}.${join.key}` : join.key);
|
|
353
|
-
}
|
|
354
|
-
}
|
|
355
|
-
return record;
|
|
356
|
-
}
|
|
357
|
-
/**
|
|
358
|
-
* Check if all values in an object are null (indicates a left join with no match)
|
|
359
|
-
*/
|
|
360
|
-
isAllNull(obj) {
|
|
361
|
-
if (obj === null || obj === void 0) return true;
|
|
362
|
-
if (typeof obj !== "object") return false;
|
|
363
|
-
return Object.values(obj).every((val) => val === null);
|
|
364
|
-
}
|
|
365
|
-
/**
|
|
366
|
-
* Build a schema that includes all join properties recursively
|
|
367
|
-
*/
|
|
368
|
-
buildSchemaWithJoins(baseSchema, joins, parentPath) {
|
|
369
|
-
const schema$1 = alepha.TypeBoxValue.Clone(baseSchema);
|
|
370
|
-
const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
|
|
371
|
-
for (const join of joinsAtThisLevel) {
|
|
372
|
-
const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
|
|
373
|
-
const childJoins = joins.filter((j) => j.parent === joinPath);
|
|
374
|
-
let joinSchema = join.schema;
|
|
375
|
-
if (childJoins.length > 0) joinSchema = this.buildSchemaWithJoins(join.schema, joins, joinPath);
|
|
376
|
-
schema$1.properties[join.key] = alepha.t.optional(joinSchema);
|
|
377
|
-
}
|
|
378
|
-
return schema$1;
|
|
379
|
-
}
|
|
380
|
-
};
|
|
381
|
-
|
|
382
|
-
//#endregion
|
|
383
|
-
//#region src/orm/services/PgJsonQueryManager.ts
|
|
384
|
-
/**
|
|
385
|
-
* Manages JSONB query generation for nested object and array queries in PostgreSQL.
|
|
386
|
-
* This class handles complex nested queries using PostgreSQL's JSONB operators.
|
|
387
|
-
*/
|
|
388
|
-
var PgJsonQueryManager = class {
|
|
389
|
-
/**
|
|
390
|
-
* Check if a query contains nested JSONB queries.
|
|
391
|
-
* A nested query is when the value is an object with operator keys.
|
|
392
|
-
*/
|
|
393
|
-
hasNestedQuery(where) {
|
|
394
|
-
for (const [key, value] of Object.entries(where)) {
|
|
395
|
-
if (key === "and" || key === "or" || key === "not") continue;
|
|
396
|
-
if (value && typeof value === "object" && !Array.isArray(value)) {
|
|
397
|
-
const keys = Object.keys(value);
|
|
398
|
-
if (!keys.some((k) => [
|
|
399
|
-
"eq",
|
|
400
|
-
"ne",
|
|
401
|
-
"gt",
|
|
402
|
-
"gte",
|
|
403
|
-
"lt",
|
|
404
|
-
"lte",
|
|
405
|
-
"like",
|
|
406
|
-
"ilike",
|
|
407
|
-
"isNull",
|
|
408
|
-
"isNotNull",
|
|
409
|
-
"inArray",
|
|
410
|
-
"notInArray"
|
|
411
|
-
].includes(k)) && keys.length > 0) return true;
|
|
412
|
-
}
|
|
413
|
-
}
|
|
414
|
-
return false;
|
|
415
|
-
}
|
|
416
|
-
/**
|
|
417
|
-
* Build a JSONB query condition for nested object queries.
|
|
418
|
-
* Supports deep nesting like: { profile: { contact: { email: { eq: "test@example.com" } } } }
|
|
419
|
-
*
|
|
420
|
-
* @param column The JSONB column
|
|
421
|
-
* @param path The path to the nested property (e.g., ['profile', 'contact', 'email'])
|
|
422
|
-
* @param operator The filter operator (e.g., { eq: "test@example.com" })
|
|
423
|
-
* @param dialect Database dialect (postgresql or sqlite)
|
|
424
|
-
* @param columnSchema Optional schema of the JSON column for type inference
|
|
425
|
-
* @returns SQL condition
|
|
426
|
-
*/
|
|
427
|
-
buildJsonbCondition(column, path, operator, dialect, columnSchema) {
|
|
428
|
-
if (path.length === 0) return;
|
|
429
|
-
const isArrayOperator = operator.arrayContains !== void 0 || operator.arrayContained !== void 0 || operator.arrayOverlaps !== void 0;
|
|
430
|
-
let jsonValue;
|
|
431
|
-
if (dialect === "sqlite") jsonValue = drizzle_orm.sql`json_extract(${column}, ${`$.${path.join(".")}`})`;
|
|
432
|
-
else {
|
|
433
|
-
let jsonPath = drizzle_orm.sql`${column}`;
|
|
434
|
-
for (let i = 0; i < path.length - 1; i++) jsonPath = drizzle_orm.sql`${jsonPath}->${path[i]}`;
|
|
435
|
-
const lastPath = path[path.length - 1];
|
|
436
|
-
if (isArrayOperator) jsonValue = drizzle_orm.sql`${jsonPath}->${lastPath}`;
|
|
437
|
-
else jsonValue = drizzle_orm.sql`${jsonPath}->>${lastPath}`;
|
|
438
|
-
}
|
|
439
|
-
const fieldType = columnSchema ? this.getFieldType(columnSchema, path) : void 0;
|
|
440
|
-
return this.applyOperatorToJsonValue(jsonValue, operator, dialect, fieldType);
|
|
441
|
-
}
|
|
442
|
-
/**
|
|
443
|
-
* Build JSONB array query conditions.
|
|
444
|
-
* Supports queries like: { addresses: { city: { eq: "Wonderland" } } }
|
|
445
|
-
* which translates to: EXISTS (SELECT 1 FROM jsonb_array_elements(addresses) elem WHERE elem->>'city' = 'Wonderland')
|
|
446
|
-
*
|
|
447
|
-
* @param dialect Database dialect (postgresql or sqlite)
|
|
448
|
-
* Note: SQLite array queries are not yet supported
|
|
449
|
-
*/
|
|
450
|
-
buildJsonbArrayCondition(column, path, arrayPath, operator, dialect) {
|
|
451
|
-
if (dialect === "sqlite") throw new Error("Array queries in JSON columns are not yet supported for SQLite. Please use PostgreSQL for complex JSON array queries, or restructure your data.");
|
|
452
|
-
if (path.length === 0) return;
|
|
453
|
-
let jsonPath = drizzle_orm.sql`${column}`;
|
|
454
|
-
if (arrayPath) jsonPath = drizzle_orm.sql`${jsonPath}->${arrayPath}`;
|
|
455
|
-
const elemCondition = drizzle_orm.sql`elem->>${path[0]}`;
|
|
456
|
-
const condition = this.applyOperatorToJsonValue(elemCondition, operator, dialect);
|
|
457
|
-
if (!condition) return;
|
|
458
|
-
return drizzle_orm.sql`EXISTS (SELECT 1 FROM jsonb_array_elements(${jsonPath}) AS elem WHERE ${condition})`;
|
|
459
|
-
}
|
|
460
|
-
/**
|
|
461
|
-
* Apply a filter operator to a JSONB value.
|
|
462
|
-
* @param dialect Database dialect for appropriate casting syntax
|
|
463
|
-
* @param fieldType Optional field type from schema for smart casting
|
|
464
|
-
*/
|
|
465
|
-
applyOperatorToJsonValue(jsonValue, operator, dialect, fieldType) {
|
|
466
|
-
const castForNumeric = (value) => {
|
|
467
|
-
if (dialect === "sqlite") {
|
|
468
|
-
if (fieldType === "integer" || fieldType === "int") return drizzle_orm.sql`CAST(${value} AS INTEGER)`;
|
|
469
|
-
return drizzle_orm.sql`CAST(${value} AS REAL)`;
|
|
470
|
-
}
|
|
471
|
-
return drizzle_orm.sql`(${value})::numeric`;
|
|
472
|
-
};
|
|
473
|
-
if (typeof operator !== "object") return drizzle_orm.sql`${jsonValue} = ${operator}`;
|
|
474
|
-
const conditions = [];
|
|
475
|
-
if (operator.eq !== void 0) conditions.push(drizzle_orm.sql`${jsonValue} = ${operator.eq}`);
|
|
476
|
-
if (operator.ne !== void 0) conditions.push(drizzle_orm.sql`${jsonValue} != ${operator.ne}`);
|
|
477
|
-
if (operator.gt !== void 0) conditions.push(drizzle_orm.sql`${castForNumeric(jsonValue)} > ${operator.gt}`);
|
|
478
|
-
if (operator.gte !== void 0) conditions.push(drizzle_orm.sql`${castForNumeric(jsonValue)} >= ${operator.gte}`);
|
|
479
|
-
if (operator.lt !== void 0) conditions.push(drizzle_orm.sql`${castForNumeric(jsonValue)} < ${operator.lt}`);
|
|
480
|
-
if (operator.lte !== void 0) conditions.push(drizzle_orm.sql`${castForNumeric(jsonValue)} <= ${operator.lte}`);
|
|
481
|
-
if (operator.like !== void 0) conditions.push(drizzle_orm.sql`${jsonValue} LIKE ${operator.like}`);
|
|
482
|
-
if (operator.ilike !== void 0) if (dialect === "sqlite") conditions.push(drizzle_orm.sql`${jsonValue} LIKE ${operator.ilike}`);
|
|
483
|
-
else conditions.push(drizzle_orm.sql`${jsonValue} ILIKE ${operator.ilike}`);
|
|
484
|
-
if (operator.notLike !== void 0) conditions.push(drizzle_orm.sql`${jsonValue} NOT LIKE ${operator.notLike}`);
|
|
485
|
-
if (operator.notIlike !== void 0) if (dialect === "sqlite") conditions.push(drizzle_orm.sql`${jsonValue} NOT LIKE ${operator.notIlike}`);
|
|
486
|
-
else conditions.push(drizzle_orm.sql`${jsonValue} NOT ILIKE ${operator.notIlike}`);
|
|
487
|
-
if (operator.isNull !== void 0) conditions.push(drizzle_orm.sql`${jsonValue} IS NULL`);
|
|
488
|
-
if (operator.isNotNull !== void 0) conditions.push(drizzle_orm.sql`${jsonValue} IS NOT NULL`);
|
|
489
|
-
if (operator.inArray !== void 0 && Array.isArray(operator.inArray)) conditions.push(drizzle_orm.sql`${jsonValue} IN (${drizzle_orm.sql.join(operator.inArray.map((v) => drizzle_orm.sql`${v}`), drizzle_orm.sql`, `)})`);
|
|
490
|
-
if (operator.notInArray !== void 0 && Array.isArray(operator.notInArray)) conditions.push(drizzle_orm.sql`${jsonValue} NOT IN (${drizzle_orm.sql.join(operator.notInArray.map((v) => drizzle_orm.sql`${v}`), drizzle_orm.sql`, `)})`);
|
|
491
|
-
if (operator.arrayContains !== void 0) {
|
|
492
|
-
if (dialect === "postgresql") {
|
|
493
|
-
const jsonArray = JSON.stringify(Array.isArray(operator.arrayContains) ? operator.arrayContains : [operator.arrayContains]);
|
|
494
|
-
conditions.push(drizzle_orm.sql`${jsonValue} @> ${jsonArray}::jsonb`);
|
|
495
|
-
}
|
|
496
|
-
}
|
|
497
|
-
if (operator.arrayContained !== void 0) {
|
|
498
|
-
if (dialect === "postgresql") {
|
|
499
|
-
const jsonArray = JSON.stringify(Array.isArray(operator.arrayContained) ? operator.arrayContained : [operator.arrayContained]);
|
|
500
|
-
conditions.push(drizzle_orm.sql`${jsonValue} <@ ${jsonArray}::jsonb`);
|
|
501
|
-
}
|
|
502
|
-
}
|
|
503
|
-
if (operator.arrayOverlaps !== void 0) {
|
|
504
|
-
if (dialect === "postgresql") {
|
|
505
|
-
const overlapConditions = (Array.isArray(operator.arrayOverlaps) ? operator.arrayOverlaps : [operator.arrayOverlaps]).map((val) => {
|
|
506
|
-
return drizzle_orm.sql`${jsonValue} @> ${JSON.stringify(val)}::jsonb`;
|
|
507
|
-
});
|
|
508
|
-
if (overlapConditions.length > 0) conditions.push(drizzle_orm.sql`(${drizzle_orm.sql.join(overlapConditions, drizzle_orm.sql` OR `)})`);
|
|
509
|
-
}
|
|
510
|
-
}
|
|
511
|
-
if (conditions.length === 0) return;
|
|
512
|
-
if (conditions.length === 1) return conditions[0];
|
|
513
|
-
return drizzle_orm.sql.join(conditions, drizzle_orm.sql` AND `);
|
|
514
|
-
}
|
|
515
|
-
/**
|
|
516
|
-
* Parse a nested query object and extract the path and operator.
|
|
517
|
-
* For example: { profile: { contact: { email: { eq: "test@example.com" } } } }
|
|
518
|
-
* Returns: { path: ['profile', 'contact', 'email'], operator: { eq: "test@example.com" } }
|
|
519
|
-
*/
|
|
520
|
-
parseNestedQuery(nestedQuery, currentPath = []) {
|
|
521
|
-
const results = [];
|
|
522
|
-
for (const [key, value] of Object.entries(nestedQuery)) if (value && typeof value === "object" && !Array.isArray(value)) if (Object.keys(value).some((k) => [
|
|
523
|
-
"eq",
|
|
524
|
-
"ne",
|
|
525
|
-
"gt",
|
|
526
|
-
"gte",
|
|
527
|
-
"lt",
|
|
528
|
-
"lte",
|
|
529
|
-
"like",
|
|
530
|
-
"ilike",
|
|
531
|
-
"notLike",
|
|
532
|
-
"notIlike",
|
|
533
|
-
"isNull",
|
|
534
|
-
"isNotNull",
|
|
535
|
-
"inArray",
|
|
536
|
-
"notInArray",
|
|
537
|
-
"arrayContains",
|
|
538
|
-
"arrayContained",
|
|
539
|
-
"arrayOverlaps"
|
|
540
|
-
].includes(k))) results.push({
|
|
541
|
-
path: [...currentPath, key],
|
|
542
|
-
operator: value
|
|
543
|
-
});
|
|
544
|
-
else {
|
|
545
|
-
const nestedResults = this.parseNestedQuery(value, [...currentPath, key]);
|
|
546
|
-
results.push(...nestedResults);
|
|
547
|
-
}
|
|
548
|
-
return results;
|
|
549
|
-
}
|
|
550
|
-
/**
|
|
551
|
-
* Determine if a property is a JSONB column based on the schema.
|
|
552
|
-
* A column is JSONB if it's defined as an object or array in the TypeBox schema.
|
|
553
|
-
*/
|
|
554
|
-
isJsonbColumn(schema$1, columnName) {
|
|
555
|
-
const property = schema$1.properties[columnName];
|
|
556
|
-
if (!property) return false;
|
|
557
|
-
return property.type === "object" || property.type === "array";
|
|
558
|
-
}
|
|
559
|
-
/**
|
|
560
|
-
* Check if an array property contains primitive types (string, number, boolean, etc.)
|
|
561
|
-
* rather than objects. Primitive arrays should use native Drizzle operators.
|
|
562
|
-
* @returns true if the array contains primitives, false if it contains objects
|
|
563
|
-
*/
|
|
564
|
-
isPrimitiveArray(schema$1, columnName) {
|
|
565
|
-
const property = schema$1.properties[columnName];
|
|
566
|
-
if (!property || property.type !== "array") return false;
|
|
567
|
-
const items = property.items;
|
|
568
|
-
if (!items) return false;
|
|
569
|
-
const itemType = items.type;
|
|
570
|
-
return itemType === "string" || itemType === "number" || itemType === "integer" || itemType === "boolean" || itemType === "null";
|
|
571
|
-
}
|
|
572
|
-
/**
|
|
573
|
-
* Get the type of a field by navigating through a schema path.
|
|
574
|
-
* Used for smart type casting in SQL queries.
|
|
575
|
-
*
|
|
576
|
-
* @param columnSchema The schema of the JSON column (e.g., t.object({ age: t.integer() }))
|
|
577
|
-
* @param path The path to navigate (e.g., ['contact', 'email'])
|
|
578
|
-
* @returns The type string (e.g., 'integer', 'number', 'string') or undefined if not found
|
|
579
|
-
*/
|
|
580
|
-
getFieldType(columnSchema, path) {
|
|
581
|
-
let current = columnSchema;
|
|
582
|
-
for (const segment of path) if (current.type === "object" && current.properties) {
|
|
583
|
-
current = current.properties[segment];
|
|
584
|
-
if (!current) return;
|
|
585
|
-
} else return;
|
|
586
|
-
return current.type;
|
|
587
|
-
}
|
|
588
|
-
/**
|
|
589
|
-
* Check if a nested path points to an array property.
|
|
590
|
-
*/
|
|
591
|
-
isArrayProperty(schema$1, path) {
|
|
592
|
-
if (path.length === 0) return false;
|
|
593
|
-
let currentSchema = schema$1.properties[path[0]];
|
|
594
|
-
if (!currentSchema) return false;
|
|
595
|
-
if (currentSchema.type === "array") return true;
|
|
596
|
-
for (let i = 1; i < path.length; i++) if (currentSchema.type === "object" && currentSchema.properties) {
|
|
597
|
-
currentSchema = currentSchema.properties[path[i]];
|
|
598
|
-
if (!currentSchema) return false;
|
|
599
|
-
if (currentSchema.type === "array") return true;
|
|
600
|
-
} else return false;
|
|
601
|
-
return false;
|
|
602
|
-
}
|
|
603
|
-
};
|
|
604
|
-
|
|
605
|
-
//#endregion
|
|
606
|
-
//#region src/orm/services/QueryManager.ts
|
|
607
|
-
/**
 * Translates the framework's JSON query DSL into Drizzle SQL conditions.
 * Delegates JSONB-specific handling to PgJsonQueryManager and supports both
 * "postgresql" and "sqlite" dialects (sqlite lacks ILIKE, so LOWER(..) LIKE is used).
 */
var QueryManager = class {
	// Collaborators resolved through the DI container at construction time.
	jsonQueryManager = (0, alepha.$inject)(PgJsonQueryManager);
	alepha = (0, alepha.$inject)(alepha.Alepha);
	/**
	 * Convert a query object to a SQL query.
	 *
	 * Accepts either a raw Drizzle SQL wrapper (passed through as-is) or a plain
	 * object whose keys are column names, join keys, or the logical combinators
	 * "and" / "or" / "not".
	 */
	toSQL(query, options) {
		const { schema: schema$1, col, joins } = options;
		const conditions = [];
		if ((0, drizzle_orm.isSQLWrapper)(query)) conditions.push(query);
		else {
			const keys = Object.keys(query);
			for (const key of keys) {
				const operator = query[key];
				// A nested object that matches a join key is resolved against the
				// joined table's schema/columns, recursing with re-rooted joins.
				if (typeof query[key] === "object" && query[key] != null && !Array.isArray(query[key]) && joins?.length) {
					const matchingJoins = joins.filter((j) => j.key === key);
					if (matchingJoins.length > 0) {
						const join = matchingJoins[0];
						const joinPath = join.parent ? `${join.parent}.${key}` : key;
						// Keep only joins nested under this join path, and strip the
						// prefix so they look top-level to the recursive call.
						const recursiveJoins = joins.filter((j) => {
							if (!j.parent) return false;
							return j.parent === joinPath || j.parent.startsWith(`${joinPath}.`);
						}).map((j) => {
							const newParent = j.parent === joinPath ? void 0 : j.parent.substring(joinPath.length + 1);
							return {
								...j,
								parent: newParent
							};
						});
						const sql$8 = this.toSQL(query[key], {
							schema: join.schema,
							col: join.col,
							joins: recursiveJoins.length > 0 ? recursiveJoins : void 0,
							dialect: options.dialect
						});
						if (sql$8) conditions.push(sql$8);
						continue;
					}
				}
				if (Array.isArray(operator)) {
					const operations = operator.map((it) => {
						if ((0, drizzle_orm.isSQLWrapper)(it)) return it;
						return this.toSQL(it, {
							schema: schema$1,
							col,
							joins,
							dialect: options.dialect
						});
					}).filter((it) => it != null);
					// NOTE(review): "and"/"or" return immediately, discarding any
					// conditions already collected from earlier keys — confirm callers
					// never mix combinators with sibling column filters.
					if (key === "and") return (0, drizzle_orm.and)(...operations);
					if (key === "or") return (0, drizzle_orm.or)(...operations);
				}
				if (key === "not") {
					const where = this.toSQL(operator, {
						schema: schema$1,
						col,
						joins,
						dialect: options.dialect
					});
					// Same early-return behavior as "and"/"or" above.
					if (where) return (0, drizzle_orm.not)(where);
				}
				// Nested queries on non-primitive JSONB columns go through the JSONB
				// path builder; everything else maps to plain Drizzle operators.
				if (operator) if (this.jsonQueryManager.isJsonbColumn(schema$1, key) && !this.jsonQueryManager.isPrimitiveArray(schema$1, key) && typeof operator === "object" && !Array.isArray(operator) && this.jsonQueryManager.hasNestedQuery({ [key]: operator })) {
					const column = col(key);
					const jsonbSql = this.buildJsonbQuery(column, operator, schema$1, key, options.dialect);
					if (jsonbSql) conditions.push(jsonbSql);
				} else {
					const column = col(key);
					const sql$8 = this.mapOperatorToSql(operator, column, schema$1, key, options.dialect);
					if (sql$8) conditions.push(sql$8);
				}
			}
		}
		if (conditions.length === 1) return conditions[0];
		// Empty conditions yield and() with no args (drizzle treats as no-op filter).
		return (0, drizzle_orm.and)(...conditions);
	}
	/**
	 * Build a JSONB query for nested object/array queries.
	 * Each parsed path/operator pair becomes one condition; array-valued paths
	 * use the array-aware builders, scalar paths the plain JSONB builder.
	 */
	buildJsonbQuery(column, nestedQuery, schema$1, columnName, dialect) {
		const queries = this.jsonQueryManager.parseNestedQuery(nestedQuery);
		if (queries.length === 0) return;
		const columnSchema = schema$1.properties[columnName];
		const conditions = [];
		for (const { path, operator } of queries) {
			const isArrayOperator = operator.arrayContains !== void 0 || operator.arrayContained !== void 0 || operator.arrayOverlaps !== void 0;
			const isArrayProp = this.jsonQueryManager.isArrayProperty(schema$1, [columnName, ...path]);
			if (isArrayProp && isArrayOperator) {
				const condition = this.jsonQueryManager.buildJsonbCondition(column, path, operator, dialect, columnSchema);
				if (condition) conditions.push(condition);
			} else if (isArrayProp && !isArrayOperator) {
				// Scalar operator applied to an array property: delegate to the
				// element-wise array condition builder (empty field selector).
				const condition = this.jsonQueryManager.buildJsonbArrayCondition(column, path, "", operator, dialect);
				if (condition) conditions.push(condition);
			} else {
				const condition = this.jsonQueryManager.buildJsonbCondition(column, path, operator, dialect, columnSchema);
				if (condition) conditions.push(condition);
			}
		}
		if (conditions.length === 0) return;
		if (conditions.length === 1) return conditions[0];
		return (0, drizzle_orm.and)(...conditions);
	}
	/**
	 * Check if an object has any filter operator properties.
	 * Note: this list is broader than PgJsonQueryManager's nested-operator list
	 * (it also includes contains/startsWith/endsWith/between/notBetween).
	 */
	hasFilterOperatorProperties(obj) {
		if (!obj || typeof obj !== "object") return false;
		return [
			"eq",
			"ne",
			"gt",
			"gte",
			"lt",
			"lte",
			"inArray",
			"notInArray",
			"isNull",
			"isNotNull",
			"like",
			"notLike",
			"ilike",
			"notIlike",
			"contains",
			"startsWith",
			"endsWith",
			"between",
			"notBetween",
			"arrayContains",
			"arrayContained",
			"arrayOverlaps"
		].some((key) => key in obj);
	}
	/**
	 * Map a filter operator to a SQL query.
	 *
	 * Values are encoded through the alepha codec ("drizzle" encoder) when the
	 * column's field schema is known; encoding failures silently fall back to
	 * the raw value (intentional best-effort — do not turn into a throw).
	 * A non-operator value (primitive, or object without operator keys) becomes
	 * a plain equality check.
	 */
	mapOperatorToSql(operator, column, columnSchema, columnName, dialect = "postgresql") {
		const encodeValue = (value) => {
			if (value == null) return value;
			if (columnSchema && columnName) try {
				const fieldSchema = columnSchema.properties[columnName];
				if (fieldSchema) return this.alepha.codec.encode(fieldSchema, value, { encoder: "drizzle" });
			} catch (error) {}
			return value;
		};
		const encodeArray = (values) => {
			return values.map((v) => encodeValue(v));
		};
		if (typeof operator !== "object" || operator == null || !this.hasFilterOperatorProperties(operator)) return (0, drizzle_orm.eq)(column, encodeValue(operator));
		const conditions = [];
		// NOTE(review): `!= null` guards mean `{ eq: null }` is ignored rather
		// than emitting IS NULL — use the explicit isNull operator for that.
		if (operator?.eq != null) conditions.push((0, drizzle_orm.eq)(column, encodeValue(operator.eq)));
		if (operator?.ne != null) conditions.push((0, drizzle_orm.ne)(column, encodeValue(operator.ne)));
		if (operator?.gt != null) conditions.push((0, drizzle_orm.gt)(column, encodeValue(operator.gt)));
		if (operator?.gte != null) conditions.push((0, drizzle_orm.gte)(column, encodeValue(operator.gte)));
		if (operator?.lt != null) conditions.push((0, drizzle_orm.lt)(column, encodeValue(operator.lt)));
		if (operator?.lte != null) conditions.push((0, drizzle_orm.lte)(column, encodeValue(operator.lte)));
		if (operator?.inArray != null) {
			if (!Array.isArray(operator.inArray) || operator.inArray.length === 0) throw new alepha.AlephaError("inArray operator requires at least one value");
			conditions.push((0, drizzle_orm.inArray)(column, encodeArray(operator.inArray)));
		}
		if (operator?.notInArray != null) {
			if (!Array.isArray(operator.notInArray) || operator.notInArray.length === 0) throw new alepha.AlephaError("notInArray operator requires at least one value");
			conditions.push((0, drizzle_orm.notInArray)(column, encodeArray(operator.notInArray)));
		}
		if (operator?.isNull != null) conditions.push((0, drizzle_orm.isNull)(column));
		if (operator?.isNotNull != null) conditions.push((0, drizzle_orm.isNotNull)(column));
		if (operator?.like != null) conditions.push((0, drizzle_orm.like)(column, encodeValue(operator.like)));
		if (operator?.notLike != null) conditions.push((0, drizzle_orm.notLike)(column, encodeValue(operator.notLike)));
		if (operator?.ilike != null) conditions.push((0, drizzle_orm.ilike)(column, encodeValue(operator.ilike)));
		if (operator?.notIlike != null) conditions.push((0, drizzle_orm.notIlike)(column, encodeValue(operator.notIlike)));
		// contains/startsWith/endsWith escape LIKE wildcards in the user value,
		// then wrap with % as appropriate; sqlite emulates ILIKE via LOWER().
		if (operator?.contains != null) {
			const escapedValue = String(operator.contains).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
			if (dialect === "sqlite") conditions.push(drizzle_orm.sql`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}%`)})`);
			else conditions.push((0, drizzle_orm.ilike)(column, encodeValue(`%${escapedValue}%`)));
		}
		if (operator?.startsWith != null) {
			const escapedValue = String(operator.startsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
			if (dialect === "sqlite") conditions.push(drizzle_orm.sql`LOWER(${column}) LIKE LOWER(${encodeValue(`${escapedValue}%`)})`);
			else conditions.push((0, drizzle_orm.ilike)(column, encodeValue(`${escapedValue}%`)));
		}
		if (operator?.endsWith != null) {
			const escapedValue = String(operator.endsWith).replace(/\\/g, "\\\\").replace(/%/g, "\\%").replace(/_/g, "\\_");
			if (dialect === "sqlite") conditions.push(drizzle_orm.sql`LOWER(${column}) LIKE LOWER(${encodeValue(`%${escapedValue}`)})`);
			else conditions.push((0, drizzle_orm.ilike)(column, encodeValue(`%${escapedValue}`)));
		}
		if (operator?.between != null) {
			if (!Array.isArray(operator.between) || operator.between.length !== 2) throw new Error("between operator requires exactly 2 values [min, max]");
			conditions.push((0, drizzle_orm.between)(column, encodeValue(operator.between[0]), encodeValue(operator.between[1])));
		}
		if (operator?.notBetween != null) {
			if (!Array.isArray(operator.notBetween) || operator.notBetween.length !== 2) throw new Error("notBetween operator requires exactly 2 values [min, max]");
			conditions.push((0, drizzle_orm.notBetween)(column, encodeValue(operator.notBetween[0]), encodeValue(operator.notBetween[1])));
		}
		if (operator?.arrayContains != null) conditions.push((0, drizzle_orm.arrayContains)(column, encodeValue(operator.arrayContains)));
		if (operator?.arrayContained != null) conditions.push((0, drizzle_orm.arrayContained)(column, encodeValue(operator.arrayContained)));
		if (operator?.arrayOverlaps != null) conditions.push((0, drizzle_orm.arrayOverlaps)(column, encodeValue(operator.arrayOverlaps)));
		if (conditions.length === 0) return;
		if (conditions.length === 1) return conditions[0];
		return (0, drizzle_orm.and)(...conditions);
	}
	/**
	 * Parse pagination sort string to orderBy format.
	 * Format: "firstName,-lastName" -> [{ column: "firstName", direction: "asc" }, { column: "lastName", direction: "desc" }]
	 * - Columns separated by comma
	 * - Prefix with '-' for DESC direction
	 *
	 * @param sort Pagination sort string
	 * @returns OrderBy array, or a single object when only one column is given
	 */
	parsePaginationSort(sort) {
		const orderByClauses = sort.split(",").map((field) => field.trim()).map((field) => {
			if (field.startsWith("-")) return {
				column: field.substring(1),
				direction: "desc"
			};
			return {
				column: field,
				direction: "asc"
			};
		});
		return orderByClauses.length === 1 ? orderByClauses[0] : orderByClauses;
	}
	/**
	 * Normalize orderBy parameter to array format.
	 * Supports 3 modes:
	 * 1. String: "name" -> [{ column: "name", direction: "asc" }]
	 * 2. Object: { column: "name", direction: "desc" } -> [{ column: "name", direction: "desc" }]
	 * 3. Array: [{ column: "name" }, { column: "age", direction: "desc" }] -> normalized array
	 *
	 * Any other input (null, number, ...) yields an empty array.
	 *
	 * @param orderBy The orderBy parameter
	 * @returns Normalized array of order by clauses
	 */
	normalizeOrderBy(orderBy) {
		if (typeof orderBy === "string") return [{
			column: orderBy,
			direction: "asc"
		}];
		if (!Array.isArray(orderBy) && typeof orderBy === "object") return [{
			column: orderBy.column,
			direction: orderBy.direction ?? "asc"
		}];
		if (Array.isArray(orderBy)) return orderBy.map((item) => ({
			column: item.column,
			direction: item.direction ?? "asc"
		}));
		return [];
	}
	/**
	 * Create a pagination object.
	 *
	 * @deprecated Use `createPagination` from alepha instead.
	 * This method now delegates to the framework-level helper.
	 *
	 * @param entities The entities to paginate.
	 * @param limit The limit of the pagination.
	 * @param offset The offset of the pagination.
	 * @param sort Optional sort metadata to include in response.
	 */
	createPagination(entities, limit = 10, offset = 0, sort) {
		return (0, alepha.createPagination)(entities, limit, offset, sort);
	}
};
|
|
867
|
-
|
|
868
|
-
//#endregion
|
|
869
|
-
//#region src/orm/services/Repository.ts
|
|
870
|
-
var Repository = class {
|
|
871
|
-
// Entity definition this repository operates on (set in the constructor).
entity;
// Database provider instance (resolved from the DI container in the constructor).
provider;
// DI-injected collaborators, resolved when the class is instantiated.
relationManager = (0, alepha.$inject)(PgRelationManager);
queryManager = (0, alepha.$inject)(QueryManager);
dateTimeProvider = (0, alepha.$inject)(alepha_datetime.DateTimeProvider);
alepha = (0, alepha.$inject)(alepha.Alepha);
|
|
877
|
-
/**
 * @param entity Entity definition (name + TypeBox schema) managed by this repository.
 * @param provider Provider class to resolve from the DI container; defaults to DatabaseProvider.
 */
constructor(entity, provider = DatabaseProvider) {
	this.entity = entity;
	// Resolve through the container so callers can pass a custom provider class.
	this.provider = this.alepha.inject(provider);
	// Register the entity so the provider knows about its table.
	this.provider.registerEntity(entity);
}
|
|
882
|
-
/**
 * Represents the primary key of the table.
 * - Key is the name of the primary key column.
 * - Type is the type (TypeBox) of the primary key column.
 *
 * ID is mandatory. If the table does not have a primary key, it will throw an error.
 */
get id() {
	// Derived from the entity schema on every access (not cached).
	return this.getPrimaryKey(this.entity.schema);
}
|
|
892
|
-
/**
 * Get Drizzle table object.
 * Resolved through the provider, which owns the entity-to-table mapping.
 */
get table() {
	return this.provider.table(this.entity);
}
|
|
898
|
-
/**
 * Get SQL table name. (from Drizzle table object)
 * Note: currently read from the entity definition, not the Drizzle table.
 */
get tableName() {
	return this.entity.name;
}
|
|
904
|
-
/**
 * Getter for the database connection from the database provider.
 */
get db() {
	return this.provider.db;
}
|
|
910
|
-
/**
|
|
911
|
-
* Execute a SQL query.
|
|
912
|
-
*
|
|
913
|
-
* This method allows executing raw SQL queries against the database.
|
|
914
|
-
* This is by far the easiest way to run custom queries that are not covered by the repository's built-in methods!
|
|
915
|
-
*
|
|
916
|
-
* You must use the `sql` tagged template function from Drizzle ORM to create the query. https://orm.drizzle.team/docs/sql
|
|
917
|
-
*
|
|
918
|
-
* @example
|
|
919
|
-
* ```ts
|
|
920
|
-
* class App {
|
|
921
|
-
* repository = $repository({ ... });
|
|
922
|
-
* async getAdults() {
|
|
923
|
-
* const users = repository.table; // Drizzle table object
|
|
924
|
-
* await repository.query(sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
|
|
925
|
-
* // or better
|
|
926
|
-
* await repository.query((users) => sql`SELECT * FROM ${users} WHERE ${users.age} > ${18}`);
|
|
927
|
-
* }
|
|
928
|
-
* }
|
|
929
|
-
* ```
|
|
930
|
-
*/
|
|
931
|
-
async query(query, schema$1) {
|
|
932
|
-
const raw = typeof query === "function" ? query(this.table, this.db) : query;
|
|
933
|
-
if (typeof raw === "string" && raw.includes("[object Object]")) throw new alepha.AlephaError("Invalid SQL query. Did you forget to call the 'sql' function?");
|
|
934
|
-
return (await this.provider.execute(raw)).map((it) => {
|
|
935
|
-
return this.clean(this.mapRawFieldsToEntity(it), schema$1 ?? this.entity.schema);
|
|
936
|
-
});
|
|
937
|
-
}
|
|
938
|
-
/**
|
|
939
|
-
* Map raw database fields to entity fields. (handles column name differences)
|
|
940
|
-
*/
|
|
941
|
-
mapRawFieldsToEntity(row) {
|
|
942
|
-
const entity = {};
|
|
943
|
-
for (const key of Object.keys(row)) {
|
|
944
|
-
entity[key] = row[key];
|
|
945
|
-
for (const colKey of Object.keys(this.table)) if (this.table[colKey].name === key) {
|
|
946
|
-
entity[colKey] = row[key];
|
|
947
|
-
break;
|
|
948
|
-
}
|
|
949
|
-
}
|
|
950
|
-
return entity;
|
|
951
|
-
}
|
|
952
|
-
/**
|
|
953
|
-
* Get a Drizzle column from the table by his name.
|
|
954
|
-
*/
|
|
955
|
-
col(name) {
|
|
956
|
-
const column = this.table[name];
|
|
957
|
-
if (!column) throw new alepha.AlephaError(`Invalid access. Column ${String(name)} not found in table ${this.tableName}`);
|
|
958
|
-
return column;
|
|
959
|
-
}
|
|
960
|
-
/**
 * Run a transaction.
 * Thin delegate to Drizzle's `db.transaction`; the callback receives the tx handle.
 */
async transaction(transaction, config) {
	return await this.db.transaction(transaction, config);
}
|
|
966
|
-
/**
 * Start a SELECT query on the table.
 * Uses the transaction handle from `opts.tx` when provided, otherwise the shared db.
 */
rawSelect(opts = {}) {
	return (opts.tx ?? this.db).select().from(this.table);
}
|
|
972
|
-
/**
|
|
973
|
-
* Start a SELECT DISTINCT query on the table.
|
|
974
|
-
*/
|
|
975
|
-
rawSelectDistinct(opts = {}, columns = []) {
|
|
976
|
-
const db = opts.tx ?? this.db;
|
|
977
|
-
const table = this.table;
|
|
978
|
-
const fields = {};
|
|
979
|
-
for (const column of columns) if (typeof column === "string") fields[column] = this.col(column);
|
|
980
|
-
return db.selectDistinct(fields).from(table);
|
|
981
|
-
}
|
|
982
|
-
/**
 * Start an INSERT query on the table.
 * Uses the transaction handle from `opts.tx` when provided, otherwise the shared db.
 */
rawInsert(opts = {}) {
	return (opts.tx ?? this.db).insert(this.table);
}
|
|
988
|
-
/**
 * Start an UPDATE query on the table.
 * Uses the transaction handle from `opts.tx` when provided, otherwise the shared db.
 */
rawUpdate(opts = {}) {
	return (opts.tx ?? this.db).update(this.table);
}
|
|
994
|
-
/**
 * Start a DELETE query on the table.
 * Uses the transaction handle from `opts.tx` when provided, otherwise the shared db.
 */
rawDelete(opts = {}) {
	return (opts.tx ?? this.db).delete(this.table);
}
|
|
1000
|
-
/**
 * Create a Drizzle `select` query based on a JSON query object.
 *
 * > This method is the base for `find`, `findOne`, `findById`, and `paginate`.
 *
 * Emits "repository:read:before" / "repository:read:after" events around the
 * query, builds joins from `query.with`, applies soft-delete filtering via
 * `withDeletedAt`, and cleans each returned row against the (possibly
 * column-picked and join-extended) entity schema.
 */
async findMany(query = {}, opts = {}) {
	await this.alepha.events.emit("repository:read:before", {
		tableName: this.tableName,
		query
	});
	const columns = query.columns ?? query.distinct;
	const builder = query.distinct ? this.rawSelectDistinct(opts, query.distinct) : this.rawSelect(opts);
	const joins = [];
	// buildJoins mutates both `builder` (adds JOIN clauses) and `joins`.
	if (query.with) this.relationManager.buildJoins(this.provider, builder, joins, query.with, this.table);
	const where = this.withDeletedAt(query.where ?? {}, opts);
	builder.where(() => this.toSQL(where, joins));
	if (query.offset) {
		builder.offset(query.offset);
		// sqlite requires LIMIT with OFFSET; NOTE(review): this mutates the
		// caller's `query` object — confirm callers don't reuse it.
		if (this.provider.dialect === "sqlite" && !query.limit) query.limit = 1e3;
	}
	if (query.limit) builder.limit(query.limit);
	if (query.orderBy) {
		const orderByClauses = this.queryManager.normalizeOrderBy(query.orderBy);
		builder.orderBy(...orderByClauses.map((clause) => clause.direction === "desc" ? (0, drizzle_orm.desc)(this.col(clause.column)) : (0, drizzle_orm.asc)(this.col(clause.column))));
	}
	if (query.groupBy) builder.groupBy(...query.groupBy.map((key) => this.col(key)));
	// Row locking (SELECT ... FOR UPDATE / SHARE): string shorthand or
	// { strength, config } object.
	if (opts.for) {
		if (typeof opts.for === "string") builder.for(opts.for);
		else if (opts.for) builder.for(opts.for.strength, opts.for.config);
	}
	try {
		let rows = await builder.execute();
		let schema$1 = this.entity.schema;
		// When a column subset was selected, clean against the reduced schema.
		if (columns) schema$1 = alepha.t.pick(schema$1, columns);
		// With joins, drizzle nests each table's columns under its name; remap
		// to a single entity object with related entities attached.
		if (joins.length) rows = rows.map((row) => {
			const rowSchema = {
				...schema$1,
				properties: { ...schema$1.properties }
			};
			return this.relationManager.mapRowWithJoins(row[this.tableName], row, rowSchema, joins);
		});
		rows = rows.map((row) => {
			if (joins.length) {
				const joinedSchema = this.relationManager.buildSchemaWithJoins(schema$1, joins);
				return this.cleanWithJoins(row, joinedSchema, joins);
			}
			return this.clean(row, schema$1);
		});
		await this.alepha.events.emit("repository:read:after", {
			tableName: this.tableName,
			query,
			entities: rows
		});
		return rows;
	} catch (error) {
		throw new DbError("Query select has failed", error);
	}
}
|
|
1058
|
-
/**
|
|
1059
|
-
* Find a single entity.
|
|
1060
|
-
*/
|
|
1061
|
-
async findOne(query, opts = {}) {
|
|
1062
|
-
const [entity] = await this.findMany({
|
|
1063
|
-
limit: 1,
|
|
1064
|
-
...query
|
|
1065
|
-
}, opts);
|
|
1066
|
-
if (!entity) throw new DbEntityNotFoundError(this.tableName);
|
|
1067
|
-
return entity;
|
|
1068
|
-
}
|
|
1069
|
-
/**
|
|
1070
|
-
* Find entities with pagination.
|
|
1071
|
-
*
|
|
1072
|
-
* It uses the same parameters as `find()`, but adds pagination metadata to the response.
|
|
1073
|
-
*
|
|
1074
|
-
* > Pagination CAN also do a count query to get the total number of elements.
|
|
1075
|
-
*/
|
|
1076
|
-
async paginate(pagination = {}, query = {}, opts = {}) {
|
|
1077
|
-
const limit = query.limit ?? pagination.size ?? 10;
|
|
1078
|
-
const page = pagination.page ?? 0;
|
|
1079
|
-
const offset = query.offset ?? page * limit;
|
|
1080
|
-
let orderBy = query.orderBy;
|
|
1081
|
-
if (!query.orderBy && pagination.sort) orderBy = this.queryManager.parsePaginationSort(pagination.sort);
|
|
1082
|
-
const now = Date.now();
|
|
1083
|
-
const timers = {
|
|
1084
|
-
query: now,
|
|
1085
|
-
count: now
|
|
1086
|
-
};
|
|
1087
|
-
const tasks = [];
|
|
1088
|
-
tasks.push(this.findMany({
|
|
1089
|
-
offset,
|
|
1090
|
-
limit: limit + 1,
|
|
1091
|
-
orderBy,
|
|
1092
|
-
...query
|
|
1093
|
-
}, opts).then((it) => {
|
|
1094
|
-
timers.query = Date.now() - timers.query;
|
|
1095
|
-
return it;
|
|
1096
|
-
}));
|
|
1097
|
-
if (opts.count) {
|
|
1098
|
-
const where = (0, drizzle_orm.isSQLWrapper)(query.where) ? query.where : query.where ? this.toSQL(query.where) : void 0;
|
|
1099
|
-
tasks.push(this.db.$count(this.table, where).then((it) => {
|
|
1100
|
-
timers.count = Date.now() - timers.count;
|
|
1101
|
-
return it;
|
|
1102
|
-
}));
|
|
1103
|
-
}
|
|
1104
|
-
const [entities, countResult] = await Promise.all(tasks);
|
|
1105
|
-
let sortMetadata;
|
|
1106
|
-
if (orderBy) sortMetadata = this.queryManager.normalizeOrderBy(orderBy);
|
|
1107
|
-
const response = this.queryManager.createPagination(entities, limit, offset, sortMetadata);
|
|
1108
|
-
response.page.totalElements = countResult;
|
|
1109
|
-
if (countResult != null) response.page.totalPages = Math.ceil(countResult / limit);
|
|
1110
|
-
return response;
|
|
1111
|
-
}
|
|
1112
|
-
/**
 * Find an entity by ID.
 *
 * This is a convenience method for `findOne` with a where clause on the primary key.
 * If you need more complex queries, use `findOne` instead.
 *
 * @throws DbEntityNotFoundError when no entity has this id (via findOne).
 */
async findById(id, opts = {}) {
	return await this.findOne({ where: this.getWhereId(id) }, opts);
}
|
|
1121
|
-
/**
 * Helper to create a type-safe query object.
 * Returns an empty object; its value is the TypeScript typing at the call site.
 */
createQuery() {
	return {};
}
|
|
1127
|
-
/**
 * Helper to create a type-safe where clause.
 * Returns an empty object; its value is the TypeScript typing at the call site.
 */
createQueryWhere() {
	return {};
}
|
|
1133
|
-
/**
|
|
1134
|
-
* Create an entity.
|
|
1135
|
-
*
|
|
1136
|
-
* @param data The entity to create.
|
|
1137
|
-
* @param opts The options for creating the entity.
|
|
1138
|
-
* @returns The ID of the created entity.
|
|
1139
|
-
*/
|
|
1140
|
-
async create(data, opts = {}) {
|
|
1141
|
-
await this.alepha.events.emit("repository:create:before", {
|
|
1142
|
-
tableName: this.tableName,
|
|
1143
|
-
data
|
|
1144
|
-
});
|
|
1145
|
-
try {
|
|
1146
|
-
const entity = await this.rawInsert(opts).values(this.cast(data ?? {}, true)).returning(this.table).then(([it]) => this.clean(it, this.entity.schema));
|
|
1147
|
-
await this.alepha.events.emit("repository:create:after", {
|
|
1148
|
-
tableName: this.tableName,
|
|
1149
|
-
data,
|
|
1150
|
-
entity
|
|
1151
|
-
});
|
|
1152
|
-
return entity;
|
|
1153
|
-
} catch (error) {
|
|
1154
|
-
throw this.handleError(error, "Insert query has failed");
|
|
1155
|
-
}
|
|
1156
|
-
}
|
|
1157
|
-
/**
|
|
1158
|
-
* Create many entities.
|
|
1159
|
-
*
|
|
1160
|
-
* @param values The entities to create.
|
|
1161
|
-
* @param opts The statement options.
|
|
1162
|
-
* @returns The created entities.
|
|
1163
|
-
*/
|
|
1164
|
-
async createMany(values, opts = {}) {
|
|
1165
|
-
if (values.length === 0) return [];
|
|
1166
|
-
await this.alepha.events.emit("repository:create:before", {
|
|
1167
|
-
tableName: this.tableName,
|
|
1168
|
-
data: values
|
|
1169
|
-
});
|
|
1170
|
-
try {
|
|
1171
|
-
const entities = await this.rawInsert(opts).values(values.map((data) => this.cast(data, true))).returning(this.table).then((rows) => rows.map((it) => this.clean(it, this.entity.schema)));
|
|
1172
|
-
await this.alepha.events.emit("repository:create:after", {
|
|
1173
|
-
tableName: this.tableName,
|
|
1174
|
-
data: values,
|
|
1175
|
-
entity: entities
|
|
1176
|
-
});
|
|
1177
|
-
return entities;
|
|
1178
|
-
} catch (error) {
|
|
1179
|
-
throw this.handleError(error, "Insert query has failed");
|
|
1180
|
-
}
|
|
1181
|
-
}
|
|
1182
|
-
/**
|
|
1183
|
-
* Find an entity and update it.
|
|
1184
|
-
*/
|
|
1185
|
-
async updateOne(where, data, opts = {}) {
|
|
1186
|
-
await this.alepha.events.emit("repository:update:before", {
|
|
1187
|
-
tableName: this.tableName,
|
|
1188
|
-
where,
|
|
1189
|
-
data
|
|
1190
|
-
});
|
|
1191
|
-
let row = data;
|
|
1192
|
-
const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
|
|
1193
|
-
if (updatedAtField) row[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
|
|
1194
|
-
where = this.withDeletedAt(where, opts);
|
|
1195
|
-
row = this.cast(row, false);
|
|
1196
|
-
delete row[this.id.key];
|
|
1197
|
-
const response = await this.rawUpdate(opts).set(row).where(this.toSQL(where)).returning(this.table).catch((error) => {
|
|
1198
|
-
throw this.handleError(error, "Update query has failed");
|
|
1199
|
-
});
|
|
1200
|
-
if (!response[0]) throw new DbEntityNotFoundError(this.tableName);
|
|
1201
|
-
try {
|
|
1202
|
-
const entity = this.clean(response[0], this.entity.schema);
|
|
1203
|
-
await this.alepha.events.emit("repository:update:after", {
|
|
1204
|
-
tableName: this.tableName,
|
|
1205
|
-
where,
|
|
1206
|
-
data,
|
|
1207
|
-
entities: [entity]
|
|
1208
|
-
});
|
|
1209
|
-
return entity;
|
|
1210
|
-
} catch (error) {
|
|
1211
|
-
throw this.handleError(error, "Update query has failed");
|
|
1212
|
-
}
|
|
1213
|
-
}
|
|
1214
|
-
/**
|
|
1215
|
-
* Save a given entity.
|
|
1216
|
-
*
|
|
1217
|
-
* @example
|
|
1218
|
-
* ```ts
|
|
1219
|
-
* const entity = await repository.findById(1);
|
|
1220
|
-
* entity.name = "New Name"; // update a field
|
|
1221
|
-
* delete entity.description; // delete a field
|
|
1222
|
-
* await repository.save(entity);
|
|
1223
|
-
* ```
|
|
1224
|
-
*
|
|
1225
|
-
* Difference with `updateById/updateOne`:
|
|
1226
|
-
*
|
|
1227
|
-
* - requires the entity to be fetched first (whole object is expected)
|
|
1228
|
-
* - check pg.version() if present -> optimistic locking
|
|
1229
|
-
* - validate entity against schema
|
|
1230
|
-
* - undefined values will be set to null, not ignored!
|
|
1231
|
-
*
|
|
1232
|
-
* @see {@link DbVersionMismatchError}
|
|
1233
|
-
*/
|
|
1234
|
-
async save(entity, opts = {}) {
|
|
1235
|
-
const row = entity;
|
|
1236
|
-
const id = row[this.id.key];
|
|
1237
|
-
if (id == null) throw new alepha.AlephaError("Cannot save entity without ID - missing primary key in value");
|
|
1238
|
-
for (const key of Object.keys(this.entity.schema.properties)) if (row[key] === void 0) row[key] = null;
|
|
1239
|
-
let where = this.createQueryWhere();
|
|
1240
|
-
where.id = { eq: id };
|
|
1241
|
-
const versionField = getAttrFields(this.entity.schema, PG_VERSION)?.[0];
|
|
1242
|
-
if (versionField && typeof row[versionField.key] === "number") {
|
|
1243
|
-
where = { and: [where, { [versionField.key]: { eq: row[versionField.key] } }] };
|
|
1244
|
-
row[versionField.key] += 1;
|
|
1245
|
-
}
|
|
1246
|
-
try {
|
|
1247
|
-
const newValue = await this.updateOne(where, row, opts);
|
|
1248
|
-
for (const key of Object.keys(this.entity.schema.properties)) row[key] = void 0;
|
|
1249
|
-
Object.assign(row, newValue);
|
|
1250
|
-
} catch (error) {
|
|
1251
|
-
if (error instanceof DbEntityNotFoundError && versionField) try {
|
|
1252
|
-
await this.findById(id);
|
|
1253
|
-
throw new DbVersionMismatchError(this.tableName, id);
|
|
1254
|
-
} catch (lookupError) {
|
|
1255
|
-
if (lookupError instanceof DbEntityNotFoundError) throw error;
|
|
1256
|
-
if (lookupError instanceof DbVersionMismatchError) throw lookupError;
|
|
1257
|
-
throw lookupError;
|
|
1258
|
-
}
|
|
1259
|
-
throw error;
|
|
1260
|
-
}
|
|
1261
|
-
}
|
|
1262
|
-
/**
|
|
1263
|
-
* Find an entity by ID and update it.
|
|
1264
|
-
*/
|
|
1265
|
-
async updateById(id, data, opts = {}) {
|
|
1266
|
-
return await this.updateOne(this.getWhereId(id), data, opts);
|
|
1267
|
-
}
|
|
1268
|
-
/**
|
|
1269
|
-
* Find many entities and update all of them.
|
|
1270
|
-
*/
|
|
1271
|
-
async updateMany(where, data, opts = {}) {
|
|
1272
|
-
await this.alepha.events.emit("repository:update:before", {
|
|
1273
|
-
tableName: this.tableName,
|
|
1274
|
-
where,
|
|
1275
|
-
data
|
|
1276
|
-
});
|
|
1277
|
-
const updatedAtField = getAttrFields(this.entity.schema, PG_UPDATED_AT)?.[0];
|
|
1278
|
-
if (updatedAtField) data[updatedAtField.key] = this.dateTimeProvider.of(opts.now).toISOString();
|
|
1279
|
-
where = this.withDeletedAt(where, opts);
|
|
1280
|
-
data = this.cast(data, false);
|
|
1281
|
-
try {
|
|
1282
|
-
const entities = await this.rawUpdate(opts).set(data).where(this.toSQL(where)).returning();
|
|
1283
|
-
await this.alepha.events.emit("repository:update:after", {
|
|
1284
|
-
tableName: this.tableName,
|
|
1285
|
-
where,
|
|
1286
|
-
data,
|
|
1287
|
-
entities
|
|
1288
|
-
});
|
|
1289
|
-
return entities.map((it) => it[this.id.key]);
|
|
1290
|
-
} catch (error) {
|
|
1291
|
-
throw this.handleError(error, "Update query has failed");
|
|
1292
|
-
}
|
|
1293
|
-
}
|
|
1294
|
-
/**
|
|
1295
|
-
* Find many and delete all of them.
|
|
1296
|
-
* @returns Array of deleted entity IDs
|
|
1297
|
-
*/
|
|
1298
|
-
async deleteMany(where = {}, opts = {}) {
|
|
1299
|
-
const deletedAt = this.deletedAt();
|
|
1300
|
-
if (deletedAt && !opts.force) return await this.updateMany(where, { [deletedAt.key]: opts.now ?? this.dateTimeProvider.nowISOString() }, opts);
|
|
1301
|
-
await this.alepha.events.emit("repository:delete:before", {
|
|
1302
|
-
tableName: this.tableName,
|
|
1303
|
-
where
|
|
1304
|
-
});
|
|
1305
|
-
try {
|
|
1306
|
-
const ids = (await this.rawDelete(opts).where(this.toSQL(where)).returning({ id: this.table[this.id.key] })).map((row) => row.id);
|
|
1307
|
-
await this.alepha.events.emit("repository:delete:after", {
|
|
1308
|
-
tableName: this.tableName,
|
|
1309
|
-
where,
|
|
1310
|
-
ids
|
|
1311
|
-
});
|
|
1312
|
-
return ids;
|
|
1313
|
-
} catch (error) {
|
|
1314
|
-
throw new DbError("Delete query has failed", error);
|
|
1315
|
-
}
|
|
1316
|
-
}
|
|
1317
|
-
/**
|
|
1318
|
-
* Delete all entities.
|
|
1319
|
-
* @returns Array of deleted entity IDs
|
|
1320
|
-
*/
|
|
1321
|
-
clear(opts = {}) {
|
|
1322
|
-
return this.deleteMany({}, opts);
|
|
1323
|
-
}
|
|
1324
|
-
/**
|
|
1325
|
-
* Delete the given entity.
|
|
1326
|
-
*
|
|
1327
|
-
* You must fetch the entity first in order to delete it.
|
|
1328
|
-
* @returns Array containing the deleted entity ID
|
|
1329
|
-
*/
|
|
1330
|
-
async destroy(entity, opts = {}) {
|
|
1331
|
-
const id = entity[this.id.key];
|
|
1332
|
-
if (id == null) throw new alepha.AlephaError("Cannot destroy entity without ID");
|
|
1333
|
-
const deletedAt = this.deletedAt();
|
|
1334
|
-
if (deletedAt && !opts.force) {
|
|
1335
|
-
opts.now ??= this.dateTimeProvider.nowISOString();
|
|
1336
|
-
entity[deletedAt.key] = opts.now;
|
|
1337
|
-
}
|
|
1338
|
-
return await this.deleteById(id, opts);
|
|
1339
|
-
}
|
|
1340
|
-
/**
|
|
1341
|
-
* Find an entity and delete it.
|
|
1342
|
-
* @returns Array of deleted entity IDs (should contain at most one ID)
|
|
1343
|
-
*/
|
|
1344
|
-
async deleteOne(where = {}, opts = {}) {
|
|
1345
|
-
return await this.deleteMany(where, opts);
|
|
1346
|
-
}
|
|
1347
|
-
/**
|
|
1348
|
-
* Find an entity by ID and delete it.
|
|
1349
|
-
* @returns Array containing the deleted entity ID
|
|
1350
|
-
* @throws DbEntityNotFoundError if the entity is not found
|
|
1351
|
-
*/
|
|
1352
|
-
async deleteById(id, opts = {}) {
|
|
1353
|
-
const result = await this.deleteMany(this.getWhereId(id), opts);
|
|
1354
|
-
if (result.length === 0) throw new DbEntityNotFoundError(`Entity with ID ${id} not found in ${this.tableName}`);
|
|
1355
|
-
return result;
|
|
1356
|
-
}
|
|
1357
|
-
/**
|
|
1358
|
-
* Count entities.
|
|
1359
|
-
*/
|
|
1360
|
-
async count(where = {}, opts = {}) {
|
|
1361
|
-
where = this.withDeletedAt(where, opts);
|
|
1362
|
-
return (opts.tx ?? this.db).$count(this.table, this.toSQL(where));
|
|
1363
|
-
}
|
|
1364
|
-
conflictMessagePattern = "duplicate key value violates unique constraint";
|
|
1365
|
-
handleError(error, message) {
|
|
1366
|
-
if (!(error instanceof Error)) return new DbError(message);
|
|
1367
|
-
if (error.cause?.message.includes(this.conflictMessagePattern) || error.message.includes(this.conflictMessagePattern)) return new DbConflictError(message, error);
|
|
1368
|
-
return new DbError(message, error);
|
|
1369
|
-
}
|
|
1370
|
-
withDeletedAt(where, opts = {}) {
|
|
1371
|
-
if (opts.force) return where;
|
|
1372
|
-
const deletedAt = this.deletedAt();
|
|
1373
|
-
if (!deletedAt) return where;
|
|
1374
|
-
return { and: [where, { [deletedAt.key]: { isNull: true } }] };
|
|
1375
|
-
}
|
|
1376
|
-
deletedAt() {
|
|
1377
|
-
const deletedAtFields = getAttrFields(this.entity.schema, PG_DELETED_AT);
|
|
1378
|
-
if (deletedAtFields.length > 0) return deletedAtFields[0];
|
|
1379
|
-
}
|
|
1380
|
-
/**
|
|
1381
|
-
* Convert something to valid Pg Insert Value.
|
|
1382
|
-
*/
|
|
1383
|
-
cast(data, insert) {
|
|
1384
|
-
const schema$1 = insert ? this.entity.insertSchema : alepha.t.partial(this.entity.updateSchema);
|
|
1385
|
-
return this.alepha.codec.encode(schema$1, data);
|
|
1386
|
-
}
|
|
1387
|
-
/**
|
|
1388
|
-
* Transform a row from the database into a clean entity.
|
|
1389
|
-
*/
|
|
1390
|
-
clean(row, schema$1) {
|
|
1391
|
-
for (const key of Object.keys(schema$1.properties)) {
|
|
1392
|
-
const value = schema$1.properties[key];
|
|
1393
|
-
if (typeof row[key] === "string") {
|
|
1394
|
-
if (alepha.t.schema.isDateTime(value)) row[key] = this.dateTimeProvider.of(row[key]).toISOString();
|
|
1395
|
-
else if (alepha.t.schema.isDate(value)) row[key] = this.dateTimeProvider.of(`${row[key]}T00:00:00Z`).toISOString().split("T")[0];
|
|
1396
|
-
}
|
|
1397
|
-
if (typeof row[key] === "bigint" && alepha.t.schema.isBigInt(value)) row[key] = row[key].toString();
|
|
1398
|
-
}
|
|
1399
|
-
return this.alepha.codec.decode(schema$1, row);
|
|
1400
|
-
}
|
|
1401
|
-
/**
|
|
1402
|
-
* Clean a row with joins recursively
|
|
1403
|
-
*/
|
|
1404
|
-
cleanWithJoins(row, schema$1, joins, parentPath) {
|
|
1405
|
-
const joinsAtThisLevel = joins.filter((j) => j.parent === parentPath);
|
|
1406
|
-
const cleanRow = { ...row };
|
|
1407
|
-
const joinedData = {};
|
|
1408
|
-
for (const join of joinsAtThisLevel) {
|
|
1409
|
-
joinedData[join.key] = cleanRow[join.key];
|
|
1410
|
-
delete cleanRow[join.key];
|
|
1411
|
-
}
|
|
1412
|
-
const entity = this.clean(cleanRow, schema$1);
|
|
1413
|
-
for (const join of joinsAtThisLevel) {
|
|
1414
|
-
const joinedValue = joinedData[join.key];
|
|
1415
|
-
if (joinedValue != null) {
|
|
1416
|
-
const joinPath = parentPath ? `${parentPath}.${join.key}` : join.key;
|
|
1417
|
-
if (joins.filter((j) => j.parent === joinPath).length > 0) entity[join.key] = this.cleanWithJoins(joinedValue, join.schema, joins, joinPath);
|
|
1418
|
-
else entity[join.key] = this.clean(joinedValue, join.schema);
|
|
1419
|
-
} else entity[join.key] = void 0;
|
|
1420
|
-
}
|
|
1421
|
-
return entity;
|
|
1422
|
-
}
|
|
1423
|
-
/**
|
|
1424
|
-
* Convert a where clause to SQL.
|
|
1425
|
-
*/
|
|
1426
|
-
toSQL(where, joins) {
|
|
1427
|
-
return this.queryManager.toSQL(where, {
|
|
1428
|
-
schema: this.entity.schema,
|
|
1429
|
-
col: (name) => {
|
|
1430
|
-
return this.col(name);
|
|
1431
|
-
},
|
|
1432
|
-
joins,
|
|
1433
|
-
dialect: this.provider.dialect
|
|
1434
|
-
});
|
|
1435
|
-
}
|
|
1436
|
-
/**
|
|
1437
|
-
* Get the where clause for an ID.
|
|
1438
|
-
*
|
|
1439
|
-
* @param id The ID to get the where clause for.
|
|
1440
|
-
* @returns The where clause for the ID.
|
|
1441
|
-
*/
|
|
1442
|
-
getWhereId(id) {
|
|
1443
|
-
return { [this.id.key]: { eq: alepha.t.schema.isString(this.id.type) ? String(id) : Number(id) } };
|
|
1444
|
-
}
|
|
1445
|
-
/**
|
|
1446
|
-
* Find a primary key in the schema.
|
|
1447
|
-
*/
|
|
1448
|
-
getPrimaryKey(schema$1) {
|
|
1449
|
-
const primaryKeys = getAttrFields(schema$1, PG_PRIMARY_KEY);
|
|
1450
|
-
if (primaryKeys.length === 0) throw new alepha.AlephaError("Primary key not found in schema");
|
|
1451
|
-
if (primaryKeys.length > 1) throw new alepha.AlephaError(`Multiple primary keys (${primaryKeys.length}) are not supported`);
|
|
1452
|
-
return {
|
|
1453
|
-
key: primaryKeys[0].key,
|
|
1454
|
-
col: this.col(primaryKeys[0].key),
|
|
1455
|
-
type: primaryKeys[0].type
|
|
1456
|
-
};
|
|
1457
|
-
}
|
|
1458
|
-
};
|
|
1459
|
-
|
|
1460
|
-
//#endregion
|
|
1461
|
-
//#region src/orm/providers/RepositoryProvider.ts
|
|
1462
|
-
var RepositoryProvider = class {
|
|
1463
|
-
alepha = (0, alepha.$inject)(alepha.Alepha);
|
|
1464
|
-
registry = /* @__PURE__ */ new Map();
|
|
1465
|
-
getRepositories(provider) {
|
|
1466
|
-
const repositories = this.alepha.services(Repository);
|
|
1467
|
-
if (provider) return repositories.filter((it) => it.provider === provider);
|
|
1468
|
-
return repositories;
|
|
1469
|
-
}
|
|
1470
|
-
getRepository(entity) {
|
|
1471
|
-
const RepositoryClass = this.createClassRepository(entity);
|
|
1472
|
-
return this.alepha.inject(RepositoryClass);
|
|
1473
|
-
}
|
|
1474
|
-
createClassRepository(entity) {
|
|
1475
|
-
let name = entity.name.charAt(0).toUpperCase() + entity.name.slice(1);
|
|
1476
|
-
if (name.endsWith("s")) name = name.slice(0, -1);
|
|
1477
|
-
name = `${name}Repository`;
|
|
1478
|
-
if (this.registry.has(entity)) return this.registry.get(entity);
|
|
1479
|
-
class GenericRepository extends Repository {
|
|
1480
|
-
constructor() {
|
|
1481
|
-
super(entity);
|
|
1482
|
-
}
|
|
1483
|
-
}
|
|
1484
|
-
Object.defineProperty(GenericRepository, "name", { value: name });
|
|
1485
|
-
this.registry.set(entity, GenericRepository);
|
|
1486
|
-
return GenericRepository;
|
|
1487
|
-
}
|
|
1488
|
-
};
|
|
1489
|
-
|
|
1490
|
-
//#endregion
|
|
1491
|
-
//#region src/orm/descriptors/$repository.ts
|
|
1492
|
-
/**
|
|
1493
|
-
* Get the repository for the given entity.
|
|
1494
|
-
*/
|
|
1495
|
-
const $repository = (entity) => {
|
|
1496
|
-
const { alepha: alepha$1 } = (0, alepha.$context)();
|
|
1497
|
-
return (0, alepha.$inject)(alepha$1.inject(RepositoryProvider).createClassRepository(entity));
|
|
1498
|
-
};
|
|
1499
|
-
|
|
1500
|
-
//#endregion
|
|
1501
|
-
//#region src/orm/descriptors/$sequence.ts
|
|
1502
|
-
/**
|
|
1503
|
-
* Creates a PostgreSQL sequence descriptor for generating unique numeric values.
|
|
1504
|
-
*/
|
|
1505
|
-
const $sequence = (options = {}) => {
|
|
1506
|
-
return (0, alepha.createDescriptor)(SequenceDescriptor, options);
|
|
1507
|
-
};
|
|
1508
|
-
var SequenceDescriptor = class extends alepha.Descriptor {
|
|
1509
|
-
provider = this.$provider();
|
|
1510
|
-
onInit() {
|
|
1511
|
-
this.provider.registerSequence(this);
|
|
1512
|
-
}
|
|
1513
|
-
get name() {
|
|
1514
|
-
return this.options.name ?? this.config.propertyKey;
|
|
1515
|
-
}
|
|
1516
|
-
async next() {
|
|
1517
|
-
return this.provider.execute(drizzle_orm.sql`SELECT nextval('${drizzle_orm.sql.raw(this.provider.schema)}."${drizzle_orm.sql.raw(this.name)}"')`).then((rows) => Number(rows[0]?.nextval));
|
|
1518
|
-
}
|
|
1519
|
-
async current() {
|
|
1520
|
-
return this.provider.execute(drizzle_orm.sql`SELECT last_value FROM ${drizzle_orm.sql.raw(this.provider.schema)}."${drizzle_orm.sql.raw(this.name)}"`).then((rows) => Number(rows[0]?.last_value));
|
|
1521
|
-
}
|
|
1522
|
-
$provider() {
|
|
1523
|
-
return this.options.provider ?? this.alepha.inject(DatabaseProvider);
|
|
1524
|
-
}
|
|
1525
|
-
};
|
|
1526
|
-
$sequence[alepha.KIND] = SequenceDescriptor;
|
|
1527
|
-
|
|
1528
|
-
//#endregion
|
|
1529
|
-
//#region src/orm/providers/DrizzleKitProvider.ts
|
|
1530
|
-
var DrizzleKitProvider = class {
|
|
1531
|
-
log = (0, alepha_logger.$logger)();
|
|
1532
|
-
alepha = (0, alepha.$inject)(alepha.Alepha);
|
|
1533
|
-
/**
|
|
1534
|
-
* Synchronize database with current schema definitions.
|
|
1535
|
-
*
|
|
1536
|
-
* In development mode, it will generate and execute migrations based on the current state.
|
|
1537
|
-
* In testing mode, it will generate migrations from scratch without applying them.
|
|
1538
|
-
*
|
|
1539
|
-
* Does nothing in production mode, you must handle migrations manually.
|
|
1540
|
-
*/
|
|
1541
|
-
async synchronize(provider) {
|
|
1542
|
-
if (this.alepha.isProduction()) {
|
|
1543
|
-
this.log.warn("Synchronization skipped in production mode.");
|
|
1544
|
-
return;
|
|
1545
|
-
}
|
|
1546
|
-
if (provider.schema !== "public") await this.createSchemaIfNotExists(provider, provider.schema);
|
|
1547
|
-
const now = Date.now();
|
|
1548
|
-
if (this.alepha.isTest()) {
|
|
1549
|
-
const { statements } = await this.generateMigration(provider);
|
|
1550
|
-
await this.executeStatements(statements, provider);
|
|
1551
|
-
} else {
|
|
1552
|
-
const entry = await this.loadDevMigrations(provider);
|
|
1553
|
-
const { statements, snapshot } = await this.generateMigration(provider, entry?.snapshot ? JSON.parse(entry.snapshot) : void 0);
|
|
1554
|
-
await this.executeStatements(statements, provider, true);
|
|
1555
|
-
await this.saveDevMigrations(provider, snapshot, entry);
|
|
1556
|
-
}
|
|
1557
|
-
this.log.info(`Db '${provider.name}' synchronization OK [${Date.now() - now}ms]`);
|
|
1558
|
-
}
|
|
1559
|
-
/**
|
|
1560
|
-
* Mostly used for testing purposes. You can generate SQL migration statements without executing them.
|
|
1561
|
-
*/
|
|
1562
|
-
async generateMigration(provider, prevSnapshot) {
|
|
1563
|
-
const kit = this.importDrizzleKit();
|
|
1564
|
-
const models = this.getModels(provider);
|
|
1565
|
-
if (Object.keys(models).length > 0) {
|
|
1566
|
-
if (provider.dialect === "sqlite") {
|
|
1567
|
-
const prev$1 = prevSnapshot ?? await kit.generateSQLiteDrizzleJson({});
|
|
1568
|
-
const curr$1 = await kit.generateSQLiteDrizzleJson(models);
|
|
1569
|
-
return {
|
|
1570
|
-
models,
|
|
1571
|
-
statements: await kit.generateSQLiteMigration(prev$1, curr$1),
|
|
1572
|
-
snapshot: curr$1
|
|
1573
|
-
};
|
|
1574
|
-
}
|
|
1575
|
-
const prev = prevSnapshot ?? await kit.generateDrizzleJson({});
|
|
1576
|
-
const curr = await kit.generateDrizzleJson(models);
|
|
1577
|
-
return {
|
|
1578
|
-
models,
|
|
1579
|
-
statements: await kit.generateMigration(prev, curr),
|
|
1580
|
-
snapshot: curr
|
|
1581
|
-
};
|
|
1582
|
-
}
|
|
1583
|
-
return {
|
|
1584
|
-
models,
|
|
1585
|
-
statements: [],
|
|
1586
|
-
snapshot: {}
|
|
1587
|
-
};
|
|
1588
|
-
}
|
|
1589
|
-
/**
|
|
1590
|
-
* Load all tables, enums, sequences, etc. from the provider's repositories.
|
|
1591
|
-
*/
|
|
1592
|
-
getModels(provider) {
|
|
1593
|
-
const models = {};
|
|
1594
|
-
for (const [key, value] of provider.tables.entries()) {
|
|
1595
|
-
if (models[key]) throw new alepha.AlephaError(`Model name conflict: '${key}' is already defined.`);
|
|
1596
|
-
models[key] = value;
|
|
1597
|
-
}
|
|
1598
|
-
for (const [key, value] of provider.enums.entries()) {
|
|
1599
|
-
if (models[key]) throw new alepha.AlephaError(`Model name conflict: '${key}' is already defined.`);
|
|
1600
|
-
models[key] = value;
|
|
1601
|
-
}
|
|
1602
|
-
for (const [key, value] of provider.sequences.entries()) {
|
|
1603
|
-
if (models[key]) throw new alepha.AlephaError(`Model name conflict: '${key}' is already defined.`);
|
|
1604
|
-
models[key] = value;
|
|
1605
|
-
}
|
|
1606
|
-
return models;
|
|
1607
|
-
}
|
|
1608
|
-
/**
|
|
1609
|
-
* Load the migration snapshot from the database.
|
|
1610
|
-
*/
|
|
1611
|
-
async loadDevMigrations(provider) {
|
|
1612
|
-
const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
|
|
1613
|
-
if (provider.url.includes(":memory:")) {
|
|
1614
|
-
this.log.trace(`In-memory database detected for '${name}', skipping migration snapshot load.`);
|
|
1615
|
-
return;
|
|
1616
|
-
}
|
|
1617
|
-
if (provider.dialect === "sqlite") {
|
|
1618
|
-
try {
|
|
1619
|
-
const text = await (0, node_fs_promises.readFile)(`node_modules/.alepha/sqlite-${name}.json`, "utf-8");
|
|
1620
|
-
return this.alepha.codec.decode(devMigrationsSchema, text);
|
|
1621
|
-
} catch (e) {
|
|
1622
|
-
this.log.trace(`No existing migration snapshot for '${name}'`, e);
|
|
1623
|
-
}
|
|
1624
|
-
return;
|
|
1625
|
-
}
|
|
1626
|
-
await provider.execute(drizzle_orm.sql`CREATE SCHEMA IF NOT EXISTS "drizzle";`);
|
|
1627
|
-
await provider.execute(drizzle_orm.sql`
|
|
1628
|
-
CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_dev_migrations" (
|
|
1629
|
-
"id" SERIAL PRIMARY KEY,
|
|
1630
|
-
"name" TEXT NOT NULL,
|
|
1631
|
-
"created_at" TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
|
1632
|
-
"snapshot" TEXT NOT NULL
|
|
1633
|
-
);
|
|
1634
|
-
`);
|
|
1635
|
-
const rows = await provider.run(drizzle_orm.sql`SELECT * FROM "drizzle"."__drizzle_dev_migrations" WHERE "name" = ${name} LIMIT 1`, devMigrationsSchema);
|
|
1636
|
-
if (rows.length === 0) {
|
|
1637
|
-
this.log.trace(`No existing migration snapshot for '${name}'`);
|
|
1638
|
-
return;
|
|
1639
|
-
}
|
|
1640
|
-
return this.alepha.codec.decode(devMigrationsSchema, rows[0]);
|
|
1641
|
-
}
|
|
1642
|
-
async saveDevMigrations(provider, curr, devMigrations) {
|
|
1643
|
-
if (provider.url.includes(":memory:")) {
|
|
1644
|
-
this.log.trace(`In-memory database detected for '${provider.constructor.name}', skipping migration snapshot save.`);
|
|
1645
|
-
return;
|
|
1646
|
-
}
|
|
1647
|
-
const name = `${this.alepha.env.APP_NAME ?? "APP"}-${provider.constructor.name}`.toLowerCase();
|
|
1648
|
-
if (provider.dialect === "sqlite") {
|
|
1649
|
-
const filePath = `node_modules/.alepha/sqlite-${name}.json`;
|
|
1650
|
-
await (0, node_fs_promises.mkdir)("node_modules/.alepha", { recursive: true }).catch(() => null);
|
|
1651
|
-
await (0, node_fs_promises.writeFile)(filePath, JSON.stringify({
|
|
1652
|
-
id: devMigrations?.id ?? 1,
|
|
1653
|
-
name,
|
|
1654
|
-
created_at: /* @__PURE__ */ new Date(),
|
|
1655
|
-
snapshot: JSON.stringify(curr)
|
|
1656
|
-
}, null, 2));
|
|
1657
|
-
this.log.debug(`Saved migration snapshot to '${filePath}'`);
|
|
1658
|
-
return;
|
|
1659
|
-
}
|
|
1660
|
-
if (!devMigrations) await provider.execute(drizzle_orm.sql`INSERT INTO "drizzle"."__drizzle_dev_migrations" ("name", "snapshot") VALUES (${name}, ${JSON.stringify(curr)})`);
|
|
1661
|
-
else {
|
|
1662
|
-
const newSnapshot = JSON.stringify(curr);
|
|
1663
|
-
if (devMigrations.snapshot !== newSnapshot) await provider.execute(drizzle_orm.sql`UPDATE "drizzle"."__drizzle_dev_migrations" SET "snapshot" = ${newSnapshot} WHERE "id" = ${devMigrations.id}`);
|
|
1664
|
-
}
|
|
1665
|
-
}
|
|
1666
|
-
async executeStatements(statements, provider, catchErrors = false) {
|
|
1667
|
-
let nErrors = 0;
|
|
1668
|
-
for (const statement of statements) {
|
|
1669
|
-
if (statement.startsWith("DROP SCHEMA")) continue;
|
|
1670
|
-
try {
|
|
1671
|
-
await provider.execute(drizzle_orm.sql.raw(statement));
|
|
1672
|
-
} catch (error) {
|
|
1673
|
-
const errorMessage = `Error executing statement: ${statement}`;
|
|
1674
|
-
if (catchErrors) {
|
|
1675
|
-
nErrors++;
|
|
1676
|
-
this.log.warn(errorMessage, { context: [error] });
|
|
1677
|
-
} else throw error;
|
|
1678
|
-
}
|
|
1679
|
-
}
|
|
1680
|
-
if (nErrors > 0) this.log.warn(`Executed ${statements.length} statements with ${nErrors} errors.`);
|
|
1681
|
-
}
|
|
1682
|
-
async createSchemaIfNotExists(provider, schemaName) {
|
|
1683
|
-
if (!/^[a-z0-9_]+$/i.test(schemaName)) throw new Error(`Invalid schema name: ${schemaName}. Must only contain alphanumeric characters and underscores.`);
|
|
1684
|
-
const sqlSchema = drizzle_orm.sql.raw(schemaName);
|
|
1685
|
-
if (schemaName.startsWith("test_")) {
|
|
1686
|
-
this.log.info(`Drop test schema '${schemaName}' ...`, schemaName);
|
|
1687
|
-
await provider.execute(drizzle_orm.sql`DROP SCHEMA IF EXISTS ${sqlSchema} CASCADE`);
|
|
1688
|
-
}
|
|
1689
|
-
this.log.debug(`Ensuring schema '${schemaName}' exists`);
|
|
1690
|
-
await provider.execute(drizzle_orm.sql`CREATE SCHEMA IF NOT EXISTS ${sqlSchema}`);
|
|
1691
|
-
}
|
|
1692
|
-
/**
|
|
1693
|
-
* Try to load the official Drizzle Kit API.
|
|
1694
|
-
* If not available, fallback to the local kit import.
|
|
1695
|
-
*/
|
|
1696
|
-
importDrizzleKit() {
|
|
1697
|
-
try {
|
|
1698
|
-
return (0, node_module.createRequire)(require("url").pathToFileURL(__filename).href)("drizzle-kit/api");
|
|
1699
|
-
} catch (_) {
|
|
1700
|
-
throw new Error("Drizzle Kit is not installed. Please install it with `npm install -D drizzle-kit`.");
|
|
1701
|
-
}
|
|
1702
|
-
}
|
|
1703
|
-
};
|
|
1704
|
-
const devMigrationsSchema = alepha.t.object({
|
|
1705
|
-
id: alepha.t.number(),
|
|
1706
|
-
name: alepha.t.text(),
|
|
1707
|
-
snapshot: alepha.t.string(),
|
|
1708
|
-
created_at: alepha.t.string()
|
|
1709
|
-
});
|
|
1710
|
-
|
|
1711
|
-
//#endregion
|
|
1712
|
-
//#region src/orm/errors/DbMigrationError.ts
|
|
1713
|
-
var DbMigrationError = class extends DbError {
|
|
1714
|
-
name = "DbMigrationError";
|
|
1715
|
-
constructor(cause) {
|
|
1716
|
-
super("Failed to migrate database", cause);
|
|
1717
|
-
}
|
|
1718
|
-
};
|
|
1719
|
-
|
|
1720
|
-
//#endregion
|
|
1721
|
-
//#region src/orm/types/byte.ts
|
|
1722
|
-
/**
|
|
1723
|
-
* Postgres bytea type.
|
|
1724
|
-
*/
|
|
1725
|
-
const byte = (0, drizzle_orm_pg_core.customType)({ dataType: () => "bytea" });
|
|
1726
|
-
|
|
1727
|
-
//#endregion
|
|
1728
|
-
//#region src/orm/services/ModelBuilder.ts
|
|
1729
|
-
/**
|
|
1730
|
-
* Abstract base class for transforming Alepha Descriptors (Entity, Sequence, etc...)
|
|
1731
|
-
* into drizzle models (tables, enums, sequences, etc...).
|
|
1732
|
-
*/
|
|
1733
|
-
var ModelBuilder = class {
|
|
1734
|
-
/**
|
|
1735
|
-
* Convert camelCase to snake_case for column names.
|
|
1736
|
-
*/
|
|
1737
|
-
toColumnName(str) {
|
|
1738
|
-
return str[0].toLowerCase() + str.slice(1).replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
|
|
1739
|
-
}
|
|
1740
|
-
/**
|
|
1741
|
-
* Build the table configuration function for any database.
|
|
1742
|
-
* This includes indexes, foreign keys, constraints, and custom config.
|
|
1743
|
-
*
|
|
1744
|
-
* @param entity - The entity descriptor
|
|
1745
|
-
* @param builders - Database-specific builder functions
|
|
1746
|
-
* @param tableResolver - Function to resolve entity references to table columns
|
|
1747
|
-
* @param customConfigHandler - Optional handler for custom config
|
|
1748
|
-
*/
|
|
1749
|
-
buildTableConfig(entity, builders, tableResolver, customConfigHandler) {
|
|
1750
|
-
if (!entity.options.indexes && !entity.options.foreignKeys && !entity.options.constraints && !entity.options.config) return;
|
|
1751
|
-
return (self) => {
|
|
1752
|
-
const configs = [];
|
|
1753
|
-
if (entity.options.indexes) {
|
|
1754
|
-
for (const indexDef of entity.options.indexes) if (typeof indexDef === "string") {
|
|
1755
|
-
const columnName = this.toColumnName(indexDef);
|
|
1756
|
-
const indexName = `${entity.name}_${columnName}_idx`;
|
|
1757
|
-
if (self[indexDef]) configs.push(builders.index(indexName).on(self[indexDef]));
|
|
1758
|
-
} else if (typeof indexDef === "object" && indexDef !== null) {
|
|
1759
|
-
if ("column" in indexDef) {
|
|
1760
|
-
const columnName = this.toColumnName(indexDef.column);
|
|
1761
|
-
const indexName = indexDef.name || `${entity.name}_${columnName}_idx`;
|
|
1762
|
-
if (self[indexDef.column]) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(self[indexDef.column]));
|
|
1763
|
-
else configs.push(builders.index(indexName).on(self[indexDef.column]));
|
|
1764
|
-
} else if ("columns" in indexDef) {
|
|
1765
|
-
const columnNames = indexDef.columns.map((col) => this.toColumnName(col));
|
|
1766
|
-
const indexName = indexDef.name || `${entity.name}_${columnNames.join("_")}_idx`;
|
|
1767
|
-
const cols = indexDef.columns.map((col) => self[col]).filter(Boolean);
|
|
1768
|
-
if (cols.length === indexDef.columns.length) if (indexDef.unique) configs.push(builders.uniqueIndex(indexName).on(...cols));
|
|
1769
|
-
else configs.push(builders.index(indexName).on(...cols));
|
|
1770
|
-
}
|
|
1771
|
-
}
|
|
1772
|
-
}
|
|
1773
|
-
if (entity.options.foreignKeys) for (const fkDef of entity.options.foreignKeys) {
|
|
1774
|
-
const columnNames = fkDef.columns.map((col) => this.toColumnName(col));
|
|
1775
|
-
const cols = fkDef.columns.map((col) => self[col]).filter(Boolean);
|
|
1776
|
-
if (cols.length === fkDef.columns.length) {
|
|
1777
|
-
const fkName = fkDef.name || `${entity.name}_${columnNames.join("_")}_fk`;
|
|
1778
|
-
const foreignColumns = fkDef.foreignColumns.map((colRef) => {
|
|
1779
|
-
const entityCol = colRef();
|
|
1780
|
-
if (!entityCol || !entityCol.entity || !entityCol.name) throw new Error(`Invalid foreign column reference in ${entity.name}`);
|
|
1781
|
-
if (tableResolver) {
|
|
1782
|
-
const foreignTable = tableResolver(entityCol.entity.name);
|
|
1783
|
-
if (!foreignTable) throw new Error(`Foreign table ${entityCol.entity.name} not found for ${entity.name}`);
|
|
1784
|
-
return foreignTable[entityCol.name];
|
|
1785
|
-
}
|
|
1786
|
-
return entityCol;
|
|
1787
|
-
});
|
|
1788
|
-
configs.push(builders.foreignKey({
|
|
1789
|
-
name: fkName,
|
|
1790
|
-
columns: cols,
|
|
1791
|
-
foreignColumns
|
|
1792
|
-
}));
|
|
1793
|
-
}
|
|
1794
|
-
}
|
|
1795
|
-
if (entity.options.constraints) for (const constraintDef of entity.options.constraints) {
|
|
1796
|
-
const columnNames = constraintDef.columns.map((col) => this.toColumnName(col));
|
|
1797
|
-
const cols = constraintDef.columns.map((col) => self[col]).filter(Boolean);
|
|
1798
|
-
if (cols.length === constraintDef.columns.length) {
|
|
1799
|
-
if (constraintDef.unique) {
|
|
1800
|
-
const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_unique`;
|
|
1801
|
-
configs.push(builders.unique(constraintName).on(...cols));
|
|
1802
|
-
}
|
|
1803
|
-
if (constraintDef.check) {
|
|
1804
|
-
const constraintName = constraintDef.name || `${entity.name}_${columnNames.join("_")}_check`;
|
|
1805
|
-
configs.push(builders.check(constraintName, constraintDef.check));
|
|
1806
|
-
}
|
|
1807
|
-
}
|
|
1808
|
-
}
|
|
1809
|
-
if (entity.options.config && customConfigHandler) configs.push(...customConfigHandler(entity.options.config, self));
|
|
1810
|
-
else if (entity.options.config) {
|
|
1811
|
-
const customConfigs = entity.options.config(self);
|
|
1812
|
-
if (Array.isArray(customConfigs)) configs.push(...customConfigs);
|
|
1813
|
-
}
|
|
1814
|
-
return configs;
|
|
1815
|
-
};
|
|
1816
|
-
}
|
|
1817
|
-
};
|
|
1818
|
-
|
|
1819
|
-
//#endregion
|
|
1820
|
-
//#region src/orm/services/PostgresModelBuilder.ts
|
|
1821
|
-
var PostgresModelBuilder = class extends ModelBuilder {
|
|
1822
|
-
schemas = /* @__PURE__ */ new Map();
|
|
1823
|
-
getPgSchema(name) {
|
|
1824
|
-
if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, (0, drizzle_orm_pg_core.pgSchema)(name));
|
|
1825
|
-
const nsp = name !== "public" ? this.schemas.get(name) : {
|
|
1826
|
-
enum: drizzle_orm_pg_core.pgEnum,
|
|
1827
|
-
table: drizzle_orm_pg_core.pgTable
|
|
1828
|
-
};
|
|
1829
|
-
if (!nsp) throw new alepha.AlephaError(`Postgres schema ${name} not found`);
|
|
1830
|
-
return nsp;
|
|
1831
|
-
}
|
|
1832
|
-
buildTable(entity, options) {
|
|
1833
|
-
const tableName = entity.name;
|
|
1834
|
-
if (options.tables.has(tableName)) return;
|
|
1835
|
-
const nsp = this.getPgSchema(options.schema);
|
|
1836
|
-
const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
|
|
1837
|
-
const configFn = this.getTableConfig(entity, options.tables);
|
|
1838
|
-
const table = nsp.table(tableName, columns, configFn);
|
|
1839
|
-
options.tables.set(tableName, table);
|
|
1840
|
-
}
|
|
1841
|
-
buildSequence(sequence, options) {
|
|
1842
|
-
const sequenceName = sequence.name;
|
|
1843
|
-
if (options.sequences.has(sequenceName)) return;
|
|
1844
|
-
const nsp = this.getPgSchema(options.schema);
|
|
1845
|
-
options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
|
|
1846
|
-
}
|
|
1847
|
-
/**
|
|
1848
|
-
* Get PostgreSQL-specific config builder for the table.
|
|
1849
|
-
*/
|
|
1850
|
-
getTableConfig(entity, tables) {
|
|
1851
|
-
const pgBuilders = {
|
|
1852
|
-
index: drizzle_orm_pg_core.index,
|
|
1853
|
-
uniqueIndex: drizzle_orm_pg_core.uniqueIndex,
|
|
1854
|
-
unique: drizzle_orm_pg_core.unique,
|
|
1855
|
-
check: drizzle_orm_pg_core.check,
|
|
1856
|
-
foreignKey: drizzle_orm_pg_core.foreignKey
|
|
1857
|
-
};
|
|
1858
|
-
const tableResolver = (entityName) => {
|
|
1859
|
-
return tables.get(entityName);
|
|
1860
|
-
};
|
|
1861
|
-
return this.buildTableConfig(entity, pgBuilders, tableResolver);
|
|
1862
|
-
}
|
|
1863
|
-
schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
|
|
1864
|
-
return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
|
|
1865
|
-
let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
|
|
1866
|
-
if ("default" in value && value.default != null) col = col.default(value.default);
|
|
1867
|
-
if (PG_PRIMARY_KEY in value) col = col.primaryKey();
|
|
1868
|
-
if (PG_REF in value) {
|
|
1869
|
-
const config = value[PG_REF];
|
|
1870
|
-
col = col.references(() => {
|
|
1871
|
-
const ref = config.ref();
|
|
1872
|
-
const table = tables.get(ref.entity.name);
|
|
1873
|
-
if (!table) throw new alepha.AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
|
|
1874
|
-
const target = table[ref.name];
|
|
1875
|
-
if (!target) throw new alepha.AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
|
|
1876
|
-
return target;
|
|
1877
|
-
}, config.actions);
|
|
1878
|
-
}
|
|
1879
|
-
if (schema$1.required?.includes(key)) col = col.notNull();
|
|
1880
|
-
return {
|
|
1881
|
-
...columns,
|
|
1882
|
-
[key]: col
|
|
1883
|
-
};
|
|
1884
|
-
}, {});
|
|
1885
|
-
};
|
|
1886
|
-
mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
|
|
1887
|
-
const key = this.toColumnName(fieldName);
|
|
1888
|
-
if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => alepha.t.schema.isNull(it))) value = value.anyOf.find((it) => !alepha.t.schema.isNull(it));
|
|
1889
|
-
if (alepha.t.schema.isInteger(value)) {
|
|
1890
|
-
if (PG_SERIAL in value) return drizzle_orm_pg_core.serial(key);
|
|
1891
|
-
if (PG_IDENTITY in value) {
|
|
1892
|
-
const options = value[PG_IDENTITY];
|
|
1893
|
-
if (options.mode === "byDefault") return drizzle_orm_pg_core.integer().generatedByDefaultAsIdentity(options);
|
|
1894
|
-
return drizzle_orm_pg_core.integer().generatedAlwaysAsIdentity(options);
|
|
1895
|
-
}
|
|
1896
|
-
return drizzle_orm_pg_core.integer(key);
|
|
1897
|
-
}
|
|
1898
|
-
if (alepha.t.schema.isBigInt(value)) {
|
|
1899
|
-
if (PG_IDENTITY in value) {
|
|
1900
|
-
const options = value[PG_IDENTITY];
|
|
1901
|
-
if (options.mode === "byDefault") return drizzle_orm_pg_core.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
|
|
1902
|
-
return drizzle_orm_pg_core.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
|
|
1903
|
-
}
|
|
1904
|
-
}
|
|
1905
|
-
if (alepha.t.schema.isNumber(value)) {
|
|
1906
|
-
if (PG_IDENTITY in value) {
|
|
1907
|
-
const options = value[PG_IDENTITY];
|
|
1908
|
-
if (options.mode === "byDefault") return drizzle_orm_pg_core.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
|
|
1909
|
-
return drizzle_orm_pg_core.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
|
|
1910
|
-
}
|
|
1911
|
-
if (value.format === "int64") return drizzle_orm_pg_core.bigint(key, { mode: "number" });
|
|
1912
|
-
return drizzle_orm_pg_core.numeric(key);
|
|
1913
|
-
}
|
|
1914
|
-
if (alepha.t.schema.isString(value)) return this.mapStringToColumn(key, value);
|
|
1915
|
-
if (alepha.t.schema.isBoolean(value)) return drizzle_orm_pg_core.boolean(key);
|
|
1916
|
-
if (alepha.t.schema.isObject(value)) return schema(key, value);
|
|
1917
|
-
if (alepha.t.schema.isRecord(value)) return schema(key, value);
|
|
1918
|
-
if (alepha.t.schema.isArray(value)) {
|
|
1919
|
-
if (alepha.t.schema.isObject(value.items)) return schema(key, value);
|
|
1920
|
-
if (alepha.t.schema.isRecord(value.items)) return schema(key, value);
|
|
1921
|
-
if (alepha.t.schema.isString(value.items)) return drizzle_orm_pg_core.text(key).array();
|
|
1922
|
-
if (alepha.t.schema.isInteger(value.items)) return drizzle_orm_pg_core.integer(key).array();
|
|
1923
|
-
if (alepha.t.schema.isNumber(value.items)) return drizzle_orm_pg_core.numeric(key).array();
|
|
1924
|
-
if (alepha.t.schema.isBoolean(value.items)) return drizzle_orm_pg_core.boolean(key).array();
|
|
1925
|
-
}
|
|
1926
|
-
if (alepha.t.schema.isUnsafe(value) && "type" in value && value.type === "string" && "enum" in value && Array.isArray(value.enum)) {
|
|
1927
|
-
if (!value.enum.every((it) => typeof it === "string")) throw new alepha.AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
|
|
1928
|
-
if (PG_ENUM in value && value[PG_ENUM]) {
|
|
1929
|
-
const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
|
|
1930
|
-
if (enums.has(enumName)) {
|
|
1931
|
-
const values = enums.get(enumName).enumValues.join(",");
|
|
1932
|
-
const newValues = value.enum.join(",");
|
|
1933
|
-
if (values !== newValues) throw new alepha.AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
|
|
1934
|
-
}
|
|
1935
|
-
enums.set(enumName, nsp.enum(enumName, value.enum));
|
|
1936
|
-
return enums.get(enumName)(key);
|
|
1937
|
-
}
|
|
1938
|
-
return this.mapStringToColumn(key, value);
|
|
1939
|
-
}
|
|
1940
|
-
throw new alepha.AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
|
|
1941
|
-
};
|
|
1942
|
-
/**
|
|
1943
|
-
* Map a string to a PG column.
|
|
1944
|
-
*
|
|
1945
|
-
* @param key The key of the field.
|
|
1946
|
-
* @param value The value of the field.
|
|
1947
|
-
*/
|
|
1948
|
-
mapStringToColumn = (key, value) => {
|
|
1949
|
-
if ("format" in value) {
|
|
1950
|
-
if (value.format === "uuid") {
|
|
1951
|
-
if (PG_PRIMARY_KEY in value) return drizzle_orm_pg_core.uuid(key).defaultRandom();
|
|
1952
|
-
return drizzle_orm_pg_core.uuid(key);
|
|
1953
|
-
}
|
|
1954
|
-
if (value.format === "byte") return byte(key);
|
|
1955
|
-
if (value.format === "date-time") {
|
|
1956
|
-
if (PG_CREATED_AT in value) return drizzle_orm_pg_core.timestamp(key, {
|
|
1957
|
-
mode: "string",
|
|
1958
|
-
withTimezone: true
|
|
1959
|
-
}).defaultNow();
|
|
1960
|
-
if (PG_UPDATED_AT in value) return drizzle_orm_pg_core.timestamp(key, {
|
|
1961
|
-
mode: "string",
|
|
1962
|
-
withTimezone: true
|
|
1963
|
-
}).defaultNow();
|
|
1964
|
-
return drizzle_orm_pg_core.timestamp(key, {
|
|
1965
|
-
mode: "string",
|
|
1966
|
-
withTimezone: true
|
|
1967
|
-
});
|
|
1968
|
-
}
|
|
1969
|
-
if (value.format === "date") return drizzle_orm_pg_core.date(key, { mode: "string" });
|
|
1970
|
-
}
|
|
1971
|
-
return drizzle_orm_pg_core.text(key);
|
|
1972
|
-
};
|
|
1973
|
-
};
|
|
1974
|
-
|
|
1975
|
-
//#endregion
|
|
1976
|
-
//#region src/orm/providers/drivers/NodePostgresProvider.ts
|
|
1977
|
-
const envSchema$2 = alepha.t.object({
|
|
1978
|
-
DATABASE_URL: alepha.t.optional(alepha.t.text()),
|
|
1979
|
-
POSTGRES_SCHEMA: alepha.t.optional(alepha.t.text())
|
|
1980
|
-
});
|
|
1981
|
-
var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
|
|
1982
|
-
static SSL_MODES = [
|
|
1983
|
-
"require",
|
|
1984
|
-
"allow",
|
|
1985
|
-
"prefer",
|
|
1986
|
-
"verify-full"
|
|
1987
|
-
];
|
|
1988
|
-
log = (0, alepha_logger.$logger)();
|
|
1989
|
-
env = (0, alepha.$env)(envSchema$2);
|
|
1990
|
-
kit = (0, alepha.$inject)(DrizzleKitProvider);
|
|
1991
|
-
builder = (0, alepha.$inject)(PostgresModelBuilder);
|
|
1992
|
-
client;
|
|
1993
|
-
pg;
|
|
1994
|
-
dialect = "postgresql";
|
|
1995
|
-
get name() {
|
|
1996
|
-
return "postgres";
|
|
1997
|
-
}
|
|
1998
|
-
/**
|
|
1999
|
-
* In testing mode, the schema name will be generated and deleted after the test.
|
|
2000
|
-
*/
|
|
2001
|
-
schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
|
|
2002
|
-
get url() {
|
|
2003
|
-
if (!this.env.DATABASE_URL) throw new alepha.AlephaError("DATABASE_URL is not defined in the environment");
|
|
2004
|
-
return this.env.DATABASE_URL;
|
|
2005
|
-
}
|
|
2006
|
-
/**
|
|
2007
|
-
* Execute a SQL statement.
|
|
2008
|
-
*/
|
|
2009
|
-
execute(statement) {
|
|
2010
|
-
try {
|
|
2011
|
-
return this.db.execute(statement);
|
|
2012
|
-
} catch (error) {
|
|
2013
|
-
throw new DbError("Error executing statement", error);
|
|
2014
|
-
}
|
|
2015
|
-
}
|
|
2016
|
-
/**
|
|
2017
|
-
* Get Postgres schema used by this provider.
|
|
2018
|
-
*/
|
|
2019
|
-
get schema() {
|
|
2020
|
-
if (this.schemaForTesting) return this.schemaForTesting;
|
|
2021
|
-
if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
|
|
2022
|
-
return "public";
|
|
2023
|
-
}
|
|
2024
|
-
/**
|
|
2025
|
-
* Get the Drizzle Postgres database instance.
|
|
2026
|
-
*/
|
|
2027
|
-
get db() {
|
|
2028
|
-
if (!this.pg) throw new alepha.AlephaError("Database not initialized");
|
|
2029
|
-
return this.pg;
|
|
2030
|
-
}
|
|
2031
|
-
async executeMigrations(migrationsFolder) {
|
|
2032
|
-
await (0, drizzle_orm_postgres_js_migrator.migrate)(this.db, { migrationsFolder });
|
|
2033
|
-
}
|
|
2034
|
-
onStart = (0, alepha.$hook)({
|
|
2035
|
-
on: "start",
|
|
2036
|
-
handler: async () => {
|
|
2037
|
-
await this.connect();
|
|
2038
|
-
if (!this.alepha.isServerless()) try {
|
|
2039
|
-
await this.migrate.run();
|
|
2040
|
-
} catch (error) {
|
|
2041
|
-
throw new DbMigrationError(error);
|
|
2042
|
-
}
|
|
2043
|
-
}
|
|
2044
|
-
});
|
|
2045
|
-
onStop = (0, alepha.$hook)({
|
|
2046
|
-
on: "stop",
|
|
2047
|
-
handler: async () => {
|
|
2048
|
-
if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
|
|
2049
|
-
if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new alepha.AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
|
|
2050
|
-
this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
|
|
2051
|
-
await this.execute(drizzle_orm.sql`DROP SCHEMA IF EXISTS ${drizzle_orm.sql.raw(this.schemaForTesting)} CASCADE`);
|
|
2052
|
-
this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
|
|
2053
|
-
}
|
|
2054
|
-
await this.close();
|
|
2055
|
-
}
|
|
2056
|
-
});
|
|
2057
|
-
async connect() {
|
|
2058
|
-
this.log.debug("Connect ..");
|
|
2059
|
-
const client = (0, postgres.default)(this.getClientOptions());
|
|
2060
|
-
await client`SELECT 1`;
|
|
2061
|
-
this.client = client;
|
|
2062
|
-
this.pg = (0, drizzle_orm_postgres_js.drizzle)(client, { logger: { logQuery: (query, params) => {
|
|
2063
|
-
this.log.trace(query, { params });
|
|
2064
|
-
} } });
|
|
2065
|
-
this.log.info("Connection OK");
|
|
2066
|
-
}
|
|
2067
|
-
async close() {
|
|
2068
|
-
if (this.client) {
|
|
2069
|
-
this.log.debug("Close...");
|
|
2070
|
-
await this.client.end();
|
|
2071
|
-
this.client = void 0;
|
|
2072
|
-
this.pg = void 0;
|
|
2073
|
-
this.log.info("Connection closed");
|
|
2074
|
-
}
|
|
2075
|
-
}
|
|
2076
|
-
migrate = (0, alepha_lock.$lock)({ handler: async () => {
|
|
2077
|
-
await this.migrateDatabase();
|
|
2078
|
-
} });
|
|
2079
|
-
/**
|
|
2080
|
-
* Map the DATABASE_URL to postgres client options.
|
|
2081
|
-
*/
|
|
2082
|
-
getClientOptions() {
|
|
2083
|
-
const url = new URL(this.url);
|
|
2084
|
-
return {
|
|
2085
|
-
host: url.hostname,
|
|
2086
|
-
user: decodeURIComponent(url.username),
|
|
2087
|
-
database: decodeURIComponent(url.pathname.replace("/", "")),
|
|
2088
|
-
password: decodeURIComponent(url.password),
|
|
2089
|
-
port: Number(url.port || 5432),
|
|
2090
|
-
ssl: this.ssl(url),
|
|
2091
|
-
onnotice: () => {}
|
|
2092
|
-
};
|
|
2093
|
-
}
|
|
2094
|
-
ssl(url) {
|
|
2095
|
-
const mode = url.searchParams.get("sslmode");
|
|
2096
|
-
for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
|
|
2097
|
-
}
|
|
2098
|
-
/**
|
|
2099
|
-
* For testing purposes, generate a unique schema name.
|
|
2100
|
-
* The schema name will be generated based on the current date and time.
|
|
2101
|
-
* It will be in the format of `test_YYYYMMDD_HHMMSS_randomSuffix`.
|
|
2102
|
-
*/
|
|
2103
|
-
generateTestSchemaName() {
|
|
2104
|
-
const pad = (n) => n.toString().padStart(2, "0");
|
|
2105
|
-
const now = /* @__PURE__ */ new Date();
|
|
2106
|
-
return `test_${`${now.getUTCFullYear()}${pad(now.getUTCMonth() + 1)}${pad(now.getUTCDate())}_${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}`}_${Math.random().toString(36).slice(2, 6)}`;
|
|
2107
|
-
}
|
|
2108
|
-
};
|
|
2109
|
-
|
|
2110
|
-
//#endregion
|
|
2111
|
-
//#region src/orm/services/SqliteModelBuilder.ts
|
|
2112
|
-
var SqliteModelBuilder = class extends ModelBuilder {
|
|
2113
|
-
buildTable(entity, options) {
|
|
2114
|
-
const tableName = entity.name;
|
|
2115
|
-
if (options.tables.has(tableName)) return;
|
|
2116
|
-
const table = (0, drizzle_orm_sqlite_core.sqliteTable)(tableName, this.schemaToSqliteColumns(tableName, entity.schema, options.enums, options.tables), this.getTableConfig(entity, options.tables));
|
|
2117
|
-
options.tables.set(tableName, table);
|
|
2118
|
-
}
|
|
2119
|
-
buildSequence(sequence, options) {
|
|
2120
|
-
throw new alepha.AlephaError("SQLite does not support sequences");
|
|
2121
|
-
}
|
|
2122
|
-
/**
|
|
2123
|
-
* Get SQLite-specific config builder for the table.
|
|
2124
|
-
*/
|
|
2125
|
-
getTableConfig(entity, tables) {
|
|
2126
|
-
const sqliteBuilders = {
|
|
2127
|
-
index: drizzle_orm_sqlite_core.index,
|
|
2128
|
-
uniqueIndex: drizzle_orm_sqlite_core.uniqueIndex,
|
|
2129
|
-
unique: drizzle_orm_sqlite_core.unique,
|
|
2130
|
-
check: drizzle_orm_sqlite_core.check,
|
|
2131
|
-
foreignKey: drizzle_orm_sqlite_core.foreignKey
|
|
2132
|
-
};
|
|
2133
|
-
const tableResolver = (entityName) => {
|
|
2134
|
-
return tables.get(entityName);
|
|
2135
|
-
};
|
|
2136
|
-
return this.buildTableConfig(entity, sqliteBuilders, tableResolver, (config, self) => {
|
|
2137
|
-
const customConfigs = config(self);
|
|
2138
|
-
return Array.isArray(customConfigs) ? customConfigs : [];
|
|
2139
|
-
});
|
|
2140
|
-
}
|
|
2141
|
-
schemaToSqliteColumns = (tableName, schema$1, enums, tables) => {
|
|
2142
|
-
return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
|
|
2143
|
-
let col = this.mapFieldToSqliteColumn(tableName, key, value, enums);
|
|
2144
|
-
if ("default" in value && value.default != null) col = col.default(value.default);
|
|
2145
|
-
if (PG_PRIMARY_KEY in value) col = col.primaryKey();
|
|
2146
|
-
if (PG_REF in value) {
|
|
2147
|
-
const config = value[PG_REF];
|
|
2148
|
-
col = col.references(() => {
|
|
2149
|
-
const ref = config.ref();
|
|
2150
|
-
const table = tables.get(ref.entity.name);
|
|
2151
|
-
if (!table) throw new alepha.AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
|
|
2152
|
-
const target = table[ref.name];
|
|
2153
|
-
if (!target) throw new alepha.AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
|
|
2154
|
-
return target;
|
|
2155
|
-
}, config.actions);
|
|
2156
|
-
}
|
|
2157
|
-
if (schema$1.required?.includes(key)) col = col.notNull();
|
|
2158
|
-
return {
|
|
2159
|
-
...columns,
|
|
2160
|
-
[key]: col
|
|
2161
|
-
};
|
|
2162
|
-
}, {});
|
|
2163
|
-
};
|
|
2164
|
-
mapFieldToSqliteColumn = (tableName, fieldName, value, enums) => {
|
|
2165
|
-
const key = this.toColumnName(fieldName);
|
|
2166
|
-
if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => alepha.t.schema.isNull(it))) value = value.anyOf.find((it) => !alepha.t.schema.isNull(it));
|
|
2167
|
-
if (alepha.t.schema.isInteger(value)) {
|
|
2168
|
-
if (PG_SERIAL in value || PG_IDENTITY in value) return drizzle_orm_sqlite_core.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
|
|
2169
|
-
return drizzle_orm_sqlite_core.integer(key);
|
|
2170
|
-
}
|
|
2171
|
-
if (alepha.t.schema.isNumber(value)) {
|
|
2172
|
-
if (PG_IDENTITY in value) return drizzle_orm_sqlite_core.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
|
|
2173
|
-
return drizzle_orm_sqlite_core.numeric(key);
|
|
2174
|
-
}
|
|
2175
|
-
if (alepha.t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
|
|
2176
|
-
if (alepha.t.schema.isBoolean(value)) return this.sqliteBool(key, value);
|
|
2177
|
-
if (alepha.t.schema.isObject(value)) return this.sqliteJson(key, value);
|
|
2178
|
-
if (alepha.t.schema.isRecord(value)) return this.sqliteJson(key, value);
|
|
2179
|
-
if (alepha.t.schema.isAny(value)) return this.sqliteJson(key, value);
|
|
2180
|
-
if (alepha.t.schema.isArray(value)) {
|
|
2181
|
-
if (alepha.t.schema.isObject(value.items)) return this.sqliteJson(key, value);
|
|
2182
|
-
if (alepha.t.schema.isRecord(value.items)) return this.sqliteJson(key, value);
|
|
2183
|
-
if (alepha.t.schema.isAny(value.items)) return this.sqliteJson(key, value);
|
|
2184
|
-
if (alepha.t.schema.isString(value.items)) return this.sqliteJson(key, value);
|
|
2185
|
-
if (alepha.t.schema.isInteger(value.items)) return this.sqliteJson(key, value);
|
|
2186
|
-
if (alepha.t.schema.isNumber(value.items)) return this.sqliteJson(key, value);
|
|
2187
|
-
if (alepha.t.schema.isBoolean(value.items)) return this.sqliteJson(key, value);
|
|
2188
|
-
}
|
|
2189
|
-
if (alepha.t.schema.isUnsafe(value) && "type" in value && value.type === "string") return this.mapStringToSqliteColumn(key, value);
|
|
2190
|
-
throw new Error(`Unsupported schema for field '${tableName}.${fieldName}' (schema: ${JSON.stringify(value)})`);
|
|
2191
|
-
};
|
|
2192
|
-
mapStringToSqliteColumn = (key, value) => {
|
|
2193
|
-
if (value.format === "uuid") {
|
|
2194
|
-
if (PG_PRIMARY_KEY in value) return drizzle_orm_sqlite_core.text(key).primaryKey().$defaultFn(() => (0, node_crypto.randomUUID)());
|
|
2195
|
-
return drizzle_orm_sqlite_core.text(key);
|
|
2196
|
-
}
|
|
2197
|
-
if (value.format === "byte") return this.sqliteJson(key, value);
|
|
2198
|
-
if (value.format === "date-time") {
|
|
2199
|
-
if (PG_CREATED_AT in value) return this.sqliteDateTime(key, {}).default(drizzle_orm.sql`(unixepoch('subsec') * 1000)`);
|
|
2200
|
-
if (PG_UPDATED_AT in value) return this.sqliteDateTime(key, {}).default(drizzle_orm.sql`(unixepoch('subsec') * 1000)`);
|
|
2201
|
-
return this.sqliteDateTime(key, {});
|
|
2202
|
-
}
|
|
2203
|
-
if (value.format === "date") return this.sqliteDate(key, {});
|
|
2204
|
-
return drizzle_orm_sqlite_core.text(key);
|
|
2205
|
-
};
|
|
2206
|
-
sqliteJson = (name, document) => drizzle_orm_sqlite_core.customType({
|
|
2207
|
-
dataType: () => "text",
|
|
2208
|
-
toDriver: (value) => JSON.stringify(value),
|
|
2209
|
-
fromDriver: (value) => {
|
|
2210
|
-
return value && typeof value === "string" ? JSON.parse(value) : value;
|
|
2211
|
-
}
|
|
2212
|
-
})(name, { document }).$type();
|
|
2213
|
-
sqliteDateTime = drizzle_orm_sqlite_core.customType({
|
|
2214
|
-
dataType: () => "integer",
|
|
2215
|
-
toDriver: (value) => new Date(value).getTime(),
|
|
2216
|
-
fromDriver: (value) => {
|
|
2217
|
-
return new Date(value).toISOString();
|
|
2218
|
-
}
|
|
2219
|
-
});
|
|
2220
|
-
sqliteBool = drizzle_orm_sqlite_core.customType({
|
|
2221
|
-
dataType: () => "integer",
|
|
2222
|
-
toDriver: (value) => value ? 1 : 0,
|
|
2223
|
-
fromDriver: (value) => value === 1
|
|
2224
|
-
});
|
|
2225
|
-
sqliteDate = drizzle_orm_sqlite_core.customType({
|
|
2226
|
-
dataType: () => "integer",
|
|
2227
|
-
toDriver: (value) => new Date(value).getTime(),
|
|
2228
|
-
fromDriver: (value) => {
|
|
2229
|
-
return new Date(value).toISOString().split("T")[0];
|
|
2230
|
-
}
|
|
2231
|
-
});
|
|
2232
|
-
};
|
|
2233
|
-
|
|
2234
|
-
//#endregion
|
|
2235
|
-
//#region src/orm/providers/drivers/NodeSqliteProvider.ts
|
|
2236
|
-
const envSchema$1 = alepha.t.object({ DATABASE_URL: alepha.t.optional(alepha.t.text()) });
|
|
2237
|
-
/**
|
|
2238
|
-
* Configuration options for the Node.js SQLite database provider.
|
|
2239
|
-
*/
|
|
2240
|
-
const nodeSqliteOptions = (0, alepha.$atom)({
|
|
2241
|
-
name: "alepha.postgres.node-sqlite.options",
|
|
2242
|
-
schema: alepha.t.object({ path: alepha.t.optional(alepha.t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
|
|
2243
|
-
default: {}
|
|
2244
|
-
});
|
|
2245
|
-
/**
|
|
2246
|
-
* Add a fake support for SQLite in Node.js based on Postgres interfaces.
|
|
2247
|
-
*
|
|
2248
|
-
* This is NOT a real SQLite provider, it's a workaround to use SQLite with Drizzle ORM.
|
|
2249
|
-
* This is NOT recommended for production use.
|
|
2250
|
-
*/
|
|
2251
|
-
var NodeSqliteProvider = class extends DatabaseProvider {
|
|
2252
|
-
kit = (0, alepha.$inject)(DrizzleKitProvider);
|
|
2253
|
-
log = (0, alepha_logger.$logger)();
|
|
2254
|
-
env = (0, alepha.$env)(envSchema$1);
|
|
2255
|
-
builder = (0, alepha.$inject)(SqliteModelBuilder);
|
|
2256
|
-
options = (0, alepha.$use)(nodeSqliteOptions);
|
|
2257
|
-
sqlite;
|
|
2258
|
-
get name() {
|
|
2259
|
-
return "sqlite";
|
|
2260
|
-
}
|
|
2261
|
-
dialect = "sqlite";
|
|
2262
|
-
get url() {
|
|
2263
|
-
const path = this.options.path ?? this.env.DATABASE_URL;
|
|
2264
|
-
if (path) {
|
|
2265
|
-
if (path.startsWith("postgres://")) throw new alepha.AlephaError("Postgres URL is not supported for SQLite provider.");
|
|
2266
|
-
return path;
|
|
2267
|
-
}
|
|
2268
|
-
if (this.alepha.isTest()) return ":memory:";
|
|
2269
|
-
else return "node_modules/sqlite.db";
|
|
2270
|
-
}
|
|
2271
|
-
async execute(query) {
|
|
2272
|
-
const { sql: sql$8, params, method } = this.db.all(query).getQuery();
|
|
2273
|
-
this.log.trace(`${sql$8}`, params);
|
|
2274
|
-
const statement = this.sqlite.prepare(sql$8);
|
|
2275
|
-
if (method === "run") {
|
|
2276
|
-
statement.run(...params);
|
|
2277
|
-
return [];
|
|
2278
|
-
}
|
|
2279
|
-
if (method === "get") {
|
|
2280
|
-
const data = statement.get(...params);
|
|
2281
|
-
return data ? [{ ...data }] : [];
|
|
2282
|
-
}
|
|
2283
|
-
return statement.all(...params);
|
|
2284
|
-
}
|
|
2285
|
-
db = (0, drizzle_orm_sqlite_proxy.drizzle)(async (sql$8, params, method) => {
|
|
2286
|
-
const statement = this.sqlite.prepare(sql$8);
|
|
2287
|
-
this.log.trace(`${sql$8}`, { params });
|
|
2288
|
-
if (method === "get") {
|
|
2289
|
-
const data = statement.get(...params);
|
|
2290
|
-
return { rows: data ? [{ ...data }] : [] };
|
|
2291
|
-
}
|
|
2292
|
-
if (method === "run") {
|
|
2293
|
-
statement.run(...params);
|
|
2294
|
-
return { rows: [] };
|
|
2295
|
-
}
|
|
2296
|
-
if (method === "all") return { rows: statement.all(...params).map((row) => Object.values(row)) };
|
|
2297
|
-
if (method === "values") return { rows: statement.all(...params).map((row) => Object.values(row)) };
|
|
2298
|
-
throw new alepha.AlephaError(`Unsupported method: ${method}`);
|
|
2299
|
-
});
|
|
2300
|
-
onStart = (0, alepha.$hook)({
|
|
2301
|
-
on: "start",
|
|
2302
|
-
handler: async () => {
|
|
2303
|
-
const { DatabaseSync } = await import("node:sqlite");
|
|
2304
|
-
const filepath = this.url.replace("sqlite://", "");
|
|
2305
|
-
if (filepath !== ":memory:" && filepath !== "") {
|
|
2306
|
-
const dirname = filepath.split("/").slice(0, -1).join("/");
|
|
2307
|
-
if (dirname) await (0, node_fs_promises.mkdir)(dirname, { recursive: true });
|
|
2308
|
-
}
|
|
2309
|
-
this.sqlite = new DatabaseSync(filepath);
|
|
2310
|
-
await this.migrateDatabase();
|
|
2311
|
-
this.log.info(`Using SQLite database at ${filepath}`);
|
|
2312
|
-
}
|
|
2313
|
-
});
|
|
2314
|
-
async executeMigrations(migrationsFolder) {
|
|
2315
|
-
await (0, drizzle_orm_sqlite_proxy_migrator.migrate)(this.db, async (migrationQueries) => {
|
|
2316
|
-
this.log.debug("Executing migration queries", { migrationQueries });
|
|
2317
|
-
for (const query of migrationQueries) this.sqlite.prepare(query).run();
|
|
2318
|
-
}, { migrationsFolder });
|
|
2319
|
-
}
|
|
2320
|
-
};
|
|
2321
|
-
|
|
2322
|
-
//#endregion
|
|
2323
|
-
//#region src/orm/providers/drivers/PglitePostgresProvider.ts
|
|
2324
|
-
const envSchema = alepha.t.object({ DATABASE_URL: alepha.t.optional(alepha.t.text()) });
|
|
2325
|
-
var PglitePostgresProvider = class PglitePostgresProvider extends DatabaseProvider {
|
|
2326
|
-
static importPglite() {
|
|
2327
|
-
try {
|
|
2328
|
-
return (0, node_module.createRequire)(require("url").pathToFileURL(__filename).href)("@electric-sql/pglite");
|
|
2329
|
-
} catch {}
|
|
2330
|
-
}
|
|
2331
|
-
env = (0, alepha.$env)(envSchema);
|
|
2332
|
-
log = (0, alepha_logger.$logger)();
|
|
2333
|
-
kit = (0, alepha.$inject)(DrizzleKitProvider);
|
|
2334
|
-
builder = (0, alepha.$inject)(PostgresModelBuilder);
|
|
2335
|
-
client;
|
|
2336
|
-
pglite;
|
|
2337
|
-
get name() {
|
|
2338
|
-
return "pglite";
|
|
2339
|
-
}
|
|
2340
|
-
dialect = "postgresql";
|
|
2341
|
-
get url() {
|
|
2342
|
-
let path = this.env.DATABASE_URL;
|
|
2343
|
-
if (!path) if (this.alepha.isTest()) path = ":memory:";
|
|
2344
|
-
else path = "node_modules/.db";
|
|
2345
|
-
else if (path.includes(":memory:")) path = ":memory:";
|
|
2346
|
-
else if (path.startsWith("file://")) path = path.replace("file://", "");
|
|
2347
|
-
return path;
|
|
2348
|
-
}
|
|
2349
|
-
get db() {
|
|
2350
|
-
if (!this.pglite) throw new alepha.AlephaError("Database not initialized");
|
|
2351
|
-
return this.pglite;
|
|
2352
|
-
}
|
|
2353
|
-
async execute(statement) {
|
|
2354
|
-
const { rows } = await this.db.execute(statement);
|
|
2355
|
-
return rows;
|
|
2356
|
-
}
|
|
2357
|
-
onStart = (0, alepha.$hook)({
|
|
2358
|
-
on: "start",
|
|
2359
|
-
handler: async () => {
|
|
2360
|
-
if (Object.keys(this.kit.getModels(this)).length === 0) return;
|
|
2361
|
-
const module$1 = PglitePostgresProvider.importPglite();
|
|
2362
|
-
if (!module$1) throw new alepha.AlephaError("@electric-sql/pglite is not installed. Please install it to use the pglite driver.");
|
|
2363
|
-
const { drizzle: drizzle$2 } = (0, node_module.createRequire)(require("url").pathToFileURL(__filename).href)("drizzle-orm/pglite");
|
|
2364
|
-
const path = this.url;
|
|
2365
|
-
if (path !== ":memory:") {
|
|
2366
|
-
await (0, node_fs_promises.mkdir)(path, { recursive: true }).catch(() => null);
|
|
2367
|
-
this.client = new module$1.PGlite(path);
|
|
2368
|
-
} else this.client = new module$1.PGlite();
|
|
2369
|
-
this.pglite = drizzle$2({ client: this.client });
|
|
2370
|
-
await this.migrateDatabase();
|
|
2371
|
-
this.log.info(`Using PGlite database at ${path}`);
|
|
2372
|
-
}
|
|
2373
|
-
});
|
|
2374
|
-
onStop = (0, alepha.$hook)({
|
|
2375
|
-
on: "stop",
|
|
2376
|
-
handler: async () => {
|
|
2377
|
-
if (this.client) {
|
|
2378
|
-
this.log.debug("Closing PGlite connection...");
|
|
2379
|
-
await this.client.close();
|
|
2380
|
-
this.client = void 0;
|
|
2381
|
-
this.pglite = void 0;
|
|
2382
|
-
this.log.info("PGlite connection closed");
|
|
2383
|
-
}
|
|
2384
|
-
}
|
|
2385
|
-
});
|
|
2386
|
-
async executeMigrations(migrationsFolder) {
|
|
2387
|
-
await (0, drizzle_orm_pglite_migrator.migrate)(this.db, { migrationsFolder });
|
|
2388
|
-
}
|
|
2389
|
-
};
|
|
2390
|
-
|
|
2391
|
-
//#endregion
|
|
2392
|
-
//#region src/orm/descriptors/$transaction.ts
|
|
2393
|
-
/**
|
|
2394
|
-
* Creates a transaction descriptor for database operations requiring atomicity and consistency.
|
|
2395
|
-
*
|
|
2396
|
-
* This descriptor provides a convenient way to wrap database operations in PostgreSQL
|
|
2397
|
-
* transactions, ensuring ACID properties and automatic retry logic for version conflicts.
|
|
2398
|
-
* It integrates seamlessly with the repository pattern and provides built-in handling
|
|
2399
|
-
* for optimistic locking scenarios with automatic retry on version mismatches.
|
|
2400
|
-
*
|
|
2401
|
-
* **Important Notes**:
|
|
2402
|
-
* - All operations within the transaction handler are atomic
|
|
2403
|
-
* - Automatic retry on `PgVersionMismatchError` for optimistic locking
|
|
2404
|
-
* - Pass `{ tx }` option to all repository operations within the transaction
|
|
2405
|
-
* - Transactions are automatically rolled back on any unhandled error
|
|
2406
|
-
* - Use appropriate isolation levels based on your consistency requirements
|
|
2407
|
-
*/
|
|
2408
|
-
const $transaction = (opts) => {
|
|
2409
|
-
const { alepha: alepha$1 } = (0, alepha.$context)();
|
|
2410
|
-
const provider = alepha$1.inject(DatabaseProvider);
|
|
2411
|
-
return (0, alepha_retry.$retry)({
|
|
2412
|
-
when: (err) => err instanceof DbVersionMismatchError,
|
|
2413
|
-
handler: (...args) => provider.db.transaction(async (tx) => opts.handler(tx, ...args), opts.config)
|
|
2414
|
-
});
|
|
2415
|
-
};
|
|
2416
|
-
|
|
2417
|
-
//#endregion
|
|
2418
|
-
//#region src/orm/helpers/parseQueryString.ts
|
|
2419
|
-
/**
|
|
2420
|
-
* Parse a string query into a PgQueryWhere object.
|
|
2421
|
-
*
|
|
2422
|
-
* Supported syntax:
|
|
2423
|
-
* - Simple equality: "name=John"
|
|
2424
|
-
* - Wildcard patterns: "name=John*" (startsWith), "name=*John" (endsWith), "name=*John*" (contains)
|
|
2425
|
-
* - Operators: "age>18", "age>=18", "age<65", "age<=65", "status!=active"
|
|
2426
|
-
* - NULL checks: "deletedAt=null", "email!=null"
|
|
2427
|
-
* - IN arrays: "status=[pending,active]"
|
|
2428
|
-
* - AND conditions: "name=John&age>18"
|
|
2429
|
-
* - OR conditions: "name=John|email=john@example.com"
|
|
2430
|
-
* - Nested AND/OR: "(name=John|name=Jane)&age>18"
|
|
2431
|
-
* - JSONB nested: "profile.city=Paris"
|
|
2432
|
-
*
|
|
2433
|
-
* @example
|
|
2434
|
-
* ```ts
|
|
2435
|
-
* // Simple equality
|
|
2436
|
-
* parseQueryString("name=John")
|
|
2437
|
-
* // => { name: { eq: "John" } }
|
|
2438
|
-
*
|
|
2439
|
-
* // Wildcard patterns
|
|
2440
|
-
* parseQueryString("name=John*") // startsWith
|
|
2441
|
-
* // => { name: { startsWith: "John" } }
|
|
2442
|
-
* parseQueryString("name=*Smith") // endsWith
|
|
2443
|
-
* // => { name: { endsWith: "Smith" } }
|
|
2444
|
-
* parseQueryString("name=*oh*") // contains
|
|
2445
|
-
* // => { name: { contains: "oh" } }
|
|
2446
|
-
*
|
|
2447
|
-
* // Multiple conditions
|
|
2448
|
-
* parseQueryString("name=John&age>18")
|
|
2449
|
-
* // => { and: [{ name: { eq: "John" } }, { age: { gt: 18 } }] }
|
|
2450
|
-
*
|
|
2451
|
-
* // OR conditions
|
|
2452
|
-
* parseQueryString("status=active|status=pending")
|
|
2453
|
-
* // => { or: [{ status: { eq: "active" } }, { status: { eq: "pending" } }] }
|
|
2454
|
-
*
|
|
2455
|
-
* // Complex nested
|
|
2456
|
-
* parseQueryString("(name=John|name=Jane)&age>18&status!=archived")
|
|
2457
|
-
* // => { and: [
|
|
2458
|
-
* // { or: [{ name: { eq: "John" } }, { name: { eq: "Jane" } }] },
|
|
2459
|
-
* // { age: { gt: 18 } },
|
|
2460
|
-
* // { status: { ne: "archived" } }
|
|
2461
|
-
* // ] }
|
|
2462
|
-
*
|
|
2463
|
-
* // JSONB nested query
|
|
2464
|
-
* parseQueryString("profile.city=Paris&profile.age>25")
|
|
2465
|
-
* // => { profile: { city: { eq: "Paris" }, age: { gt: 25 } } }
|
|
2466
|
-
* ```
|
|
2467
|
-
*/
|
|
2468
|
-
/**
 * Parses a filter query string (e.g. "name=John&age>18") into a structured
 * where-filter object. Blank or missing input yields an empty filter.
 *
 * @param {string} query - The raw query string to parse.
 * @returns {object} The parsed filter tree (eq/ne/gt/... leaves, and/or groups).
 */
function parseQueryString(query) {
	// Guard: treat undefined/null/empty/whitespace-only input as "no filter".
	const trimmed = query ? query.trim() : "";
	if (trimmed === "") return {};
	// Delegate the real work to the recursive-descent parser.
	return new QueryStringParser(query).parse();
}
|
|
2472
|
-
/**
 * Recursive-descent parser for the filter query-string mini-language.
 *
 * Grammar (precedence low to high):
 *   expression := or
 *   or         := and ("|" and)*
 *   and        := primary ("&" primary)*
 *   primary    := "(" expression ")" | condition
 *   condition  := fieldPath operator value
 *
 * Produces plain objects such as { name: { eq: "John" } } and
 * { and: [...] } / { or: [...] } groups.
 */
var QueryStringParser = class {
	// Cursor into `query`; advanced by every parse step.
	pos = 0;
	// The trimmed input string being parsed.
	query;
	constructor(query) {
		this.query = query.trim();
	}
	/** Entry point: parses the whole input and returns the filter tree. */
	parse() {
		return this.parseExpression();
	}
	// An expression is an OR-chain (lowest precedence level).
	parseExpression() {
		return this.parseOr();
	}
	// or := and ("|" and)* — collapses to its single operand when no "|" follows.
	parseOr() {
		const left = this.parseAnd();
		if (this.peek() === "|") {
			const conditions = [left];
			while (this.peek() === "|") {
				this.consume("|");
				conditions.push(this.parseAnd());
			}
			return { or: conditions };
		}
		return left;
	}
	// and := primary ("&" primary)* — "&" binds tighter than "|".
	parseAnd() {
		const left = this.parsePrimary();
		if (this.peek() === "&") {
			const conditions = [left];
			while (this.peek() === "&") {
				this.consume("&");
				conditions.push(this.parsePrimary());
			}
			return { and: conditions };
		}
		return left;
	}
	// primary := "(" expression ")" | condition — parentheses restart at the top.
	parsePrimary() {
		this.skipWhitespace();
		if (this.peek() === "(") {
			this.consume("(");
			const expr = this.parseExpression();
			this.consume(")");
			return expr;
		}
		return this.parseCondition();
	}
	// condition := fieldPath operator value, e.g. "profile.city=Paris".
	parseCondition() {
		const field = this.parseFieldPath();
		this.skipWhitespace();
		const operator = this.parseOperator();
		this.skipWhitespace();
		const value = this.parseValue();
		// An empty unquoted value (e.g. "name=") is a syntax error.
		if (value === "") throw new alepha.AlephaError(`Expected value for field '${field.join(".")}'`);
		return this.buildCondition(field, operator, value);
	}
	// Reads a dotted field path ("profile.city" -> ["profile", "city"]).
	// Stops at the first operator character or space.
	parseFieldPath() {
		const path = [];
		let current = "";
		while (this.pos < this.query.length) {
			const ch = this.query[this.pos];
			if (ch === "." && current) {
				path.push(current);
				current = "";
				this.pos++;
				continue;
			}
			if (ch === "=" || ch === "!" || ch === ">" || ch === "<" || ch === " ") break;
			current += ch;
			this.pos++;
		}
		if (current) path.push(current);
		return path;
	}
	// Reads one of: >= <= != = > < — two-character operators are tried first
	// so ">=" is not mis-read as ">" followed by "=".
	parseOperator() {
		this.skipWhitespace();
		const remaining = this.query.slice(this.pos);
		if (remaining.startsWith(">=")) {
			this.pos += 2;
			return ">=";
		}
		if (remaining.startsWith("<=")) {
			this.pos += 2;
			return "<=";
		}
		if (remaining.startsWith("!=")) {
			this.pos += 2;
			return "!=";
		}
		const ch = this.query[this.pos];
		if (ch === "=" || ch === ">" || ch === "<") {
			this.pos++;
			return ch;
		}
		throw new Error(`Expected operator at position ${this.pos}`);
	}
	// Reads a value: the literal null, an [array], a quoted string, or a bare
	// token (which may contain spaces) ending at & | or ).
	parseValue() {
		this.skipWhitespace();
		if (this.query.slice(this.pos, this.pos + 4).toLowerCase() === "null") {
			this.pos += 4;
			return null;
		}
		if (this.query[this.pos] === "[") return this.parseArray();
		if (this.query[this.pos] === "\"" || this.query[this.pos] === "'") return this.parseQuotedString();
		let value = "";
		while (this.pos < this.query.length) {
			const ch = this.query[this.pos];
			if (ch === "&" || ch === "|" || ch === ")") break;
			value += ch;
			this.pos++;
		}
		return this.coerceValue(value.trim());
	}
	// Reads a comma-separated array "[a, b, "c"]"; each element is either a
	// quoted string or a bare token coerced to number/boolean/string.
	parseArray() {
		this.consume("[");
		const values = [];
		while (this.pos < this.query.length && this.query[this.pos] !== "]") {
			this.skipWhitespace();
			if (this.query[this.pos] === "\"" || this.query[this.pos] === "'") values.push(this.parseQuotedString());
			else {
				let value = "";
				while (this.pos < this.query.length && this.query[this.pos] !== "," && this.query[this.pos] !== "]") {
					value += this.query[this.pos];
					this.pos++;
				}
				values.push(this.coerceValue(value.trim()));
			}
			this.skipWhitespace();
			if (this.query[this.pos] === ",") this.pos++;
		}
		this.consume("]");
		return values;
	}
	// Reads a single- or double-quoted string with backslash escapes.
	// Quoted values are NOT type-coerced (kept as strings).
	parseQuotedString() {
		const quote = this.query[this.pos];
		this.pos++;
		let value = "";
		let escaped = false;
		while (this.pos < this.query.length) {
			const ch = this.query[this.pos];
			if (escaped) {
				value += ch;
				escaped = false;
				this.pos++;
				continue;
			}
			if (ch === "\\") {
				escaped = true;
				this.pos++;
				continue;
			}
			if (ch === quote) {
				this.pos++;
				break;
			}
			value += ch;
			this.pos++;
		}
		return value;
	}
	// Converts bare tokens to int / float / boolean where they look like one;
	// anything else stays a string.
	coerceValue(value) {
		if (/^-?\d+$/.test(value)) return parseInt(value, 10);
		if (/^-?\d+\.\d+$/.test(value)) return parseFloat(value);
		if (value.toLowerCase() === "true") return true;
		if (value.toLowerCase() === "false") return false;
		return value;
	}
	// Maps (path, operator, value) to a filter leaf, then wraps it for dotted
	// paths: ["profile","city"] -> { profile: { city: <leaf> } }.
	buildCondition(path, operator, value) {
		let filterOp;
		// "=" has special cases: null -> isNull, array -> inArray,
		// "*" wildcards -> contains / startsWith / endsWith.
		if (operator === "=") if (value === null) filterOp = { isNull: true };
		else if (Array.isArray(value)) filterOp = { inArray: value };
		else if (typeof value === "string" && value.includes("*")) {
			const startsWithAsterisk = value.startsWith("*");
			const endsWithAsterisk = value.endsWith("*");
			const cleanValue = value.replace(/^\*|\*$/g, "");
			if (startsWithAsterisk && endsWithAsterisk) filterOp = { contains: cleanValue };
			else if (startsWithAsterisk) filterOp = { endsWith: cleanValue };
			// NOTE: "John*" means startsWith — the asterisk positions are
			// intentionally inverted relative to the produced operator names.
			else if (endsWithAsterisk) filterOp = { startsWith: cleanValue };
			// Interior-only "*" (e.g. "Jo*hn") is kept verbatim as an eq match.
			else filterOp = { eq: value };
		} else filterOp = { eq: value };
		// "!=" with null means isNotNull.
		else if (operator === "!=") if (value === null) filterOp = { isNotNull: true };
		else filterOp = { ne: value };
		else if (operator === ">") filterOp = { gt: value };
		else if (operator === ">=") filterOp = { gte: value };
		else if (operator === "<") filterOp = { lt: value };
		else if (operator === "<=") filterOp = { lte: value };
		else throw new Error(`Unsupported operator: ${operator}`);
		if (path.length === 1) return { [path[0]]: filterOp };
		// Wrap the leaf from the innermost path segment outwards.
		let result = filterOp;
		for (let i = path.length - 1; i >= 0; i--) result = { [path[i]]: result };
		return result;
	}
	// Returns the next non-whitespace character without consuming it
	// ("" at end of input). Skipped whitespace IS consumed.
	peek() {
		this.skipWhitespace();
		return this.query[this.pos] || "";
	}
	// Consumes one expected character (after skipping whitespace) or throws.
	consume(expected) {
		this.skipWhitespace();
		if (this.query[this.pos] !== expected) throw new Error(`Expected '${expected}' at position ${this.pos}, got '${this.query[this.pos]}'`);
		this.pos++;
	}
	// Advances past any run of whitespace characters.
	skipWhitespace() {
		while (this.pos < this.query.length && /\s/.test(this.query[this.pos])) this.pos++;
	}
};
|
|
2676
|
-
/**
 * Helper function to build query strings programmatically.
 *
 * Inverse of `parseQueryString`: serializes a where-filter object back into
 * the query-string mini-language.
 *
 * Notes:
 * - `not` conditions have no query-string representation and are skipped.
 * - Nested (JSONB) filters are emitted one dotted path per condition, e.g.
 *   `{ profile: { city: { eq: "Paris" }, age: { gt: 25 } } }`
 *   => "profile.city=Paris&profile.age>25".
 *
 * @example
 * ```ts
 * buildQueryString({
 *   and: [
 *     { name: { eq: "John" } },
 *     { age: { gt: 18 } }
 *   ]
 * })
 * // => "name=John&age>18"
 * ```
 *
 * @param {object} where - The filter object to serialize.
 * @returns {string} The query string ("" when nothing is serializable).
 */
function buildQueryString(where) {
	if (!where || typeof where !== "object") return "";
	// AND groups are flat: sub-parts joined with "&".
	if ("and" in where && Array.isArray(where.and)) return where.and.map((w) => buildQueryString(w)).join("&");
	// OR groups are parenthesized so precedence survives a re-parse.
	if ("or" in where && Array.isArray(where.or)) {
		const parts$1 = where.or.map((w) => buildQueryString(w));
		return parts$1.length > 1 ? `(${parts$1.join("|")})` : parts$1[0];
	}
	// NOT is not expressible in the query-string syntax.
	if ("not" in where) return "";
	const parts = [];
	for (const [field, condition] of Object.entries(where)) {
		// Bare scalar shorthand: { name: "John" } => "name=John".
		if (typeof condition !== "object" || condition === null) {
			parts.push(`${field}=${condition}`);
			continue;
		}
		if ("eq" in condition) parts.push(`${field}=${condition.eq}`);
		else if ("ne" in condition) parts.push(`${field}!=${condition.ne}`);
		else if ("gt" in condition) parts.push(`${field}>${condition.gt}`);
		else if ("gte" in condition) parts.push(`${field}>=${condition.gte}`);
		else if ("lt" in condition) parts.push(`${field}<${condition.lt}`);
		else if ("lte" in condition) parts.push(`${field}<=${condition.lte}`);
		else if ("contains" in condition) parts.push(`${field}=*${condition.contains}*`);
		else if ("startsWith" in condition) parts.push(`${field}=${condition.startsWith}*`);
		else if ("endsWith" in condition) parts.push(`${field}=*${condition.endsWith}`);
		else if ("isNull" in condition && condition.isNull) parts.push(`${field}=null`);
		else if ("isNotNull" in condition && condition.isNotNull) parts.push(`${field}!=null`);
		else if ("inArray" in condition && Array.isArray(condition.inArray)) {
			const values = condition.inArray.map((v) => typeof v === "string" ? `"${v}"` : v);
			parts.push(`${field}=[${values.join(",")}]`);
		} else {
			// Nested (JSONB) filter: recurse one sub-field at a time so EVERY
			// emitted part carries the `field.` prefix. (Recursing on the whole
			// sub-object at once prefixed only the first "&"-joined part, which
			// did not round-trip through parseQueryString.)
			for (const [subField, subCondition] of Object.entries(condition)) {
				const nested = buildQueryString({ [subField]: subCondition });
				if (nested) parts.push(`${field}.${nested}`);
			}
		}
	}
	return parts.join("&");
}
|
|
2725
|
-
|
|
2726
|
-
//#endregion
|
|
2727
|
-
//#region src/orm/providers/PostgresTypeProvider.ts
|
|
2728
|
-
/**
 * Collection of Postgres-specific schema helpers (primary keys, audit
 * columns, enums, foreign keys) layered on top of the core `alepha.t`
 * type system. Exposed as the shared `pg` singleton below.
 */
var PostgresTypeProvider = class {
	// Raw attribute tagger, re-exported for advanced/custom column attributes.
	attr = pgAttr;
	/**
	 * Creates a primary key with an identity column.
	 */
	identityPrimaryKey = (identity, options) => pgAttr(pgAttr(pgAttr(alepha.t.integer(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
	/**
	 * Creates a primary key with a big identity column. (default)
	 */
	bigIdentityPrimaryKey = (identity, options) => pgAttr(pgAttr(pgAttr(alepha.t.int64(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
	/**
	 * Creates a primary key with a UUID column.
	 */
	uuidPrimaryKey = () => pgAttr(pgAttr(alepha.t.uuid(), PG_PRIMARY_KEY), PG_DEFAULT);
	/**
	 * Creates a primary key whose column kind is derived from `type`:
	 * integer identity (default), UUID, int64 identity or bigint identity.
	 * Throws AlephaError for any other schema type.
	 */
	primaryKey(type, options, identity) {
		if (!type || alepha.t.schema.isInteger(type)) return pgAttr(pgAttr(pgAttr(alepha.t.integer(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
		if (alepha.t.schema.isString(type) && type.format === "uuid") return pgAttr(pgAttr(alepha.t.uuid(), PG_PRIMARY_KEY), PG_DEFAULT);
		if (alepha.t.schema.isNumber(type) && type.format === "int64") return pgAttr(pgAttr(pgAttr(alepha.t.number(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
		if (alepha.t.schema.isBigInt(type)) return pgAttr(pgAttr(pgAttr(alepha.t.bigint(options), PG_PRIMARY_KEY), PG_IDENTITY, identity), PG_DEFAULT);
		throw new alepha.AlephaError(`Unsupported type for primary key: ${type}`);
	}
	/**
	 * Wrap a schema with "default" attribute.
	 * This is used to set a default value for a column in the database.
	 */
	default = (type, value) => {
		// NOTE: mutates the schema object in place to record the default value.
		if (value != null) Object.assign(type, { default: value });
		return this.attr(type, PG_DEFAULT);
	};
	/**
	 * Creates a column 'version'.
	 *
	 * This is used to track the version of a row in the database.
	 *
	 * You can use it for optimistic concurrency control (OCC) with {@link RepositoryDescriptor#save}.
	 *
	 * @see {@link RepositoryDescriptor#save}
	 * @see {@link PgVersionMismatchError}
	 */
	version = (options = {}) => this.default(pgAttr(alepha.t.integer(options), PG_VERSION), 0);
	/**
	 * Creates a column Created At. So just a datetime column with a default value of the current timestamp.
	 */
	createdAt = (options) => pgAttr(pgAttr(alepha.t.datetime(options), PG_CREATED_AT), PG_DEFAULT);
	/**
	 * Creates a column Updated At. Like createdAt, but it is updated on every update of the row.
	 */
	updatedAt = (options) => pgAttr(pgAttr(alepha.t.datetime(options), PG_UPDATED_AT), PG_DEFAULT);
	/**
	 * Creates a column Deleted At for soft delete functionality.
	 * This is used to mark rows as deleted without actually removing them from the database.
	 * The column is nullable - NULL means not deleted, timestamp means deleted.
	 */
	deletedAt = (options) => pgAttr(alepha.t.optional(alepha.t.datetime(options)), PG_DELETED_AT);
	/**
	 * Creates a Postgres ENUM type.
	 *
	 * > By default, `t.enum()` is mapped to a TEXT column in Postgres.
	 * > Using this method, you can create a real ENUM type in the database.
	 *
	 * @example
	 * ```ts
	 * const statusEnum = pg.enum(["pending", "active", "archived"], { name: "status_enum" });
	 * ```
	 */
	enum = (values, pgEnumOptions, typeOptions) => {
		return pgAttr(alepha.t.enum(values, {
			description: pgEnumOptions?.description,
			...typeOptions
		}), PG_ENUM, pgEnumOptions);
	};
	/**
	 * Creates a reference to another table or schema. Basically a foreign key.
	 */
	ref = (type, ref, actions) => {
		// Optional columns default to "set null" on delete; required ones cascade.
		const finalActions = actions ?? { onDelete: alepha.t.schema.isOptional(type) ? "set null" : "cascade" };
		return this.attr(type, PG_REF, {
			ref,
			actions: finalActions
		});
	};
	/**
	 * Creates a page schema for a given object schema.
	 * It's used by {@link RepositoryDescriptor#paginate} method.
	 */
	page = (resource, options) => {
		return (0, alepha.pageSchema)(resource, options);
	};
};
// Shared singleton used throughout the ORM layer and re-exported below.
const pg = new PostgresTypeProvider();
|
|
2818
|
-
|
|
2819
|
-
//#endregion
|
|
2820
|
-
//#region src/orm/schemas/legacyIdSchema.ts
|
|
2821
|
-
/**
 * Legacy SERIAL-based integer primary key schema, kept for backwards
 * compatibility with entities created before identity columns were used.
 *
 * @deprecated Use `pg.primaryKey()` instead.
 */
const legacyIdSchema = pgAttr(pgAttr(pgAttr(alepha.t.integer(), PG_PRIMARY_KEY), PG_SERIAL), PG_DEFAULT);
|
|
2825
|
-
|
|
2826
|
-
//#endregion
|
|
2827
|
-
//#region src/orm/index.ts
|
|
2828
|
-
/**
|
|
2829
|
-
* Postgres client based on Drizzle ORM, Alepha type-safe friendly.
|
|
2830
|
-
*
|
|
2831
|
-
* ```ts
|
|
2832
|
-
* const users = $entity({
|
|
2833
|
-
* name: "users",
|
|
2834
|
-
* schema: t.object({
|
|
2835
|
-
* id: pg.primaryKey(),
|
|
2836
|
-
* name: t.text(),
|
|
2837
|
-
* email: t.text(),
|
|
2838
|
-
* }),
|
|
2839
|
-
* });
|
|
2840
|
-
*
|
|
2841
|
-
* class Db {
|
|
2842
|
-
* users = $repository(users);
|
|
2843
|
-
* }
|
|
2844
|
-
*
|
|
2845
|
-
* const db = alepha.inject(Db);
|
|
2846
|
-
* const user = await db.users.one({ name: { eq: "John Doe" } });
|
|
2847
|
-
* ```
|
|
2848
|
-
*
|
|
2849
|
-
* This is not a full ORM, but rather a set of tools to work with Postgres databases in a type-safe way.
|
|
2850
|
-
*
|
|
2851
|
-
* It provides:
|
|
2852
|
-
* - A type-safe way to define entities and repositories. (via `$entity` and `$repository`)
|
|
2853
|
-
* - Custom query builders and filters.
|
|
2854
|
-
* - Built-in special columns like `createdAt`, `updatedAt`, `deletedAt`, `version`.
|
|
2855
|
-
* - Automatic JSONB support.
|
|
2856
|
-
* - Automatic synchronization of entities with the database schema (for testing and development).
|
|
2857
|
-
* - Fallback to raw SQL via Drizzle ORM `sql` function.
|
|
2858
|
-
*
|
|
2859
|
-
* Migrations are supported via Drizzle ORM, you need to use the `drizzle-kit` CLI tool to generate and run migrations.
|
|
2860
|
-
*
|
|
2861
|
-
* @see {@link $entity}
|
|
2862
|
-
* @see {@link $sequence}
|
|
2863
|
-
* @see {@link $repository}
|
|
2864
|
-
* @see {@link $transaction}
|
|
2865
|
-
* @module alepha.postgres
|
|
2866
|
-
*/
|
|
2867
|
-
const AlephaPostgres = (0, alepha.$module)({
	name: "alepha.postgres",
	descriptors: [$sequence, $entity],
	services: [
		alepha_datetime.AlephaDateTime,
		DatabaseProvider,
		NodePostgresProvider,
		PglitePostgresProvider,
		NodeSqliteProvider,
		SqliteModelBuilder,
		PostgresModelBuilder,
		DrizzleKitProvider,
		RepositoryProvider,
		Repository
	],
	register: (alepha$1) => {
		// DATABASE_URL is optional: without it we fall back to an embedded database.
		const env = alepha$1.parseEnv(alepha.t.object({ DATABASE_URL: alepha.t.optional(alepha.t.text()) }));
		alepha$1.with(DrizzleKitProvider);
		alepha$1.with(RepositoryProvider);
		const url = env.DATABASE_URL;
		// Feature-detect the optional PGlite dependency (embedded Postgres).
		const hasPGlite = !!PglitePostgresProvider.importPglite();
		const isPostgres = url?.startsWith("postgres:");
		const isSqlite = url?.startsWith("sqlite:");
		const isMemory = url?.includes(":memory:");
		// Prefer PGlite when it is installed and the URL is absent, ":memory:",
		// or some non-postgres path — unless an explicit sqlite: URL was given.
		// (Precedence: `&&` binds tighter than `||` in the middle clause.)
		if (hasPGlite && (isMemory || !!url && !isPostgres && !isMemory || !url) && !isSqlite) {
			alepha$1.with({
				optional: true,
				provide: DatabaseProvider,
				use: PglitePostgresProvider
			});
			return;
		}
		// A real postgres: URL selects the node-postgres driver.
		if (isPostgres) {
			alepha$1.with({
				optional: true,
				provide: DatabaseProvider,
				use: NodePostgresProvider
			});
			return;
		}
		// Fallback: node:sqlite driver (sqlite: URLs, or PGlite not installed).
		alepha$1.with({
			optional: true,
			provide: DatabaseProvider,
			use: NodeSqliteProvider
		});
	}
});
|
|
2914
|
-
|
|
2915
|
-
//#endregion
|
|
2916
|
-
// Public API of the alepha.postgres module: descriptors, services, errors,
// schema helpers and query-string utilities.
exports.$entity = $entity;
exports.$repository = $repository;
exports.$sequence = $sequence;
exports.$transaction = $transaction;
exports.AlephaPostgres = AlephaPostgres;
exports.DatabaseProvider = DatabaseProvider;
exports.DbConflictError = DbConflictError;
exports.DbEntityNotFoundError = DbEntityNotFoundError;
exports.DbError = DbError;
exports.DbMigrationError = DbMigrationError;
exports.DbVersionMismatchError = DbVersionMismatchError;
exports.DrizzleKitProvider = DrizzleKitProvider;
exports.EntityDescriptor = EntityDescriptor;
exports.NodePostgresProvider = NodePostgresProvider;
exports.NodeSqliteProvider = NodeSqliteProvider;
exports.PG_CREATED_AT = PG_CREATED_AT;
exports.PG_DEFAULT = PG_DEFAULT;
exports.PG_DELETED_AT = PG_DELETED_AT;
exports.PG_ENUM = PG_ENUM;
exports.PG_IDENTITY = PG_IDENTITY;
exports.PG_PRIMARY_KEY = PG_PRIMARY_KEY;
exports.PG_REF = PG_REF;
exports.PG_SERIAL = PG_SERIAL;
exports.PG_UPDATED_AT = PG_UPDATED_AT;
exports.PG_VERSION = PG_VERSION;
exports.PostgresTypeProvider = PostgresTypeProvider;
exports.Repository = Repository;
exports.RepositoryProvider = RepositoryProvider;
exports.SequenceDescriptor = SequenceDescriptor;
exports.buildQueryString = buildQueryString;
// Lazy re-export of the whole drizzle-orm namespace.
Object.defineProperty(exports, 'drizzle', {
	enumerable: true,
	get: function () {
		return drizzle_orm;
	}
});
exports.getAttrFields = getAttrFields;
exports.insertSchema = insertSchema;
exports.legacyIdSchema = legacyIdSchema;
exports.nodeSqliteOptions = nodeSqliteOptions;
// Lazy re-exports forwarded from the core `alepha` package.
Object.defineProperty(exports, 'pageQuerySchema', {
	enumerable: true,
	get: function () {
		return alepha.pageQuerySchema;
	}
});
Object.defineProperty(exports, 'pageSchema', {
	enumerable: true,
	get: function () {
		return alepha.pageSchema;
	}
});
exports.parseQueryString = parseQueryString;
exports.pg = pg;
exports.pgAttr = pgAttr;
exports.schema = schema;
// Lazy re-export of drizzle's `sql` template tag.
Object.defineProperty(exports, 'sql', {
	enumerable: true,
	get: function () {
		return drizzle_orm.sql;
	}
});
exports.updateSchema = updateSchema;
// Re-export everything from drizzle-orm/pg-core without clobbering local exports.
Object.keys(drizzle_orm_pg_core).forEach(function (k) {
	if (k !== 'default' && !Object.prototype.hasOwnProperty.call(exports, k)) Object.defineProperty(exports, k, {
		enumerable: true,
		get: function () { return drizzle_orm_pg_core[k]; }
	});
});
|
|
2985
|
-
|
|
2986
|
-
//# sourceMappingURL=index.cjs.map
|