alepha 0.13.8 → 0.14.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api/audits/index.d.ts +418 -338
- package/dist/api/audits/index.d.ts.map +1 -0
- package/dist/api/files/index.d.ts +81 -1
- package/dist/api/files/index.d.ts.map +1 -0
- package/dist/api/jobs/index.d.ts +107 -27
- package/dist/api/jobs/index.d.ts.map +1 -0
- package/dist/api/notifications/index.d.ts +21 -1
- package/dist/api/notifications/index.d.ts.map +1 -0
- package/dist/api/parameters/index.d.ts +455 -8
- package/dist/api/parameters/index.d.ts.map +1 -0
- package/dist/api/users/index.d.ts +844 -840
- package/dist/api/users/index.d.ts.map +1 -0
- package/dist/api/verifications/index.d.ts.map +1 -0
- package/dist/batch/index.d.ts.map +1 -0
- package/dist/bucket/index.d.ts.map +1 -0
- package/dist/cache/core/index.d.ts.map +1 -0
- package/dist/cache/redis/index.d.ts.map +1 -0
- package/dist/cli/index.d.ts +254 -59
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +499 -127
- package/dist/cli/index.js.map +1 -1
- package/dist/command/index.d.ts +217 -10
- package/dist/command/index.d.ts.map +1 -0
- package/dist/command/index.js +350 -74
- package/dist/command/index.js.map +1 -1
- package/dist/core/index.browser.js +1334 -1318
- package/dist/core/index.browser.js.map +1 -1
- package/dist/core/index.d.ts +76 -72
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +1337 -1321
- package/dist/core/index.js.map +1 -1
- package/dist/core/index.native.js +1337 -1321
- package/dist/core/index.native.js.map +1 -1
- package/dist/datetime/index.d.ts.map +1 -0
- package/dist/email/index.d.ts.map +1 -0
- package/dist/fake/index.d.ts.map +1 -0
- package/dist/file/index.d.ts.map +1 -0
- package/dist/file/index.js.map +1 -1
- package/dist/lock/core/index.d.ts.map +1 -0
- package/dist/lock/redis/index.d.ts.map +1 -0
- package/dist/logger/index.d.ts +1 -0
- package/dist/logger/index.d.ts.map +1 -0
- package/dist/mcp/index.d.ts +820 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +978 -0
- package/dist/mcp/index.js.map +1 -0
- package/dist/orm/index.d.ts +234 -107
- package/dist/orm/index.d.ts.map +1 -0
- package/dist/orm/index.js +376 -316
- package/dist/orm/index.js.map +1 -1
- package/dist/queue/core/index.d.ts +4 -4
- package/dist/queue/core/index.d.ts.map +1 -0
- package/dist/queue/redis/index.d.ts.map +1 -0
- package/dist/queue/redis/index.js +2 -4
- package/dist/queue/redis/index.js.map +1 -1
- package/dist/redis/index.d.ts +400 -29
- package/dist/redis/index.d.ts.map +1 -0
- package/dist/redis/index.js +412 -21
- package/dist/redis/index.js.map +1 -1
- package/dist/retry/index.d.ts.map +1 -0
- package/dist/router/index.d.ts.map +1 -0
- package/dist/scheduler/index.d.ts +6 -6
- package/dist/scheduler/index.d.ts.map +1 -0
- package/dist/security/index.d.ts +28 -28
- package/dist/security/index.d.ts.map +1 -0
- package/dist/server/auth/index.d.ts +155 -155
- package/dist/server/auth/index.d.ts.map +1 -0
- package/dist/server/cache/index.d.ts.map +1 -0
- package/dist/server/compress/index.d.ts.map +1 -0
- package/dist/server/cookies/index.d.ts.map +1 -0
- package/dist/server/core/index.d.ts +0 -1
- package/dist/server/core/index.d.ts.map +1 -0
- package/dist/server/core/index.js.map +1 -1
- package/dist/server/cors/index.d.ts.map +1 -0
- package/dist/server/health/index.d.ts +17 -17
- package/dist/server/health/index.d.ts.map +1 -0
- package/dist/server/helmet/index.d.ts +4 -1
- package/dist/server/helmet/index.d.ts.map +1 -0
- package/dist/server/links/index.d.ts +33 -33
- package/dist/server/links/index.d.ts.map +1 -0
- package/dist/server/metrics/index.d.ts.map +1 -0
- package/dist/server/multipart/index.d.ts.map +1 -0
- package/dist/server/multipart/index.js.map +1 -1
- package/dist/server/proxy/index.d.ts.map +1 -0
- package/dist/server/proxy/index.js.map +1 -1
- package/dist/server/rate-limit/index.d.ts.map +1 -0
- package/dist/server/security/index.d.ts +9 -9
- package/dist/server/security/index.d.ts.map +1 -0
- package/dist/server/static/index.d.ts.map +1 -0
- package/dist/server/swagger/index.d.ts.map +1 -0
- package/dist/sms/index.d.ts.map +1 -0
- package/dist/thread/index.d.ts.map +1 -0
- package/dist/topic/core/index.d.ts.map +1 -0
- package/dist/topic/redis/index.d.ts.map +1 -0
- package/dist/topic/redis/index.js +3 -3
- package/dist/topic/redis/index.js.map +1 -1
- package/dist/vite/index.d.ts +10 -2
- package/dist/vite/index.d.ts.map +1 -0
- package/dist/vite/index.js +45 -20
- package/dist/vite/index.js.map +1 -1
- package/dist/websocket/index.d.ts.map +1 -0
- package/package.json +9 -4
- package/src/cli/apps/AlephaCli.ts +10 -3
- package/src/cli/apps/AlephaPackageBuilderCli.ts +15 -8
- package/src/cli/assets/mainTs.ts +9 -10
- package/src/cli/atoms/changelogOptions.ts +45 -0
- package/src/cli/commands/ChangelogCommands.ts +259 -0
- package/src/cli/commands/DeployCommands.ts +118 -0
- package/src/cli/commands/DrizzleCommands.ts +230 -10
- package/src/cli/commands/ViteCommands.ts +47 -23
- package/src/cli/defineConfig.ts +15 -0
- package/src/cli/index.ts +3 -0
- package/src/cli/services/AlephaCliUtils.ts +10 -154
- package/src/cli/services/GitMessageParser.ts +77 -0
- package/src/command/helpers/EnvUtils.ts +37 -0
- package/src/command/index.ts +3 -1
- package/src/command/primitives/$command.ts +172 -6
- package/src/command/providers/CliProvider.ts +499 -95
- package/src/core/Alepha.ts +1 -1
- package/src/core/providers/SchemaValidator.ts +23 -1
- package/src/file/providers/NodeFileSystemProvider.ts +3 -1
- package/src/mcp/errors/McpError.ts +72 -0
- package/src/mcp/helpers/jsonrpc.ts +163 -0
- package/src/mcp/index.ts +132 -0
- package/src/mcp/interfaces/McpTypes.ts +248 -0
- package/src/mcp/primitives/$prompt.ts +188 -0
- package/src/mcp/primitives/$resource.ts +171 -0
- package/src/mcp/primitives/$tool.ts +285 -0
- package/src/mcp/providers/McpServerProvider.ts +382 -0
- package/src/mcp/transports/SseMcpTransport.ts +172 -0
- package/src/mcp/transports/StdioMcpTransport.ts +126 -0
- package/src/orm/index.ts +20 -4
- package/src/orm/interfaces/PgQueryWhere.ts +1 -26
- package/src/orm/providers/drivers/BunPostgresProvider.ts +225 -0
- package/src/orm/providers/drivers/BunSqliteProvider.ts +180 -0
- package/src/orm/providers/drivers/CloudflareD1Provider.ts +164 -0
- package/src/orm/providers/drivers/DatabaseProvider.ts +25 -0
- package/src/orm/providers/drivers/NodePostgresProvider.ts +0 -25
- package/src/orm/providers/drivers/NodeSqliteProvider.ts +3 -1
- package/src/orm/services/QueryManager.ts +10 -125
- package/src/queue/redis/providers/RedisQueueProvider.ts +2 -7
- package/src/redis/index.ts +65 -3
- package/src/redis/providers/BunRedisProvider.ts +304 -0
- package/src/redis/providers/BunRedisSubscriberProvider.ts +94 -0
- package/src/redis/providers/NodeRedisProvider.ts +280 -0
- package/src/redis/providers/NodeRedisSubscriberProvider.ts +94 -0
- package/src/redis/providers/RedisProvider.ts +134 -140
- package/src/redis/providers/RedisSubscriberProvider.ts +58 -49
- package/src/server/core/providers/BunHttpServerProvider.ts +0 -3
- package/src/server/core/providers/ServerBodyParserProvider.ts +3 -1
- package/src/server/core/providers/ServerProvider.ts +7 -4
- package/src/server/multipart/providers/ServerMultipartProvider.ts +3 -1
- package/src/server/proxy/providers/ServerProxyProvider.ts +1 -1
- package/src/topic/redis/providers/RedisTopicProvider.ts +3 -3
- package/src/vite/plugins/viteAlephaBuild.ts +8 -2
- package/src/vite/plugins/viteAlephaDev.ts +6 -2
- package/src/vite/tasks/buildServer.ts +2 -1
- package/src/vite/tasks/generateCloudflare.ts +43 -15
- package/src/vite/tasks/runAlepha.ts +1 -0
- package/src/orm/services/PgJsonQueryManager.ts +0 -511
package/dist/orm/index.js
CHANGED
@@ -9,14 +9,14 @@ import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
 import { $logger } from "alepha/logger";
 import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
 import { $lock } from "alepha/lock";
+import { randomUUID } from "node:crypto";
+import * as pg$1 from "drizzle-orm/sqlite-core";
+import { check as check$1, foreignKey as foreignKey$1, index as index$1, sqliteTable, unique as unique$1, uniqueIndex as uniqueIndex$1 } from "drizzle-orm/sqlite-core";
 import { drizzle as drizzle$1 } from "drizzle-orm/postgres-js";
 import { migrate } from "drizzle-orm/postgres-js/migrator";
 import postgres from "postgres";
 import { drizzle as drizzle$2 } from "drizzle-orm/sqlite-proxy";
 import { migrate as migrate$1 } from "drizzle-orm/sqlite-proxy/migrator";
-import { randomUUID } from "node:crypto";
-import * as pg$1 from "drizzle-orm/sqlite-core";
-import { check as check$1, foreignKey as foreignKey$1, index as index$1, sqliteTable, unique as unique$1, uniqueIndex as uniqueIndex$1 } from "drizzle-orm/sqlite-core";
 import { migrate as migrate$2 } from "drizzle-orm/pglite/migrator";
 import { $retry } from "alepha/retry";
 
@@ -289,6 +289,16 @@ var DatabaseProvider = class {
 throw new DbError(`Failed to synchronize ${this.dialect} database schema`, error);
 }
 }
+/**
+* For testing purposes, generate a unique schema name.
+* The schema name will be generated based on the current date and time.
+* It will be in the format of `test_YYYYMMDD_HHMMSS_randomSuffix`.
+*/
+generateTestSchemaName() {
+const pad = (n) => n.toString().padStart(2, "0");
+const now = /* @__PURE__ */ new Date();
+return `test_${`${now.getUTCFullYear()}${pad(now.getUTCMonth() + 1)}${pad(now.getUTCDate())}_${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}`}_${Math.random().toString(36).slice(2, 6)}`;
+}
 };
 
 //#endregion
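The new `generateTestSchemaName()` helper on `DatabaseProvider` derives disposable schema names from the current UTC time plus a short random suffix. A minimal sketch of how it is consumed and what it yields; the call site is hypothetical, only the name format comes from the code above:

```ts
// Illustration only: the provider itself calls generateTestSchemaName() when
// running under alepha.isTest() (see the NodePostgresProvider hunk further down).
declare const provider: { generateTestSchemaName(): string };

const schemaName = provider.generateTestSchemaName();
// => e.g. "test_20250102_134501_a3f9"   (test_YYYYMMDD_HHMMSS_randomSuffix)
```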
@@ -353,233 +363,9 @@ var PgRelationManager = class {
 }
 };
 
-//#endregion
-//#region ../../src/orm/services/PgJsonQueryManager.ts
-/**
-* Manages JSONB query generation for nested object and array queries in PostgreSQL.
-* This class handles complex nested queries using PostgreSQL's JSONB operators.
-*/
-var PgJsonQueryManager = class {
-/**
-* Check if a query contains nested JSONB queries.
-* A nested query is when the value is an object with operator keys.
-*/
-hasNestedQuery(where) {
-for (const [key, value] of Object.entries(where)) {
-if (key === "and" || key === "or" || key === "not") continue;
-if (value && typeof value === "object" && !Array.isArray(value)) {
-const keys = Object.keys(value);
-if (!keys.some((k) => [
-"eq",
-"ne",
-"gt",
-"gte",
-"lt",
-"lte",
-"like",
-"ilike",
-"isNull",
-"isNotNull",
-"inArray",
-"notInArray"
-].includes(k)) && keys.length > 0) return true;
-}
-}
-return false;
-}
-/**
-* Build a JSONB query condition for nested object queries.
-* Supports deep nesting like: { profile: { contact: { email: { eq: "test@example.com" } } } }
-*
-* @param column The JSONB column
-* @param path The path to the nested property (e.g., ['profile', 'contact', 'email'])
-* @param operator The filter operator (e.g., { eq: "test@example.com" })
-* @param dialect Database dialect (postgresql or sqlite)
-* @param columnSchema Optional schema of the JSON column for type inference
-* @returns SQL condition
-*/
-buildJsonbCondition(column, path, operator, dialect, columnSchema) {
-if (path.length === 0) return;
-const isArrayOperator = operator.arrayContains !== void 0 || operator.arrayContained !== void 0 || operator.arrayOverlaps !== void 0;
-let jsonValue;
-if (dialect === "sqlite") jsonValue = sql$1`json_extract(${column}, ${`$.${path.join(".")}`})`;
-else {
-let jsonPath = sql$1`${column}`;
-for (let i = 0; i < path.length - 1; i++) jsonPath = sql$1`${jsonPath}->${path[i]}`;
-const lastPath = path[path.length - 1];
-if (isArrayOperator) jsonValue = sql$1`${jsonPath}->${lastPath}`;
-else jsonValue = sql$1`${jsonPath}->>${lastPath}`;
-}
-const fieldType = columnSchema ? this.getFieldType(columnSchema, path) : void 0;
-return this.applyOperatorToJsonValue(jsonValue, operator, dialect, fieldType);
-}
-/**
-* Build JSONB array query conditions.
-* Supports queries like: { addresses: { city: { eq: "Wonderland" } } }
-* which translates to: EXISTS (SELECT 1 FROM jsonb_array_elements(addresses) elem WHERE elem->>'city' = 'Wonderland')
-*
-* @param dialect Database dialect (postgresql or sqlite)
-* Note: SQLite array queries are not yet supported
-*/
-buildJsonbArrayCondition(column, path, arrayPath, operator, dialect) {
-if (dialect === "sqlite") throw new Error("Array queries in JSON columns are not yet supported for SQLite. Please use PostgreSQL for complex JSON array queries, or restructure your data.");
-if (path.length === 0) return;
-let jsonPath = sql$1`${column}`;
-if (arrayPath) jsonPath = sql$1`${jsonPath}->${arrayPath}`;
-const elemCondition = sql$1`elem->>${path[0]}`;
-const condition = this.applyOperatorToJsonValue(elemCondition, operator, dialect);
-if (!condition) return;
-return sql$1`EXISTS (SELECT 1 FROM jsonb_array_elements(${jsonPath}) AS elem WHERE ${condition})`;
-}
-/**
-* Apply a filter operator to a JSONB value.
-* @param dialect Database dialect for appropriate casting syntax
-* @param fieldType Optional field type from schema for smart casting
-*/
-applyOperatorToJsonValue(jsonValue, operator, dialect, fieldType) {
-const castForNumeric = (value) => {
-if (dialect === "sqlite") {
-if (fieldType === "integer" || fieldType === "int") return sql$1`CAST(${value} AS INTEGER)`;
-return sql$1`CAST(${value} AS REAL)`;
-}
-return sql$1`(${value})::numeric`;
-};
-if (typeof operator !== "object") return sql$1`${jsonValue} = ${operator}`;
-const conditions = [];
-if (operator.eq !== void 0) conditions.push(sql$1`${jsonValue} = ${operator.eq}`);
-if (operator.ne !== void 0) conditions.push(sql$1`${jsonValue} != ${operator.ne}`);
-if (operator.gt !== void 0) conditions.push(sql$1`${castForNumeric(jsonValue)} > ${operator.gt}`);
-if (operator.gte !== void 0) conditions.push(sql$1`${castForNumeric(jsonValue)} >= ${operator.gte}`);
-if (operator.lt !== void 0) conditions.push(sql$1`${castForNumeric(jsonValue)} < ${operator.lt}`);
-if (operator.lte !== void 0) conditions.push(sql$1`${castForNumeric(jsonValue)} <= ${operator.lte}`);
-if (operator.like !== void 0) conditions.push(sql$1`${jsonValue} LIKE ${operator.like}`);
-if (operator.ilike !== void 0) if (dialect === "sqlite") conditions.push(sql$1`${jsonValue} LIKE ${operator.ilike}`);
-else conditions.push(sql$1`${jsonValue} ILIKE ${operator.ilike}`);
-if (operator.notLike !== void 0) conditions.push(sql$1`${jsonValue} NOT LIKE ${operator.notLike}`);
-if (operator.notIlike !== void 0) if (dialect === "sqlite") conditions.push(sql$1`${jsonValue} NOT LIKE ${operator.notIlike}`);
-else conditions.push(sql$1`${jsonValue} NOT ILIKE ${operator.notIlike}`);
-if (operator.isNull !== void 0) conditions.push(sql$1`${jsonValue} IS NULL`);
-if (operator.isNotNull !== void 0) conditions.push(sql$1`${jsonValue} IS NOT NULL`);
-if (operator.inArray !== void 0 && Array.isArray(operator.inArray)) conditions.push(sql$1`${jsonValue} IN (${sql$1.join(operator.inArray.map((v) => sql$1`${v}`), sql$1`, `)})`);
-if (operator.notInArray !== void 0 && Array.isArray(operator.notInArray)) conditions.push(sql$1`${jsonValue} NOT IN (${sql$1.join(operator.notInArray.map((v) => sql$1`${v}`), sql$1`, `)})`);
-if (operator.arrayContains !== void 0) {
-if (dialect === "postgresql") {
-const jsonArray = JSON.stringify(Array.isArray(operator.arrayContains) ? operator.arrayContains : [operator.arrayContains]);
-conditions.push(sql$1`${jsonValue} @> ${jsonArray}::jsonb`);
-}
-}
-if (operator.arrayContained !== void 0) {
-if (dialect === "postgresql") {
-const jsonArray = JSON.stringify(Array.isArray(operator.arrayContained) ? operator.arrayContained : [operator.arrayContained]);
-conditions.push(sql$1`${jsonValue} <@ ${jsonArray}::jsonb`);
-}
-}
-if (operator.arrayOverlaps !== void 0) {
-if (dialect === "postgresql") {
-const overlapConditions = (Array.isArray(operator.arrayOverlaps) ? operator.arrayOverlaps : [operator.arrayOverlaps]).map((val) => {
-return sql$1`${jsonValue} @> ${JSON.stringify(val)}::jsonb`;
-});
-if (overlapConditions.length > 0) conditions.push(sql$1`(${sql$1.join(overlapConditions, sql$1` OR `)})`);
-}
-}
-if (conditions.length === 0) return;
-if (conditions.length === 1) return conditions[0];
-return sql$1.join(conditions, sql$1` AND `);
-}
-/**
-* Parse a nested query object and extract the path and operator.
-* For example: { profile: { contact: { email: { eq: "test@example.com" } } } }
-* Returns: { path: ['profile', 'contact', 'email'], operator: { eq: "test@example.com" } }
-*/
-parseNestedQuery(nestedQuery, currentPath = []) {
-const results = [];
-for (const [key, value] of Object.entries(nestedQuery)) if (value && typeof value === "object" && !Array.isArray(value)) if (Object.keys(value).some((k) => [
-"eq",
-"ne",
-"gt",
-"gte",
-"lt",
-"lte",
-"like",
-"ilike",
-"notLike",
-"notIlike",
-"isNull",
-"isNotNull",
-"inArray",
-"notInArray",
-"arrayContains",
-"arrayContained",
-"arrayOverlaps"
-].includes(k))) results.push({
-path: [...currentPath, key],
-operator: value
-});
-else {
-const nestedResults = this.parseNestedQuery(value, [...currentPath, key]);
-results.push(...nestedResults);
-}
-return results;
-}
-/**
-* Determine if a property is a JSONB column based on the schema.
-* A column is JSONB if it's defined as an object or array in the TypeBox schema.
-*/
-isJsonbColumn(schema$1, columnName) {
-const property = schema$1.properties[columnName];
-if (!property) return false;
-return property.type === "object" || property.type === "array";
-}
-/**
-* Check if an array property contains primitive types (string, number, boolean, etc.)
-* rather than objects. Primitive arrays should use native Drizzle operators.
-* @returns true if the array contains primitives, false if it contains objects
-*/
-isPrimitiveArray(schema$1, columnName) {
-const property = schema$1.properties[columnName];
-if (!property || property.type !== "array") return false;
-const items = property.items;
-if (!items) return false;
-const itemType = items.type;
-return itemType === "string" || itemType === "number" || itemType === "integer" || itemType === "boolean" || itemType === "null";
-}
-/**
-* Get the type of a field by navigating through a schema path.
-* Used for smart type casting in SQL queries.
-*
-* @param columnSchema The schema of the JSON column (e.g., t.object({ age: t.integer() }))
-* @param path The path to navigate (e.g., ['contact', 'email'])
-* @returns The type string (e.g., 'integer', 'number', 'string') or undefined if not found
-*/
-getFieldType(columnSchema, path) {
-let current = columnSchema;
-for (const segment of path) if (current.type === "object" && current.properties) {
-current = current.properties[segment];
-if (!current) return;
-} else return;
-return current.type;
-}
-/**
-* Check if a nested path points to an array property.
-*/
-isArrayProperty(schema$1, path) {
-if (path.length === 0) return false;
-let currentSchema = schema$1.properties[path[0]];
-if (!currentSchema) return false;
-if (currentSchema.type === "array") return true;
-for (let i = 1; i < path.length; i++) if (currentSchema.type === "object" && currentSchema.properties) {
-currentSchema = currentSchema.properties[path[i]];
-if (!currentSchema) return false;
-if (currentSchema.type === "array") return true;
-} else return false;
-return false;
-}
-};
-
 //#endregion
 //#region ../../src/orm/services/QueryManager.ts
 var QueryManager = class {
-jsonQueryManager = $inject(PgJsonQueryManager);
 alepha = $inject(Alepha);
 /**
 * Convert a query object to a SQL query.
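For reference, the doc comments in the removed `PgJsonQueryManager` describe the nested JSONB filter shape it translated into PostgreSQL `jsonb` operators. A minimal sketch of such filters, using the hypothetical entity fields taken from those comments:

```ts
// Hypothetical `where` objects, copied from the removed class's doc comments.
// A deep object path ending in a filter operator:
const byEmail = {
	profile: { contact: { email: { eq: "test@example.com" } } },
};
// A filter on elements of a JSON array column, which was compiled to
// EXISTS (SELECT 1 FROM jsonb_array_elements(addresses) elem WHERE elem->>'city' = 'Wonderland'):
const byCity = {
	addresses: { city: { eq: "Wonderland" } },
};
```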
@@ -639,11 +425,7 @@
 });
 if (where) return not(where);
 }
-if (operator)
-const column = col(key);
-const jsonbSql = this.buildJsonbQuery(column, operator, schema$1, key, options.dialect);
-if (jsonbSql) conditions.push(jsonbSql);
-} else {
+if (operator) {
 const column = col(key);
 const sql$2 = this.mapOperatorToSql(operator, column, schema$1, key, options.dialect);
 if (sql$2) conditions.push(sql$2);
@@ -654,32 +436,6 @@
 return and(...conditions);
 }
 /**
-* Build a JSONB query for nested object/array queries.
-*/
-buildJsonbQuery(column, nestedQuery, schema$1, columnName, dialect) {
-const queries = this.jsonQueryManager.parseNestedQuery(nestedQuery);
-if (queries.length === 0) return;
-const columnSchema = schema$1.properties[columnName];
-const conditions = [];
-for (const { path, operator } of queries) {
-const isArrayOperator = operator.arrayContains !== void 0 || operator.arrayContained !== void 0 || operator.arrayOverlaps !== void 0;
-const isArrayProp = this.jsonQueryManager.isArrayProperty(schema$1, [columnName, ...path]);
-if (isArrayProp && isArrayOperator) {
-const condition = this.jsonQueryManager.buildJsonbCondition(column, path, operator, dialect, columnSchema);
-if (condition) conditions.push(condition);
-} else if (isArrayProp && !isArrayOperator) {
-const condition = this.jsonQueryManager.buildJsonbArrayCondition(column, path, "", operator, dialect);
-if (condition) conditions.push(condition);
-} else {
-const condition = this.jsonQueryManager.buildJsonbCondition(column, path, operator, dialect, columnSchema);
-if (condition) conditions.push(condition);
-}
-}
-if (conditions.length === 0) return;
-if (conditions.length === 1) return conditions[0];
-return and(...conditions);
-}
-/**
 * Check if an object has any filter operator properties.
 */
 hasFilterOperatorProperties(obj) {
@@ -1957,27 +1713,39 @@ var PostgresModelBuilder = class extends ModelBuilder {
 };
 
 //#endregion
-//#region ../../src/orm/providers/drivers/
-const envSchema$
+//#region ../../src/orm/providers/drivers/BunPostgresProvider.ts
+const envSchema$4 = t.object({
 DATABASE_URL: t.optional(t.text()),
 POSTGRES_SCHEMA: t.optional(t.text())
 });
-
-
-
-
-
-
-
+/**
+* Bun PostgreSQL provider using Drizzle ORM with Bun's native SQL client.
+*
+* This provider uses Bun's built-in SQL class for PostgreSQL connections,
+* which provides excellent performance on the Bun runtime.
+*
+* @example
+* ```ts
+* // Set DATABASE_URL environment variable
+* // DATABASE_URL=postgres://user:password@localhost:5432/database
+*
+* // Or configure programmatically
+* alepha.with({
+* provide: DatabaseProvider,
+* use: BunPostgresProvider,
+* });
+* ```
+*/
+var BunPostgresProvider = class extends DatabaseProvider {
 log = $logger();
-env = $env(envSchema$
+env = $env(envSchema$4);
 kit = $inject(DrizzleKitProvider);
 builder = $inject(PostgresModelBuilder);
 client;
-
+bunDb;
 dialect = "postgresql";
 get name() {
-return "postgres";
+return "bun-postgres";
 }
 /**
 * In testing mode, the schema name will be generated and deleted after the test.
@@ -2009,11 +1777,12 @@ var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
 * Get the Drizzle Postgres database instance.
 */
 get db() {
-if (!this.
-return this.
+if (!this.bunDb) throw new AlephaError("Database not initialized");
+return this.bunDb;
 }
 async executeMigrations(migrationsFolder) {
-
+const { migrate: migrate$3 } = await import("drizzle-orm/bun-sql/migrator");
+await migrate$3(this.bunDb, { migrationsFolder });
 }
 onStart = $hook({
 on: "start",
@@ -2040,55 +1809,31 @@ var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
 });
 async connect() {
 this.log.debug("Connect ..");
-
-await
-
-this.
-
-
+if (typeof Bun === "undefined") throw new AlephaError("BunPostgresProvider requires the Bun runtime. Use NodePostgresProvider for Node.js.");
+const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sql");
+const { SQL: SQL$1 } = await import("bun");
+this.client = new SQL$1(this.url);
+await this.client.unsafe("SELECT 1");
+this.bunDb = drizzle$3({
+client: this.client,
+logger: { logQuery: (query, params) => {
+this.log.trace(query, { params });
+} }
+});
 this.log.info("Connection OK");
 }
 async close() {
 if (this.client) {
 this.log.debug("Close...");
-await this.client.
+await this.client.close();
 this.client = void 0;
-this.
+this.bunDb = void 0;
 this.log.info("Connection closed");
 }
 }
 migrate = $lock({ handler: async () => {
 await this.migrateDatabase();
 } });
-/**
-* Map the DATABASE_URL to postgres client options.
-*/
-getClientOptions() {
-const url = new URL(this.url);
-return {
-host: url.hostname,
-user: decodeURIComponent(url.username),
-database: decodeURIComponent(url.pathname.replace("/", "")),
-password: decodeURIComponent(url.password),
-port: Number(url.port || 5432),
-ssl: this.ssl(url),
-onnotice: () => {}
-};
-}
-ssl(url) {
-const mode = url.searchParams.get("sslmode");
-for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
-}
-/**
-* For testing purposes, generate a unique schema name.
-* The schema name will be generated based on the current date and time.
-* It will be in the format of `test_YYYYMMDD_HHMMSS_randomSuffix`.
-*/
-generateTestSchemaName() {
-const pad = (n) => n.toString().padStart(2, "0");
-const now = /* @__PURE__ */ new Date();
-return `test_${`${now.getUTCFullYear()}${pad(now.getUTCMonth() + 1)}${pad(now.getUTCDate())}_${pad(now.getUTCHours())}${pad(now.getUTCMinutes())}${pad(now.getUTCSeconds())}`}_${Math.random().toString(36).slice(2, 6)}`;
-}
 };
 
 //#endregion
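The JSDoc added for `BunPostgresProvider` above shows the intended wiring for the new Bun Postgres driver. A slightly fuller sketch follows; the import path and the declared shape of `alepha` are assumptions, while the `with()` registration itself is taken from that JSDoc:

```ts
// Sketch based on the BunPostgresProvider JSDoc above.
// The "alepha/orm" import path and the minimal `alepha` shape are assumptions.
import { DatabaseProvider, BunPostgresProvider } from "alepha/orm";

declare const alepha: { with(opts: { provide: unknown; use: unknown }): void };

// DATABASE_URL=postgres://user:password@localhost:5432/database
alepha.with({
	provide: DatabaseProvider,
	use: BunPostgresProvider, // Bun runtime only; on Node.js keep NodePostgresProvider
});
```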
@@ -2219,6 +1964,310 @@ var SqliteModelBuilder = class extends ModelBuilder {
 });
 };
 
+//#endregion
+//#region ../../src/orm/providers/drivers/BunSqliteProvider.ts
+const envSchema$3 = t.object({ DATABASE_URL: t.optional(t.text()) });
+/**
+* Configuration options for the Bun SQLite database provider.
+*/
+const bunSqliteOptions = $atom({
+name: "alepha.postgres.bun-sqlite.options",
+schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
+default: {}
+});
+/**
+* Bun SQLite provider using Drizzle ORM with Bun's native SQLite client.
+*
+* This provider uses Bun's built-in `bun:sqlite` for SQLite connections,
+* which provides excellent performance on the Bun runtime.
+*
+* @example
+* ```ts
+* // Set DATABASE_URL environment variable
+* // DATABASE_URL=sqlite://./my-database.db
+*
+* // Or configure programmatically
+* alepha.with({
+* provide: DatabaseProvider,
+* use: BunSqliteProvider,
+* });
+*
+* // Or use options atom
+* alepha.store.mut(bunSqliteOptions, (old) => ({
+* ...old,
+* path: ":memory:",
+* }));
+* ```
+*/
+var BunSqliteProvider = class extends DatabaseProvider {
+kit = $inject(DrizzleKitProvider);
+log = $logger();
+env = $env(envSchema$3);
+builder = $inject(SqliteModelBuilder);
+options = $use(bunSqliteOptions);
+sqlite;
+bunDb;
+get name() {
+return "bun-sqlite";
+}
+dialect = "sqlite";
+get url() {
+const path = this.options.path ?? this.env.DATABASE_URL;
+if (path) {
+if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
+return path;
+}
+if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
+else return "node_modules/.alepha/bun-sqlite.db";
+}
+get db() {
+if (!this.bunDb) throw new AlephaError("Database not initialized");
+return this.bunDb;
+}
+async execute(query) {
+return this.bunDb.all(query);
+}
+onStart = $hook({
+on: "start",
+handler: async () => {
+if (typeof Bun === "undefined") throw new AlephaError("BunSqliteProvider requires the Bun runtime. Use NodeSqliteProvider for Node.js.");
+const { Database } = await import("bun:sqlite");
+const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sqlite");
+const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
+if (filepath !== ":memory:" && filepath !== "") {
+const dirname = filepath.split("/").slice(0, -1).join("/");
+if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
+}
+this.sqlite = new Database(filepath);
+this.bunDb = drizzle$3({
+client: this.sqlite,
+logger: { logQuery: (query, params) => {
+this.log.trace(query, { params });
+} }
+});
+await this.migrateDatabase();
+this.log.info(`Using Bun SQLite database at ${filepath}`);
+}
+});
+onStop = $hook({
+on: "stop",
+handler: async () => {
+if (this.sqlite) {
+this.log.debug("Closing Bun SQLite connection...");
+this.sqlite.close();
+this.sqlite = void 0;
+this.bunDb = void 0;
+this.log.info("Bun SQLite connection closed");
+}
+}
+});
+async executeMigrations(migrationsFolder) {
+const { migrate: migrate$3 } = await import("drizzle-orm/bun-sqlite/migrator");
+await migrate$3(this.bunDb, { migrationsFolder });
+}
+};
+
+//#endregion
+//#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
+/**
+* Cloudflare D1 SQLite provider using Drizzle ORM.
+*
+* This provider requires a D1 binding to be set via `cloudflareD1Options` before starting.
+* The binding is typically obtained from the Cloudflare Workers environment.
+*
+* @example
+* ```ts
+* // In your Cloudflare Worker
+* alepha.set(cloudflareD1Options, { binding: env.DB });
+* ```
+*/
+var CloudflareD1Provider = class extends DatabaseProvider {
+kit = $inject(DrizzleKitProvider);
+log = $logger();
+builder = $inject(SqliteModelBuilder);
+env = $env(t.object({ DATABASE_URL: t.string({ description: "Expect to be 'cloudflare-d1://name:id'" }) }));
+d1;
+drizzleDb;
+get name() {
+return "d1";
+}
+dialect = "sqlite";
+get url() {
+return this.env.DATABASE_URL;
+}
+get db() {
+if (!this.drizzleDb) throw new AlephaError("D1 database not initialized");
+return this.drizzleDb;
+}
+async execute(query) {
+const { rows } = await this.db.run(query);
+return rows;
+}
+onStart = $hook({
+on: "start",
+handler: async () => {
+const [bindingName] = this.env.DATABASE_URL.replace("cloudflare-d1://", "").split(":");
+const cloudflareEnv = this.alepha.store.get("cloudflare.env");
+if (!cloudflareEnv) throw new AlephaError("Cloudflare Workers environment not found in Alepha store under 'cloudflare.env'.");
+const binding = cloudflareEnv[bindingName];
+if (!binding) throw new AlephaError(`D1 binding '${bindingName}' not found in Cloudflare Workers environment.`);
+this.d1 = binding;
+const { drizzle: drizzle$3 } = await import("drizzle-orm/d1");
+this.drizzleDb = drizzle$3(this.d1);
+await this.migrateDatabase();
+this.log.info("Using Cloudflare D1 database");
+}
+});
+async executeMigrations(migrationsFolder) {
+const { migrate: migrate$3 } = await import("drizzle-orm/d1/migrator");
+await migrate$3(this.db, { migrationsFolder });
+}
+/**
+* Override development migration to skip sync (not supported on D1).
+* D1 requires proper migrations to be applied.
+*/
+async runDevelopmentMigration(migrationsFolder) {
+await this.executeMigrations(migrationsFolder);
+}
+/**
+* Override test migration to run migrations instead of sync.
+* D1 doesn't support schema synchronization.
+*/
+async runTestMigration() {
+const migrationsFolder = this.getMigrationsFolder();
+try {
+await this.executeMigrations(migrationsFolder);
+} catch {
+this.log.warn("D1 migrations failed in test environment - ensure migrations exist");
+}
+}
+};
+
+//#endregion
+//#region ../../src/orm/providers/drivers/NodePostgresProvider.ts
+const envSchema$2 = t.object({
+DATABASE_URL: t.optional(t.text()),
+POSTGRES_SCHEMA: t.optional(t.text())
+});
+var NodePostgresProvider = class NodePostgresProvider extends DatabaseProvider {
+static SSL_MODES = [
+"require",
+"allow",
+"prefer",
+"verify-full"
+];
+log = $logger();
+env = $env(envSchema$2);
+kit = $inject(DrizzleKitProvider);
+builder = $inject(PostgresModelBuilder);
+client;
+pg;
+dialect = "postgresql";
+get name() {
+return "postgres";
+}
+/**
+* In testing mode, the schema name will be generated and deleted after the test.
+*/
+schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
+get url() {
+if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
+return this.env.DATABASE_URL;
+}
+/**
+* Execute a SQL statement.
+*/
+execute(statement) {
+try {
+return this.db.execute(statement);
+} catch (error) {
+throw new DbError("Error executing statement", error);
+}
+}
+/**
+* Get Postgres schema used by this provider.
+*/
+get schema() {
+if (this.schemaForTesting) return this.schemaForTesting;
+if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
+return "public";
+}
+/**
+* Get the Drizzle Postgres database instance.
+*/
+get db() {
+if (!this.pg) throw new AlephaError("Database not initialized");
+return this.pg;
+}
+async executeMigrations(migrationsFolder) {
+await migrate(this.db, { migrationsFolder });
+}
+onStart = $hook({
+on: "start",
+handler: async () => {
+await this.connect();
+if (!this.alepha.isServerless()) try {
+await this.migrate.run();
+} catch (error) {
+throw new DbMigrationError(error);
+}
+}
+});
+onStop = $hook({
+on: "stop",
+handler: async () => {
+if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
+if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
+this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
+await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
+this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
+}
+await this.close();
+}
+});
+async connect() {
+this.log.debug("Connect ..");
+const client = postgres(this.getClientOptions());
+await client`SELECT 1`;
+this.client = client;
+this.pg = drizzle$1(client, { logger: { logQuery: (query, params) => {
+this.log.trace(query, { params });
+} } });
+this.log.info("Connection OK");
+}
+async close() {
+if (this.client) {
+this.log.debug("Close...");
+await this.client.end();
+this.client = void 0;
+this.pg = void 0;
+this.log.info("Connection closed");
+}
+}
+migrate = $lock({ handler: async () => {
+await this.migrateDatabase();
+} });
+/**
+* Map the DATABASE_URL to postgres client options.
+*/
+getClientOptions() {
+const url = new URL(this.url);
+return {
+host: url.hostname,
+user: decodeURIComponent(url.username),
+database: decodeURIComponent(url.pathname.replace("/", "")),
+password: decodeURIComponent(url.password),
+port: Number(url.port || 5432),
+ssl: this.ssl(url),
+onnotice: () => {}
+};
+}
+ssl(url) {
+const mode = url.searchParams.get("sslmode");
+for (const it of NodePostgresProvider.SSL_MODES) if (mode === it) return it;
+}
+};
+
 //#endregion
 //#region ../../src/orm/providers/drivers/NodeSqliteProvider.ts
 const envSchema$1 = t.object({ DATABASE_URL: t.optional(t.text()) });
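The `bunSqliteOptions` atom added above lets the Bun SQLite provider be pointed at a file path or at `:memory:` instead of `DATABASE_URL`. A short sketch lifted from the JSDoc; the import path and the declared `alepha` shape are assumptions, the `with()` and `store.mut()` calls mirror the JSDoc:

```ts
// Sketch based on the BunSqliteProvider JSDoc above.
// The "alepha/orm" import path and the minimal `alepha` shape are assumptions.
import { DatabaseProvider, BunSqliteProvider, bunSqliteOptions } from "alepha/orm";

declare const alepha: {
	with(opts: { provide: unknown; use: unknown }): void;
	store: {
		mut(atom: unknown, fn: (old: { path?: string }) => { path?: string }): void;
	};
};

alepha.with({ provide: DatabaseProvider, use: BunSqliteProvider });

// Point the provider at an explicit path, or an in-memory database:
alepha.store.mut(bunSqliteOptions, (old) => ({
	...old,
	path: ":memory:",
}));
```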
@@ -2289,7 +2338,7 @@ var NodeSqliteProvider = class extends DatabaseProvider {
 on: "start",
 handler: async () => {
 const { DatabaseSync } = await import("node:sqlite");
-const filepath = this.url.replace("sqlite://", "");
+const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
 if (filepath !== ":memory:" && filepath !== "") {
 const dirname = filepath.split("/").slice(0, -1).join("/");
 if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
@@ -2861,13 +2910,15 @@ const AlephaPostgres = $module({
 NodePostgresProvider,
 PglitePostgresProvider,
 NodeSqliteProvider,
+BunPostgresProvider,
+BunSqliteProvider,
+CloudflareD1Provider,
 SqliteModelBuilder,
 PostgresModelBuilder,
 DrizzleKitProvider,
 RepositoryProvider,
 Repository,
 PgRelationManager,
-PgJsonQueryManager,
 QueryManager
 ],
 register: (alepha) => {
@@ -2879,7 +2930,16 @@ const AlephaPostgres = $module({
 const isPostgres = url?.startsWith("postgres:");
 const isSqlite = url?.startsWith("sqlite:");
 const isMemory = url?.includes(":memory:");
-
+const isFile = !!url && !isPostgres && !isMemory;
+if (url?.startsWith("cloudflare-d1:")) {
+alepha.with({
+optional: true,
+provide: DatabaseProvider,
+use: CloudflareD1Provider
+});
+return;
+}
+if (hasPGlite && (isMemory || isFile || !url) && !isSqlite) {
 alepha.with({
 optional: true,
 provide: DatabaseProvider,
@@ -2891,18 +2951,18 @@ const AlephaPostgres = $module({
 alepha.with({
 optional: true,
 provide: DatabaseProvider,
-use: NodePostgresProvider
+use: alepha.isBun() ? BunPostgresProvider : NodePostgresProvider
 });
 return;
 }
 alepha.with({
 optional: true,
 provide: DatabaseProvider,
-use: NodeSqliteProvider
+use: alepha.isBun() ? BunSqliteProvider : NodeSqliteProvider
 });
 }
 });
 
 //#endregion
-export { $entity, $repository, $sequence, $transaction, AlephaPostgres, DatabaseProvider, DbConflictError, DbEntityNotFoundError, DbError, DbMigrationError, DbVersionMismatchError, DrizzleKitProvider, EntityPrimitive, NodePostgresProvider, NodeSqliteProvider, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_ENUM, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, PostgresTypeProvider, Repository, RepositoryProvider, SequencePrimitive, buildQueryString, drizzle, getAttrFields, insertSchema, legacyIdSchema, nodeSqliteOptions, pageQuerySchema, pageSchema, parseQueryString, pg, pgAttr, schema, sql, updateSchema };
+export { $entity, $repository, $sequence, $transaction, AlephaPostgres, BunPostgresProvider, BunSqliteProvider, CloudflareD1Provider, DatabaseProvider, DbConflictError, DbEntityNotFoundError, DbError, DbMigrationError, DbVersionMismatchError, DrizzleKitProvider, EntityPrimitive, NodePostgresProvider, NodeSqliteProvider, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_ENUM, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, PostgresTypeProvider, Repository, RepositoryProvider, SequencePrimitive, buildQueryString, bunSqliteOptions, drizzle, getAttrFields, insertSchema, legacyIdSchema, nodeSqliteOptions, pageQuerySchema, pageSchema, parseQueryString, pg, pgAttr, schema, sql, updateSchema };
 //# sourceMappingURL=index.js.map
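Taken together, the `register` hook above now picks a database driver from `DATABASE_URL` and the runtime. A compact restatement of the branches visible in this hunk and the two before it; the URL values are illustrative, only the provider mapping comes from the diff:

```ts
// Illustrative DATABASE_URL values and the provider register() selects for them.
const providerSelection = {
	"cloudflare-d1://DB:xxxx": "CloudflareD1Provider",
	"postgres://user:password@localhost:5432/db":
		"BunPostgresProvider on Bun (alepha.isBun()), NodePostgresProvider on Node.js",
	"sqlite://./data/app.db":
		"BunSqliteProvider on Bun, NodeSqliteProvider on Node.js",
	// :memory:, a plain file path, or no URL at all prefers PglitePostgresProvider
	// when hasPGlite is true and the URL is not a sqlite: URL.
	":memory:": "PglitePostgresProvider (when available)",
};
```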