alepha 0.14.3 → 0.14.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/api/audits/index.d.ts +338 -417
- package/dist/api/audits/index.d.ts.map +1 -1
- package/dist/api/files/index.d.ts +1 -80
- package/dist/api/files/index.d.ts.map +1 -1
- package/dist/api/jobs/index.d.ts +156 -235
- package/dist/api/jobs/index.d.ts.map +1 -1
- package/dist/api/notifications/index.d.ts +170 -249
- package/dist/api/notifications/index.d.ts.map +1 -1
- package/dist/api/parameters/index.d.ts +266 -345
- package/dist/api/parameters/index.d.ts.map +1 -1
- package/dist/api/users/index.d.ts +755 -834
- package/dist/api/users/index.d.ts.map +1 -1
- package/dist/api/verifications/index.d.ts +125 -125
- package/dist/api/verifications/index.d.ts.map +1 -1
- package/dist/cli/index.d.ts +116 -20
- package/dist/cli/index.d.ts.map +1 -1
- package/dist/cli/index.js +212 -124
- package/dist/cli/index.js.map +1 -1
- package/dist/command/index.d.ts +6 -11
- package/dist/command/index.d.ts.map +1 -1
- package/dist/command/index.js +2 -2
- package/dist/command/index.js.map +1 -1
- package/dist/core/index.browser.js +26 -4
- package/dist/core/index.browser.js.map +1 -1
- package/dist/core/index.d.ts +16 -1
- package/dist/core/index.d.ts.map +1 -1
- package/dist/core/index.js +26 -4
- package/dist/core/index.js.map +1 -1
- package/dist/core/index.native.js +26 -4
- package/dist/core/index.native.js.map +1 -1
- package/dist/logger/index.d.ts +1 -1
- package/dist/logger/index.d.ts.map +1 -1
- package/dist/logger/index.js +12 -2
- package/dist/logger/index.js.map +1 -1
- package/dist/mcp/index.d.ts.map +1 -1
- package/dist/mcp/index.js +1 -1
- package/dist/mcp/index.js.map +1 -1
- package/dist/orm/index.d.ts +37 -173
- package/dist/orm/index.d.ts.map +1 -1
- package/dist/orm/index.js +193 -422
- package/dist/orm/index.js.map +1 -1
- package/dist/server/auth/index.d.ts +167 -167
- package/dist/server/cache/index.d.ts +12 -0
- package/dist/server/cache/index.d.ts.map +1 -1
- package/dist/server/cache/index.js +55 -2
- package/dist/server/cache/index.js.map +1 -1
- package/dist/server/compress/index.d.ts +6 -0
- package/dist/server/compress/index.d.ts.map +1 -1
- package/dist/server/compress/index.js +36 -1
- package/dist/server/compress/index.js.map +1 -1
- package/dist/server/core/index.browser.js +2 -2
- package/dist/server/core/index.browser.js.map +1 -1
- package/dist/server/core/index.d.ts +10 -10
- package/dist/server/core/index.d.ts.map +1 -1
- package/dist/server/core/index.js +6 -3
- package/dist/server/core/index.js.map +1 -1
- package/dist/server/links/index.d.ts +39 -39
- package/dist/server/links/index.d.ts.map +1 -1
- package/dist/server/security/index.d.ts +9 -9
- package/dist/server/static/index.d.ts.map +1 -1
- package/dist/server/static/index.js +4 -0
- package/dist/server/static/index.js.map +1 -1
- package/dist/server/swagger/index.d.ts.map +1 -1
- package/dist/server/swagger/index.js +2 -3
- package/dist/server/swagger/index.js.map +1 -1
- package/dist/vite/index.d.ts +101 -106
- package/dist/vite/index.d.ts.map +1 -1
- package/dist/vite/index.js +571 -508
- package/dist/vite/index.js.map +1 -1
- package/package.json +1 -1
- package/src/cli/apps/AlephaCli.ts +0 -2
- package/src/cli/atoms/buildOptions.ts +88 -0
- package/src/cli/commands/build.ts +32 -69
- package/src/cli/commands/db.ts +0 -4
- package/src/cli/commands/dev.ts +16 -4
- package/src/cli/commands/gen/env.ts +53 -0
- package/src/cli/commands/gen/openapi.ts +1 -1
- package/src/cli/commands/gen/resource.ts +15 -0
- package/src/cli/commands/gen.ts +7 -1
- package/src/cli/commands/init.ts +0 -1
- package/src/cli/commands/test.ts +0 -1
- package/src/cli/commands/verify.ts +1 -1
- package/src/cli/defineConfig.ts +49 -7
- package/src/cli/index.ts +0 -1
- package/src/cli/services/AlephaCliUtils.ts +36 -25
- package/src/command/helpers/Runner.spec.ts +2 -2
- package/src/command/helpers/Runner.ts +1 -1
- package/src/command/primitives/$command.ts +0 -6
- package/src/command/providers/CliProvider.ts +1 -3
- package/src/core/Alepha.ts +42 -0
- package/src/logger/index.ts +15 -3
- package/src/mcp/transports/StdioMcpTransport.ts +1 -1
- package/src/orm/index.ts +2 -8
- package/src/queue/core/providers/WorkerProvider.spec.ts +48 -32
- package/src/server/cache/providers/ServerCacheProvider.spec.ts +183 -0
- package/src/server/cache/providers/ServerCacheProvider.ts +94 -9
- package/src/server/compress/providers/ServerCompressProvider.ts +61 -2
- package/src/server/core/helpers/ServerReply.ts +2 -2
- package/src/server/core/providers/ServerProvider.ts +11 -1
- package/src/server/static/providers/ServerStaticProvider.ts +10 -0
- package/src/server/swagger/providers/ServerSwaggerProvider.ts +5 -8
- package/src/vite/helpers/importViteReact.ts +13 -0
- package/src/vite/index.ts +1 -21
- package/src/vite/plugins/viteAlephaDev.ts +16 -1
- package/src/vite/plugins/viteAlephaSsrPreload.ts +222 -0
- package/src/vite/tasks/buildClient.ts +11 -0
- package/src/vite/tasks/buildServer.ts +47 -3
- package/src/vite/tasks/devServer.ts +69 -0
- package/src/vite/tasks/index.ts +2 -1
- package/src/cli/assets/viteConfigTs.ts +0 -14
- package/src/cli/commands/run.ts +0 -24
- package/src/vite/plugins/viteAlepha.ts +0 -37
- package/src/vite/plugins/viteAlephaBuild.ts +0 -281
package/dist/orm/index.js
CHANGED
@@ -3,15 +3,15 @@ import { $atom, $context, $env, $hook, $inject, $module, $use, Alepha, AlephaErr
 import { AlephaDateTime, DateTimeProvider } from "alepha/datetime";
 import * as drizzle from "drizzle-orm";
 import { and, arrayContained, arrayContains, arrayOverlaps, asc, between, desc, eq, getTableName, gt, gte, ilike, inArray, isNotNull, isNull, isSQLWrapper, like, lt, lte, ne, not, notBetween, notIlike, notInArray, notLike, or, sql, sql as sql$1 } from "drizzle-orm";
-import * as pg$
+import * as pg$1 from "drizzle-orm/pg-core";
 import { alias, check, customType, foreignKey, index, pgEnum, pgSchema, pgTable, unique, uniqueIndex } from "drizzle-orm/pg-core";
 import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
 import { $logger } from "alepha/logger";
 import { isSQLWrapper as isSQLWrapper$1 } from "drizzle-orm/sql/sql";
-import { $lock } from "alepha/lock";
 import { randomUUID } from "node:crypto";
-import * as pg$
+import * as pg$2 from "drizzle-orm/sqlite-core";
 import { check as check$1, foreignKey as foreignKey$1, index as index$1, sqliteTable, unique as unique$1, uniqueIndex as uniqueIndex$1 } from "drizzle-orm/sqlite-core";
+import { $lock } from "alepha/lock";
 import { drizzle as drizzle$1 } from "drizzle-orm/postgres-js";
 import { migrate } from "drizzle-orm/postgres-js/migrator";
 import postgres from "postgres";
@@ -1446,22 +1446,6 @@ const devMigrationsSchema = t.object({
 created_at: t.string()
 });
 
-//#endregion
-//#region ../../src/orm/errors/DbMigrationError.ts
-var DbMigrationError = class extends DbError {
-name = "DbMigrationError";
-constructor(cause) {
-super("Failed to migrate database", cause);
-}
-};
-
-//#endregion
-//#region ../../src/orm/types/byte.ts
-/**
-* Postgres bytea type.
-*/
-const byte = customType({ dataType: () => "bytea" });
-
 //#endregion
 //#region ../../src/orm/services/ModelBuilder.ts
 /**
@@ -1554,288 +1538,6 @@ var ModelBuilder = class {
 }
 };
 
-//#endregion
-//#region ../../src/orm/services/PostgresModelBuilder.ts
-var PostgresModelBuilder = class extends ModelBuilder {
-schemas = /* @__PURE__ */ new Map();
-getPgSchema(name) {
-if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
-const nsp = name !== "public" ? this.schemas.get(name) : {
-enum: pgEnum,
-table: pgTable
-};
-if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
-return nsp;
-}
-buildTable(entity, options) {
-const tableName = entity.name;
-if (options.tables.has(tableName)) return;
-const nsp = this.getPgSchema(options.schema);
-const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
-const configFn = this.getTableConfig(entity, options.tables);
-const table = nsp.table(tableName, columns, configFn);
-options.tables.set(tableName, table);
-}
-buildSequence(sequence, options) {
-const sequenceName = sequence.name;
-if (options.sequences.has(sequenceName)) return;
-const nsp = this.getPgSchema(options.schema);
-options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
-}
-/**
-* Get PostgreSQL-specific config builder for the table.
-*/
-getTableConfig(entity, tables) {
-const pgBuilders = {
-index,
-uniqueIndex,
-unique,
-check,
-foreignKey
-};
-const tableResolver = (entityName) => {
-return tables.get(entityName);
-};
-return this.buildTableConfig(entity, pgBuilders, tableResolver);
-}
-schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
-return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
-let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
-if ("default" in value && value.default != null) col = col.default(value.default);
-if (PG_PRIMARY_KEY in value) col = col.primaryKey();
-if (PG_REF in value) {
-const config = value[PG_REF];
-col = col.references(() => {
-const ref = config.ref();
-const table = tables.get(ref.entity.name);
-if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
-const target = table[ref.name];
-if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
-return target;
-}, config.actions);
-}
-if (schema$1.required?.includes(key)) col = col.notNull();
-return {
-...columns,
-[key]: col
-};
-}, {});
-};
-mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
-const key = this.toColumnName(fieldName);
-if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
-if (t.schema.isInteger(value)) {
-if (PG_SERIAL in value) return pg$2.serial(key);
-if (PG_IDENTITY in value) {
-const options = value[PG_IDENTITY];
-if (options.mode === "byDefault") return pg$2.integer().generatedByDefaultAsIdentity(options);
-return pg$2.integer().generatedAlwaysAsIdentity(options);
-}
-return pg$2.integer(key);
-}
-if (t.schema.isBigInt(value)) {
-if (PG_IDENTITY in value) {
-const options = value[PG_IDENTITY];
-if (options.mode === "byDefault") return pg$2.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
-return pg$2.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
-}
-}
-if (t.schema.isNumber(value)) {
-if (PG_IDENTITY in value) {
-const options = value[PG_IDENTITY];
-if (options.mode === "byDefault") return pg$2.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
-return pg$2.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
-}
-if (value.format === "int64") return pg$2.bigint(key, { mode: "number" });
-return pg$2.numeric(key);
-}
-if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
-if (t.schema.isBoolean(value)) return pg$2.boolean(key);
-if (t.schema.isObject(value)) return schema(key, value);
-if (t.schema.isRecord(value)) return schema(key, value);
-const isTypeEnum = (value$1) => t.schema.isUnsafe(value$1) && "type" in value$1 && value$1.type === "string" && "enum" in value$1 && Array.isArray(value$1.enum);
-if (t.schema.isArray(value)) {
-if (t.schema.isObject(value.items)) return schema(key, value);
-if (t.schema.isRecord(value.items)) return schema(key, value);
-if (t.schema.isString(value.items)) return pg$2.text(key).array();
-if (t.schema.isInteger(value.items)) return pg$2.integer(key).array();
-if (t.schema.isNumber(value.items)) return pg$2.numeric(key).array();
-if (t.schema.isBoolean(value.items)) return pg$2.boolean(key).array();
-if (isTypeEnum(value.items)) return pg$2.text(key).array();
-}
-if (isTypeEnum(value)) {
-if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
-if (PG_ENUM in value && value[PG_ENUM]) {
-const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
-if (enums.has(enumName)) {
-const values = enums.get(enumName).enumValues.join(",");
-const newValues = value.enum.join(",");
-if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
-}
-enums.set(enumName, nsp.enum(enumName, value.enum));
-return enums.get(enumName)(key);
-}
-return this.mapStringToColumn(key, value);
-}
-throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
-};
-/**
-* Map a string to a PG column.
-*
-* @param key The key of the field.
-* @param value The value of the field.
-*/
-mapStringToColumn = (key, value) => {
-if ("format" in value) {
-if (value.format === "uuid") {
-if (PG_PRIMARY_KEY in value) return pg$2.uuid(key).defaultRandom();
-return pg$2.uuid(key);
-}
-if (value.format === "byte") return byte(key);
-if (value.format === "date-time") {
-if (PG_CREATED_AT in value) return pg$2.timestamp(key, {
-mode: "string",
-withTimezone: true
-}).defaultNow();
-if (PG_UPDATED_AT in value) return pg$2.timestamp(key, {
-mode: "string",
-withTimezone: true
-}).defaultNow();
-return pg$2.timestamp(key, {
-mode: "string",
-withTimezone: true
-});
-}
-if (value.format === "date") return pg$2.date(key, { mode: "string" });
-}
-return pg$2.text(key);
-};
-};
-
-//#endregion
-//#region ../../src/orm/providers/drivers/BunPostgresProvider.ts
-const envSchema$4 = t.object({
-DATABASE_URL: t.optional(t.text()),
-POSTGRES_SCHEMA: t.optional(t.text())
-});
-/**
-* Bun PostgreSQL provider using Drizzle ORM with Bun's native SQL client.
-*
-* This provider uses Bun's built-in SQL class for PostgreSQL connections,
-* which provides excellent performance on the Bun runtime.
-*
-* @example
-* ```ts
-* // Set DATABASE_URL environment variable
-* // DATABASE_URL=postgres://user:password@localhost:5432/database
-*
-* // Or configure programmatically
-* alepha.with({
-* provide: DatabaseProvider,
-* use: BunPostgresProvider,
-* });
-* ```
-*/
-var BunPostgresProvider = class extends DatabaseProvider {
-log = $logger();
-env = $env(envSchema$4);
-kit = $inject(DrizzleKitProvider);
-builder = $inject(PostgresModelBuilder);
-client;
-bunDb;
-dialect = "postgresql";
-get name() {
-return "bun-postgres";
-}
-/**
-* In testing mode, the schema name will be generated and deleted after the test.
-*/
-schemaForTesting = this.alepha.isTest() ? this.env.POSTGRES_SCHEMA?.startsWith("test_") ? this.env.POSTGRES_SCHEMA : this.generateTestSchemaName() : void 0;
-get url() {
-if (!this.env.DATABASE_URL) throw new AlephaError("DATABASE_URL is not defined in the environment");
-return this.env.DATABASE_URL;
-}
-/**
-* Execute a SQL statement.
-*/
-execute(statement) {
-try {
-return this.db.execute(statement);
-} catch (error) {
-throw new DbError("Error executing statement", error);
-}
-}
-/**
-* Get Postgres schema used by this provider.
-*/
-get schema() {
-if (this.schemaForTesting) return this.schemaForTesting;
-if (this.env.POSTGRES_SCHEMA) return this.env.POSTGRES_SCHEMA;
-return "public";
-}
-/**
-* Get the Drizzle Postgres database instance.
-*/
-get db() {
-if (!this.bunDb) throw new AlephaError("Database not initialized");
-return this.bunDb;
-}
-async executeMigrations(migrationsFolder) {
-const { migrate: migrate$3 } = await import("drizzle-orm/bun-sql/migrator");
-await migrate$3(this.bunDb, { migrationsFolder });
-}
-onStart = $hook({
-on: "start",
-handler: async () => {
-await this.connect();
-if (!this.alepha.isServerless()) try {
-await this.migrateLock.run();
-} catch (error) {
-throw new DbMigrationError(error);
-}
-}
-});
-onStop = $hook({
-on: "stop",
-handler: async () => {
-if (this.alepha.isTest() && this.schemaForTesting && this.schemaForTesting.startsWith("test_")) {
-if (!/^test_[a-z0-9_]+$/i.test(this.schemaForTesting)) throw new AlephaError(`Invalid test schema name: ${this.schemaForTesting}. Must match pattern: test_[a-z0-9_]+`);
-this.log.warn(`Deleting test schema '${this.schemaForTesting}' ...`);
-await this.execute(sql$1`DROP SCHEMA IF EXISTS ${sql$1.raw(this.schemaForTesting)} CASCADE`);
-this.log.info(`Test schema '${this.schemaForTesting}' deleted`);
-}
-await this.close();
-}
-});
-async connect() {
-this.log.debug("Connect ..");
-if (typeof Bun === "undefined") throw new AlephaError("BunPostgresProvider requires the Bun runtime. Use NodePostgresProvider for Node.js.");
-const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sql");
-const { SQL: SQL$1 } = await import("bun");
-this.client = new SQL$1(this.url);
-await this.client.unsafe("SELECT 1");
-this.bunDb = drizzle$3({
-client: this.client,
-logger: { logQuery: (query, params) => {
-this.log.trace(query, { params });
-} }
-});
-this.log.info("Connection OK");
-}
-async close() {
-if (this.client) {
-this.log.debug("Close...");
-await this.client.close();
-this.client = void 0;
-this.bunDb = void 0;
-this.log.info("Connection closed");
-}
-}
-migrateLock = $lock({ handler: async () => {
-await this.migrate();
-} });
-};
-
 //#endregion
 //#region ../../src/orm/services/SqliteModelBuilder.ts
 var SqliteModelBuilder = class extends ModelBuilder {
@@ -1894,16 +1596,16 @@ var SqliteModelBuilder = class extends ModelBuilder {
 const key = this.toColumnName(fieldName);
 if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
 if (t.schema.isInteger(value)) {
-if (PG_SERIAL in value || PG_IDENTITY in value) return pg$
-return pg$
+if (PG_SERIAL in value || PG_IDENTITY in value) return pg$2.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
+return pg$2.integer(key);
 }
 if (t.schema.isBigInt(value)) {
-if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$
-return pg$
+if (PG_PRIMARY_KEY in value || PG_IDENTITY in value) return pg$2.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
+return pg$2.integer(key, { mode: "number" });
 }
 if (t.schema.isNumber(value)) {
-if (PG_IDENTITY in value) return pg$
-return pg$
+if (PG_IDENTITY in value) return pg$2.integer(key, { mode: "number" }).primaryKey({ autoIncrement: true });
+return pg$2.numeric(key);
 }
 if (t.schema.isString(value)) return this.mapStringToSqliteColumn(key, value);
 if (t.schema.isBoolean(value)) return this.sqliteBool(key, value);
@@ -1924,8 +1626,8 @@ var SqliteModelBuilder = class extends ModelBuilder {
 };
 mapStringToSqliteColumn = (key, value) => {
 if (value.format === "uuid") {
-if (PG_PRIMARY_KEY in value) return pg$
-return pg$
+if (PG_PRIMARY_KEY in value) return pg$2.text(key).primaryKey().$defaultFn(() => randomUUID());
+return pg$2.text(key);
 }
 if (value.format === "byte") return this.sqliteJson(key, value);
 if (value.format === "date-time") {
@@ -1934,28 +1636,28 @@ var SqliteModelBuilder = class extends ModelBuilder {
 return this.sqliteDateTime(key, {});
 }
 if (value.format === "date") return this.sqliteDate(key, {});
-return pg$
+return pg$2.text(key);
 };
-sqliteJson = (name, document) => pg$
+sqliteJson = (name, document) => pg$2.customType({
 dataType: () => "text",
 toDriver: (value) => JSON.stringify(value),
 fromDriver: (value) => {
 return value && typeof value === "string" ? JSON.parse(value) : value;
 }
 })(name, { document }).$type();
-sqliteDateTime = pg$
+sqliteDateTime = pg$2.customType({
 dataType: () => "integer",
 toDriver: (value) => new Date(value).getTime(),
 fromDriver: (value) => {
 return new Date(value).toISOString();
 }
 });
-sqliteBool = pg$
+sqliteBool = pg$2.customType({
 dataType: () => "integer",
 toDriver: (value) => value ? 1 : 0,
 fromDriver: (value) => value === 1
 });
-sqliteDate = pg$
+sqliteDate = pg$2.customType({
 dataType: () => "integer",
 toDriver: (value) => new Date(value).getTime(),
 fromDriver: (value) => {
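For reference, the `sqliteJson` / `sqliteDateTime` / `sqliteBool` / `sqliteDate` helpers in the hunk above are ordinary drizzle-orm custom types. A minimal standalone sketch of the same boolean mapping follows; the table name "flags" and column name "enabled" are illustrative only, not part of the package:

```ts
import { customType, sqliteTable } from "drizzle-orm/sqlite-core";

// SQLite has no native boolean column, so booleans are persisted as 0/1 integers,
// mirroring the sqliteBool custom type shown in the bundle above.
const sqliteBool = customType<{ data: boolean; driverData: number }>({
	dataType: () => "integer",
	toDriver: (value) => (value ? 1 : 0),
	fromDriver: (value) => value === 1,
});

// Illustrative usage: a table with a single boolean-backed column.
const flags = sqliteTable("flags", {
	enabled: sqliteBool("enabled"),
});
```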
@@ -1964,109 +1666,6 @@ var SqliteModelBuilder = class extends ModelBuilder {
 });
 };
 
-//#endregion
-//#region ../../src/orm/providers/drivers/BunSqliteProvider.ts
-const envSchema$3 = t.object({ DATABASE_URL: t.optional(t.text()) });
-/**
-* Configuration options for the Bun SQLite database provider.
-*/
-const bunSqliteOptions = $atom({
-name: "alepha.postgres.bun-sqlite.options",
-schema: t.object({ path: t.optional(t.string({ description: "Filepath or :memory:. If empty, provider will use DATABASE_URL from env." })) }),
-default: {}
-});
-/**
-* Bun SQLite provider using Drizzle ORM with Bun's native SQLite client.
-*
-* This provider uses Bun's built-in `bun:sqlite` for SQLite connections,
-* which provides excellent performance on the Bun runtime.
-*
-* @example
-* ```ts
-* // Set DATABASE_URL environment variable
-* // DATABASE_URL=sqlite://./my-database.db
-*
-* // Or configure programmatically
-* alepha.with({
-* provide: DatabaseProvider,
-* use: BunSqliteProvider,
-* });
-*
-* // Or use options atom
-* alepha.store.mut(bunSqliteOptions, (old) => ({
-* ...old,
-* path: ":memory:",
-* }));
-* ```
-*/
-var BunSqliteProvider = class extends DatabaseProvider {
-kit = $inject(DrizzleKitProvider);
-log = $logger();
-env = $env(envSchema$3);
-builder = $inject(SqliteModelBuilder);
-options = $use(bunSqliteOptions);
-sqlite;
-bunDb;
-get name() {
-return "bun-sqlite";
-}
-dialect = "sqlite";
-get url() {
-const path = this.options.path ?? this.env.DATABASE_URL;
-if (path) {
-if (path.startsWith("postgres://")) throw new AlephaError("Postgres URL is not supported for SQLite provider.");
-return path;
-}
-if (this.alepha.isTest() || this.alepha.isServerless()) return ":memory:";
-else return "node_modules/.alepha/bun-sqlite.db";
-}
-get db() {
-if (!this.bunDb) throw new AlephaError("Database not initialized");
-return this.bunDb;
-}
-async execute(query) {
-return this.bunDb.all(query);
-}
-onStart = $hook({
-on: "start",
-handler: async () => {
-if (typeof Bun === "undefined") throw new AlephaError("BunSqliteProvider requires the Bun runtime. Use NodeSqliteProvider for Node.js.");
-const { Database } = await import("bun:sqlite");
-const { drizzle: drizzle$3 } = await import("drizzle-orm/bun-sqlite");
-const filepath = this.url.replace("sqlite://", "").replace("sqlite:", "");
-if (filepath !== ":memory:" && filepath !== "") {
-const dirname = filepath.split("/").slice(0, -1).join("/");
-if (dirname) await mkdir(dirname, { recursive: true }).catch(() => null);
-}
-this.sqlite = new Database(filepath);
-this.bunDb = drizzle$3({
-client: this.sqlite,
-logger: { logQuery: (query, params) => {
-this.log.trace(query, { params });
-} }
-});
-await this.migrate();
-this.log.info(`Using Bun SQLite database at ${filepath}`);
-}
-});
-onStop = $hook({
-on: "stop",
-handler: async () => {
-if (this.sqlite) {
-this.log.debug("Closing Bun SQLite connection...");
-this.sqlite.close();
-this.sqlite = void 0;
-this.bunDb = void 0;
-this.log.info("Bun SQLite connection closed");
-}
-}
-});
-async executeMigrations(migrationsFolder) {
-const { migrate: migrate$3 } = await import("drizzle-orm/bun-sqlite/migrator");
-await migrate$3(this.bunDb, { migrationsFolder });
-}
-};
-
 //#endregion
 //#region ../../src/orm/providers/drivers/CloudflareD1Provider.ts
 /**
@@ -2143,6 +1742,180 @@ var CloudflareD1Provider = class extends DatabaseProvider {
 }
 };
 
+//#endregion
+//#region ../../src/orm/errors/DbMigrationError.ts
+var DbMigrationError = class extends DbError {
+name = "DbMigrationError";
+constructor(cause) {
+super("Failed to migrate database", cause);
+}
+};
+
+//#endregion
+//#region ../../src/orm/types/byte.ts
+/**
+* Postgres bytea type.
+*/
+const byte = customType({ dataType: () => "bytea" });
+
+//#endregion
+//#region ../../src/orm/services/PostgresModelBuilder.ts
+var PostgresModelBuilder = class extends ModelBuilder {
+schemas = /* @__PURE__ */ new Map();
+getPgSchema(name) {
+if (!this.schemas.has(name) && name !== "public") this.schemas.set(name, pgSchema(name));
+const nsp = name !== "public" ? this.schemas.get(name) : {
+enum: pgEnum,
+table: pgTable
+};
+if (!nsp) throw new AlephaError(`Postgres schema ${name} not found`);
+return nsp;
+}
+buildTable(entity, options) {
+const tableName = entity.name;
+if (options.tables.has(tableName)) return;
+const nsp = this.getPgSchema(options.schema);
+const columns = this.schemaToPgColumns(tableName, entity.schema, nsp, options.enums, options.tables);
+const configFn = this.getTableConfig(entity, options.tables);
+const table = nsp.table(tableName, columns, configFn);
+options.tables.set(tableName, table);
+}
+buildSequence(sequence, options) {
+const sequenceName = sequence.name;
+if (options.sequences.has(sequenceName)) return;
+const nsp = this.getPgSchema(options.schema);
+options.sequences.set(sequenceName, nsp.sequence(sequenceName, sequence.options));
+}
+/**
+* Get PostgreSQL-specific config builder for the table.
+*/
+getTableConfig(entity, tables) {
+const pgBuilders = {
+index,
+uniqueIndex,
+unique,
+check,
+foreignKey
+};
+const tableResolver = (entityName) => {
+return tables.get(entityName);
+};
+return this.buildTableConfig(entity, pgBuilders, tableResolver);
+}
+schemaToPgColumns = (tableName, schema$1, nsp, enums, tables) => {
+return Object.entries(schema$1.properties).reduce((columns, [key, value]) => {
+let col = this.mapFieldToColumn(tableName, key, value, nsp, enums);
+if ("default" in value && value.default != null) col = col.default(value.default);
+if (PG_PRIMARY_KEY in value) col = col.primaryKey();
+if (PG_REF in value) {
+const config = value[PG_REF];
+col = col.references(() => {
+const ref = config.ref();
+const table = tables.get(ref.entity.name);
+if (!table) throw new AlephaError(`Referenced table ${ref.entity.name} not found for ${tableName}.${key}`);
+const target = table[ref.name];
+if (!target) throw new AlephaError(`Referenced column ${ref.name} not found in table ${ref.entity.name} for ${tableName}.${key}`);
+return target;
+}, config.actions);
+}
+if (schema$1.required?.includes(key)) col = col.notNull();
+return {
+...columns,
+[key]: col
+};
+}, {});
+};
+mapFieldToColumn = (tableName, fieldName, value, nsp, enums) => {
+const key = this.toColumnName(fieldName);
+if ("anyOf" in value && Array.isArray(value.anyOf) && value.anyOf.length === 2 && value.anyOf.some((it) => t.schema.isNull(it))) value = value.anyOf.find((it) => !t.schema.isNull(it));
+if (t.schema.isInteger(value)) {
+if (PG_SERIAL in value) return pg$1.serial(key);
+if (PG_IDENTITY in value) {
+const options = value[PG_IDENTITY];
+if (options.mode === "byDefault") return pg$1.integer().generatedByDefaultAsIdentity(options);
+return pg$1.integer().generatedAlwaysAsIdentity(options);
+}
+return pg$1.integer(key);
+}
+if (t.schema.isBigInt(value)) {
+if (PG_IDENTITY in value) {
+const options = value[PG_IDENTITY];
+if (options.mode === "byDefault") return pg$1.bigint({ mode: "bigint" }).generatedByDefaultAsIdentity(options);
+return pg$1.bigint({ mode: "bigint" }).generatedAlwaysAsIdentity(options);
+}
+}
+if (t.schema.isNumber(value)) {
+if (PG_IDENTITY in value) {
+const options = value[PG_IDENTITY];
+if (options.mode === "byDefault") return pg$1.bigint({ mode: "number" }).generatedByDefaultAsIdentity(options);
+return pg$1.bigint({ mode: "number" }).generatedAlwaysAsIdentity(options);
+}
+if (value.format === "int64") return pg$1.bigint(key, { mode: "number" });
+return pg$1.numeric(key);
+}
+if (t.schema.isString(value)) return this.mapStringToColumn(key, value);
+if (t.schema.isBoolean(value)) return pg$1.boolean(key);
+if (t.schema.isObject(value)) return schema(key, value);
+if (t.schema.isRecord(value)) return schema(key, value);
+const isTypeEnum = (value$1) => t.schema.isUnsafe(value$1) && "type" in value$1 && value$1.type === "string" && "enum" in value$1 && Array.isArray(value$1.enum);
+if (t.schema.isArray(value)) {
+if (t.schema.isObject(value.items)) return schema(key, value);
+if (t.schema.isRecord(value.items)) return schema(key, value);
+if (t.schema.isString(value.items)) return pg$1.text(key).array();
+if (t.schema.isInteger(value.items)) return pg$1.integer(key).array();
+if (t.schema.isNumber(value.items)) return pg$1.numeric(key).array();
+if (t.schema.isBoolean(value.items)) return pg$1.boolean(key).array();
+if (isTypeEnum(value.items)) return pg$1.text(key).array();
+}
+if (isTypeEnum(value)) {
+if (!value.enum.every((it) => typeof it === "string")) throw new AlephaError(`Enum for ${fieldName} must be an array of strings, got ${JSON.stringify(value.enum)}`);
+if (PG_ENUM in value && value[PG_ENUM]) {
+const enumName = value[PG_ENUM].name ?? `${tableName}_${key}_enum`;
+if (enums.has(enumName)) {
+const values = enums.get(enumName).enumValues.join(",");
+const newValues = value.enum.join(",");
+if (values !== newValues) throw new AlephaError(`Enum name conflict for ${enumName}: [${values}] vs [${newValues}]`);
+}
+enums.set(enumName, nsp.enum(enumName, value.enum));
+return enums.get(enumName)(key);
+}
+return this.mapStringToColumn(key, value);
+}
+throw new AlephaError(`Unsupported schema type for ${fieldName} as ${JSON.stringify(value)}`);
+};
+/**
+* Map a string to a PG column.
+*
+* @param key The key of the field.
+* @param value The value of the field.
+*/
+mapStringToColumn = (key, value) => {
+if ("format" in value) {
+if (value.format === "uuid") {
+if (PG_PRIMARY_KEY in value) return pg$1.uuid(key).defaultRandom();
+return pg$1.uuid(key);
+}
+if (value.format === "byte") return byte(key);
+if (value.format === "date-time") {
+if (PG_CREATED_AT in value) return pg$1.timestamp(key, {
+mode: "string",
+withTimezone: true
+}).defaultNow();
+if (PG_UPDATED_AT in value) return pg$1.timestamp(key, {
+mode: "string",
+withTimezone: true
+}).defaultNow();
+return pg$1.timestamp(key, {
+mode: "string",
+withTimezone: true
+});
+}
+if (value.format === "date") return pg$1.date(key, { mode: "string" });
+}
+return pg$1.text(key);
+};
+};
+
 //#endregion
 //#region ../../src/orm/providers/drivers/NodePostgresProvider.ts
 const envSchema$2 = t.object({
@@ -2935,8 +2708,6 @@ const AlephaPostgres = $module({
 NodePostgresProvider,
 PglitePostgresProvider,
 NodeSqliteProvider,
-BunPostgresProvider,
-BunSqliteProvider,
 CloudflareD1Provider,
 SqliteModelBuilder,
 PostgresModelBuilder,
@@ -2976,18 +2747,18 @@ const AlephaPostgres = $module({
 alepha.with({
 optional: true,
 provide: DatabaseProvider,
-use:
+use: NodePostgresProvider
 });
 return;
 }
 alepha.with({
 optional: true,
 provide: DatabaseProvider,
-use:
+use: NodeSqliteProvider
 });
 }
 });
 
 //#endregion
-export { $entity, $repository, $sequence, $transaction, AlephaPostgres,
+export { $entity, $repository, $sequence, $transaction, AlephaPostgres, CloudflareD1Provider, DatabaseProvider, DatabaseTypeProvider, DbConflictError, DbEntityNotFoundError, DbError, DbMigrationError, DbVersionMismatchError, DrizzleKitProvider, EntityPrimitive, NodePostgresProvider, NodeSqliteProvider, PG_CREATED_AT, PG_DEFAULT, PG_DELETED_AT, PG_ENUM, PG_IDENTITY, PG_PRIMARY_KEY, PG_REF, PG_SERIAL, PG_UPDATED_AT, PG_VERSION, Repository, RepositoryProvider, SequencePrimitive, buildQueryString, db, drizzle, getAttrFields, insertSchema, legacyIdSchema, nodeSqliteOptions, pageQuerySchema, pageSchema, parseQueryString, pg, pgAttr, schema, sql, updateSchema };
 //# sourceMappingURL=index.js.map
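In the hunks above, `BunPostgresProvider` and `BunSqliteProvider` no longer appear in the `AlephaPostgres` module registration or in this bundle's export list, and the fallback wiring now names `NodePostgresProvider` and `NodeSqliteProvider` directly. A minimal sketch of pinning a driver explicitly, using the same `alepha.with({ provide, use })` shape shown in this file; the `alepha/orm` import path and the `Alepha` instance type are assumptions of this sketch, not something the diff confirms:

```ts
// Sketch only: the override shape is taken from the bundle above; the
// "alepha/orm" subpath and helper name are illustrative assumptions.
import type { Alepha } from "alepha";
import { DatabaseProvider, NodePostgresProvider } from "alepha/orm";

// Given an already-created Alepha container, pin the Postgres driver explicitly
// instead of relying on the DATABASE_URL-based fallback registered by AlephaPostgres.
export function useNodePostgres(alepha: Alepha): void {
	alepha.with({
		provide: DatabaseProvider,
		use: NodePostgresProvider,
	});
}
```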