@llmops/core 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{bun-sqlite-dialect-BY9UbL3J.cjs → bun-sqlite-dialect-Dzur5J1r.cjs} +1 -1
- package/dist/db/index.cjs +1 -1
- package/dist/db/index.d.cts +2 -2
- package/dist/db/index.d.mts +2 -2
- package/dist/db/index.mjs +1 -1
- package/dist/{db-DCfAacY7.mjs → db-DN6gkyRp.mjs} +42 -7
- package/dist/{db-16I7_mus.cjs → db-gMDwCuHR.cjs} +43 -8
- package/dist/{index-BKx9C2tK.d.mts → index-DGUMebzL.d.cts} +28 -4
- package/dist/{index-9HhrwwZi.d.cts → index-DbVk2Vpc.d.mts} +51 -27
- package/dist/index.cjs +3 -2
- package/dist/index.d.cts +148 -135
- package/dist/index.d.mts +325 -312
- package/dist/index.mjs +3 -2
- package/dist/{node-sqlite-dialect-CuJrXyHn.cjs → node-sqlite-dialect-BTbo14Y2.cjs} +1 -1
- package/package.json +2 -2
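Summary of the functional change in 0.1.1: optional PostgreSQL schema support. createDatabaseFromConnection and the migration helpers now accept an options object with a schema field (default "llmops"); the schema is applied via search_path and created on demand during migrations. A minimal usage sketch, assuming the package exposes its db entry point as a "@llmops/core/db" subpath (the pg Pool wiring is illustrative; only the schema option comes from this diff):

// Sketch only: "@llmops/core/db" and the Pool setup are assumptions, not confirmed by this diff.
import { Pool } from "pg";
import { createDatabaseFromConnection, runAutoMigrations } from "@llmops/core/db";

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

// schema defaults to "llmops" when omitted; each pooled connection gets SET search_path.
const db = await createDatabaseFromConnection(pool, { schema: "llmops" });
if (db) {
  // A non-"public" schema is created with CREATE SCHEMA IF NOT EXISTS before migrating.
  await runAutoMigrations(db, "postgres", true, { schema: "llmops" });
}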
package/dist/db/index.cjs
CHANGED
package/dist/db/index.d.cts
CHANGED
@@ -1,2 +1,2 @@
-import { A as
-export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseType, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, Insertable, MigrationResult, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, detectDatabaseType, environmentSecretsSchema, environmentsSchema, getMigrations, matchType, parsePartialTableData, parseTableData, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema };
+import { A as TargetingRulesTable, B as schemas, C as EnvironmentSecretsTable, D as Selectable, E as SCHEMA_METADATA, F as VariantsTable, H as variantVersionsSchema, I as configVariantsSchema, L as configsSchema, M as Variant, N as VariantVersion, O as TableName, P as VariantVersionsTable, R as environmentSecretsSchema, S as EnvironmentSecret, T as Insertable, U as variantsSchema, V as targetingRulesSchema, _ as ConfigVariant, a as createDatabaseFromConnection, b as Database, c as MigrationResult, d as runAutoMigrations, f as parsePartialTableData, g as Config, h as validateTableData, i as createDatabase, j as Updateable, k as TargetingRule, l as getMigrations, m as validatePartialTableData, n as DatabaseOptions, o as detectDatabaseType, p as parseTableData, r as DatabaseType, s as MigrationOptions, t as DatabaseConnection, u as matchType, v as ConfigVariantsTable, w as EnvironmentsTable, x as Environment, y as ConfigsTable, z as environmentsSchema } from "../index-DGUMebzL.cjs";
+export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseOptions, DatabaseType, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, Insertable, MigrationOptions, MigrationResult, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, detectDatabaseType, environmentSecretsSchema, environmentsSchema, getMigrations, matchType, parsePartialTableData, parseTableData, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema };
package/dist/db/index.d.mts
CHANGED
@@ -1,2 +1,2 @@
-import { A as
-export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseType, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, Insertable, MigrationResult, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, detectDatabaseType, environmentSecretsSchema, environmentsSchema, getMigrations, matchType, parsePartialTableData, parseTableData, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema };
+import { A as TargetingRulesTable, B as schemas, C as EnvironmentSecretsTable, D as Selectable, E as SCHEMA_METADATA, F as VariantsTable, H as variantVersionsSchema, I as configVariantsSchema, L as configsSchema, M as Variant, N as VariantVersion, O as TableName, P as VariantVersionsTable, R as environmentSecretsSchema, S as EnvironmentSecret, T as Insertable, U as variantsSchema, V as targetingRulesSchema, _ as ConfigVariant, a as createDatabaseFromConnection, b as Database, c as MigrationResult, d as runAutoMigrations, f as parsePartialTableData, g as Config, h as validateTableData, i as createDatabase, j as Updateable, k as TargetingRule, l as getMigrations, m as validatePartialTableData, n as DatabaseOptions, o as detectDatabaseType, p as parseTableData, r as DatabaseType, s as MigrationOptions, t as DatabaseConnection, u as matchType, v as ConfigVariantsTable, w as EnvironmentsTable, x as Environment, y as ConfigsTable, z as environmentsSchema } from "../index-DbVk2Vpc.mjs";
+export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseOptions, DatabaseType, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, Insertable, MigrationOptions, MigrationResult, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, detectDatabaseType, environmentSecretsSchema, environmentsSchema, getMigrations, matchType, parsePartialTableData, parseTableData, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema };
package/dist/db/index.mjs
CHANGED
@@ -1,3 +1,3 @@
-import { _ as schemas, a as matchType, b as variantsSchema, c as parsePartialTableData, d as validateTableData, f as SCHEMA_METADATA, g as environmentsSchema, h as environmentSecretsSchema, i as getMigrations, l as parseTableData, m as configsSchema, n as createDatabaseFromConnection, o as runAutoMigrations, p as configVariantsSchema, r as detectDatabaseType, t as createDatabase, u as validatePartialTableData, v as targetingRulesSchema, y as variantVersionsSchema } from "../db-
+import { _ as schemas, a as matchType, b as variantsSchema, c as parsePartialTableData, d as validateTableData, f as SCHEMA_METADATA, g as environmentsSchema, h as environmentSecretsSchema, i as getMigrations, l as parseTableData, m as configsSchema, n as createDatabaseFromConnection, o as runAutoMigrations, p as configVariantsSchema, r as detectDatabaseType, t as createDatabase, u as validatePartialTableData, v as targetingRulesSchema, y as variantVersionsSchema } from "../db-DN6gkyRp.mjs";
 
 export { SCHEMA_METADATA, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, detectDatabaseType, environmentSecretsSchema, environmentsSchema, getMigrations, matchType, parsePartialTableData, parseTableData, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema };
package/dist/{db-DCfAacY7.mjs → db-DN6gkyRp.mjs}
CHANGED
@@ -1,4 +1,4 @@
-import { Kysely, MssqlDialect, MysqlDialect, PostgresDialect, SqliteDialect, sql } from "kysely";
+import { CompiledQuery, Kysely, MssqlDialect, MysqlDialect, PostgresDialect, SqliteDialect, sql } from "kysely";
 import pino from "pino";
 
 //#region rolldown:runtime
@@ -12767,10 +12767,35 @@ async function getPostgresSchema(db) {
 } catch {}
 return "public";
 }
-
+/**
+* Ensure the PostgreSQL schema exists, creating it if necessary
+*/
+async function ensurePostgresSchemaExists(db, schema) {
+if (schema === "public") return;
+try {
+if (!(await sql`
+SELECT EXISTS (
+SELECT 1 FROM information_schema.schemata
+WHERE schema_name = ${schema}
+) as exists
+`.execute(db)).rows[0]?.exists) {
+logger.info(`Creating PostgreSQL schema: ${schema}`);
+await sql`CREATE SCHEMA IF NOT EXISTS ${sql.ref(schema)}`.execute(db);
+}
+} catch (error$45) {
+logger.warn(`Could not ensure schema exists: ${error$45}`);
+try {
+await sql`CREATE SCHEMA IF NOT EXISTS ${sql.ref(schema)}`.execute(db);
+} catch {}
+}
+}
+async function getMigrations(db, dbType, options) {
 let currentSchema = "public";
 if (dbType === "postgres") {
-
+if (options?.schema) {
+await ensurePostgresSchemaExists(db, options.schema);
+currentSchema = options.schema;
+} else currentSchema = await getPostgresSchema(db);
 logger.debug(`PostgreSQL migration: Using schema '${currentSchema}'`);
 }
 const allTableMetadata = await db.introspection.getTables();
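The hunk above adds ensurePostgresSchemaExists and threads an options argument through getMigrations, so a non-default schema can be passed straight to the migration entry points. A hedged sketch (the "tenant_a" schema name and the import path are illustrative):

import type { Kysely } from "kysely";
import { getMigrations, runAutoMigrations, type Database } from "@llmops/core/db";

declare const db: Kysely<Database>; // a connected instance, obtained elsewhere

// With options.schema set, the schema is created first if it does not exist,
// instead of falling back to the connection's current search_path.
const plan = await getMigrations(db, "postgres", { schema: "tenant_a" });
if (plan.needsMigration) {
  await runAutoMigrations(db, "postgres", true, { schema: "tenant_a" });
}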
@@ -12919,15 +12944,16 @@ async function getMigrations(db, dbType) {
 * @param db - Kysely database instance
 * @param dbType - Database type
 * @param autoMigrate - Auto-migrate configuration
+* @param options - Migration options (schema, etc.)
 * @returns true if migrations were run, false otherwise
 */
-async function runAutoMigrations(db, dbType, autoMigrate) {
+async function runAutoMigrations(db, dbType, autoMigrate, options) {
 if (!(autoMigrate === true || autoMigrate === "development" && process.env.NODE_ENV === "development")) return {
 ran: false,
 tables: [],
 fields: []
 };
-const { toBeCreated, toBeAdded, runMigrations, needsMigration } = await getMigrations(db, dbType);
+const { toBeCreated, toBeAdded, runMigrations, needsMigration } = await getMigrations(db, dbType, options);
 if (!needsMigration) {
 logger.debug("Auto-migration: No migrations needed");
 return {
@@ -12979,11 +13005,15 @@ function detectDatabaseType(db) {
 }
 /**
 * Create database from raw connection
+*
+* @param rawConnection - The raw database connection (pg Pool, sqlite Database, etc.)
+* @param options - Optional configuration (schema for PostgreSQL)
 */
-async function createDatabaseFromConnection(rawConnection) {
+async function createDatabaseFromConnection(rawConnection, options) {
 const dbType = detectDatabaseType(rawConnection);
 if (!dbType) return null;
 let dialect;
+const schema = options?.schema ?? "llmops";
 switch (dbType) {
 case "sqlite":
 if ("aggregate" in rawConnection && !("createSession" in rawConnection)) dialect = new SqliteDialect({ database: rawConnection });
@@ -13002,7 +13032,12 @@ async function createDatabaseFromConnection(rawConnection) {
 dialect = new MysqlDialect(rawConnection);
 break;
 case "postgres":
-dialect = new PostgresDialect({
+dialect = new PostgresDialect({
+pool: rawConnection,
+onCreateConnection: async (connection) => {
+await connection.executeQuery(CompiledQuery.raw(`SET search_path TO "${schema}"`));
+}
+});
 break;
 case "mssql":
 if ("createDriver" in rawConnection) dialect = rawConnection;
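Note on the PostgresDialect change above: the schema is applied per connection through Kysely's onCreateConnection hook rather than by qualifying every query. The same mechanism, shown outside the bundle as a standalone sketch (connection details are illustrative):

import { CompiledQuery, Kysely, PostgresDialect } from "kysely";
import { Pool } from "pg";

const schema = "llmops"; // the package default

const db = new Kysely<any>({
  dialect: new PostgresDialect({
    pool: new Pool({ connectionString: process.env.DATABASE_URL }),
    // Runs once per new pooled connection, before Kysely hands it out.
    onCreateConnection: async (connection) => {
      await connection.executeQuery(CompiledQuery.raw(`SET search_path TO "${schema}"`));
    },
  }),
});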
package/dist/{db-16I7_mus.cjs → db-gMDwCuHR.cjs}
CHANGED
@@ -12791,10 +12791,35 @@ async function getPostgresSchema(db) {
 } catch {}
 return "public";
 }
-
+/**
+* Ensure the PostgreSQL schema exists, creating it if necessary
+*/
+async function ensurePostgresSchemaExists(db, schema) {
+if (schema === "public") return;
+try {
+if (!(await kysely.sql`
+SELECT EXISTS (
+SELECT 1 FROM information_schema.schemata
+WHERE schema_name = ${schema}
+) as exists
+`.execute(db)).rows[0]?.exists) {
+logger.info(`Creating PostgreSQL schema: ${schema}`);
+await kysely.sql`CREATE SCHEMA IF NOT EXISTS ${kysely.sql.ref(schema)}`.execute(db);
+}
+} catch (error$45) {
+logger.warn(`Could not ensure schema exists: ${error$45}`);
+try {
+await kysely.sql`CREATE SCHEMA IF NOT EXISTS ${kysely.sql.ref(schema)}`.execute(db);
+} catch {}
+}
+}
+async function getMigrations(db, dbType, options) {
 let currentSchema = "public";
 if (dbType === "postgres") {
-
+if (options?.schema) {
+await ensurePostgresSchemaExists(db, options.schema);
+currentSchema = options.schema;
+} else currentSchema = await getPostgresSchema(db);
 logger.debug(`PostgreSQL migration: Using schema '${currentSchema}'`);
 }
 const allTableMetadata = await db.introspection.getTables();
@@ -12943,15 +12968,16 @@ async function getMigrations(db, dbType) {
 * @param db - Kysely database instance
 * @param dbType - Database type
 * @param autoMigrate - Auto-migrate configuration
+* @param options - Migration options (schema, etc.)
 * @returns true if migrations were run, false otherwise
 */
-async function runAutoMigrations(db, dbType, autoMigrate) {
+async function runAutoMigrations(db, dbType, autoMigrate, options) {
 if (!(autoMigrate === true || autoMigrate === "development" && process.env.NODE_ENV === "development")) return {
 ran: false,
 tables: [],
 fields: []
 };
-const { toBeCreated, toBeAdded, runMigrations, needsMigration } = await getMigrations(db, dbType);
+const { toBeCreated, toBeAdded, runMigrations, needsMigration } = await getMigrations(db, dbType, options);
 if (!needsMigration) {
 logger.debug("Auto-migration: No migrations needed");
 return {
@@ -13003,21 +13029,25 @@ function detectDatabaseType(db) {
 }
 /**
 * Create database from raw connection
+*
+* @param rawConnection - The raw database connection (pg Pool, sqlite Database, etc.)
+* @param options - Optional configuration (schema for PostgreSQL)
 */
-async function createDatabaseFromConnection(rawConnection) {
+async function createDatabaseFromConnection(rawConnection, options) {
 const dbType = detectDatabaseType(rawConnection);
 if (!dbType) return null;
 let dialect;
+const schema = options?.schema ?? "llmops";
 switch (dbType) {
 case "sqlite":
 if ("aggregate" in rawConnection && !("createSession" in rawConnection)) dialect = new kysely.SqliteDialect({ database: rawConnection });
 else if ("fileControl" in rawConnection) {
-const { BunSqliteDialect } = await Promise.resolve().then(() => require("./bun-sqlite-dialect-
+const { BunSqliteDialect } = await Promise.resolve().then(() => require("./bun-sqlite-dialect-Dzur5J1r.cjs"));
 dialect = new BunSqliteDialect({ database: rawConnection });
 } else if ("createSession" in rawConnection && typeof window === "undefined") try {
 const { DatabaseSync } = await import("node:sqlite");
 if (rawConnection instanceof DatabaseSync) {
-const { NodeSqliteDialect } = await Promise.resolve().then(() => require("./node-sqlite-dialect-
+const { NodeSqliteDialect } = await Promise.resolve().then(() => require("./node-sqlite-dialect-BTbo14Y2.cjs"));
 dialect = new NodeSqliteDialect({ database: rawConnection });
 }
 } catch {}
@@ -13026,7 +13056,12 @@ async function createDatabaseFromConnection(rawConnection) {
 dialect = new kysely.MysqlDialect(rawConnection);
 break;
 case "postgres":
-dialect = new kysely.PostgresDialect({
+dialect = new kysely.PostgresDialect({
+pool: rawConnection,
+onCreateConnection: async (connection) => {
+await connection.executeQuery(kysely.CompiledQuery.raw(`SET search_path TO "${schema}"`));
+}
+});
 break;
 case "mssql":
 if ("createDriver" in rawConnection) dialect = rawConnection;
package/dist/{index-BKx9C2tK.d.mts → index-DGUMebzL.d.cts}
CHANGED
@@ -788,6 +788,16 @@ declare function parsePartialTableData<T extends TableName>(table: T, data: unkn
 //#endregion
 //#region src/db/migrations.d.ts
 type DatabaseType$1 = 'postgres' | 'mysql' | 'sqlite' | 'mssql';
+/**
+* Options for migration operations
+*/
+interface MigrationOptions {
+/**
+* PostgreSQL schema name to use.
+* If provided, the schema will be created if it doesn't exist.
+*/
+schema?: string;
+}
 declare function matchType(columnDataType: string, fieldType: string, dbType: DatabaseType$1): boolean;
 interface MigrationResult {
 toBeCreated: Array<{
@@ -805,15 +815,16 @@ interface MigrationResult {
 migrations: any[];
 needsMigration: boolean;
 }
-declare function getMigrations(db: Kysely<Database>, dbType: DatabaseType$1): Promise<MigrationResult>;
+declare function getMigrations(db: Kysely<Database>, dbType: DatabaseType$1, options?: MigrationOptions): Promise<MigrationResult>;
 /**
 * Run migrations if needed based on autoMigrate config
 * @param db - Kysely database instance
 * @param dbType - Database type
 * @param autoMigrate - Auto-migrate configuration
+* @param options - Migration options (schema, etc.)
 * @returns true if migrations were run, false otherwise
 */
-declare function runAutoMigrations(db: Kysely<Database>, dbType: DatabaseType$1, autoMigrate: boolean | 'development'): Promise<{
+declare function runAutoMigrations(db: Kysely<Database>, dbType: DatabaseType$1, autoMigrate: boolean | 'development', options?: MigrationOptions): Promise<{
 ran: boolean;
 tables: string[];
 fields: string[];
@@ -824,6 +835,16 @@ declare function runAutoMigrations(db: Kysely<Database>, dbType: DatabaseType$1,
 * Supported database types
 */
 type DatabaseType = 'postgres' | 'mysql' | 'sqlite' | 'mssql';
+/**
+* Options for creating a database connection
+*/
+interface DatabaseOptions {
+/**
+* PostgreSQL schema name (sets search_path).
+* Defaults to 'llmops'.
+*/
+schema?: string;
+}
 /**
 * Database connection options
 */
@@ -853,7 +874,10 @@ declare function createDatabase(connection: DatabaseConnection): Kysely<Database
 declare function detectDatabaseType(db: unknown): DatabaseType | null;
 /**
 * Create database from raw connection
+*
+* @param rawConnection - The raw database connection (pg Pool, sqlite Database, etc.)
+* @param options - Optional configuration (schema for PostgreSQL)
 */
-declare function createDatabaseFromConnection(rawConnection: any): Promise<Kysely<Database> | null>;
+declare function createDatabaseFromConnection(rawConnection: any, options?: DatabaseOptions): Promise<Kysely<Database> | null>;
 //#endregion
-export {
+export { TargetingRulesTable as A, schemas as B, EnvironmentSecretsTable as C, Selectable as D, SCHEMA_METADATA as E, VariantsTable as F, variantVersionsSchema as H, configVariantsSchema as I, configsSchema as L, Variant as M, VariantVersion as N, TableName as O, VariantVersionsTable as P, environmentSecretsSchema as R, EnvironmentSecret as S, Insertable as T, variantsSchema as U, targetingRulesSchema as V, ConfigVariant as _, createDatabaseFromConnection as a, Database as b, MigrationResult as c, runAutoMigrations as d, parsePartialTableData as f, Config as g, validateTableData as h, createDatabase as i, Updateable as j, TargetingRule as k, getMigrations as l, validatePartialTableData as m, DatabaseOptions as n, detectDatabaseType as o, parseTableData as p, DatabaseType as r, MigrationOptions as s, DatabaseConnection as t, matchType as u, ConfigVariantsTable as v, EnvironmentsTable as w, Environment as x, ConfigsTable as y, environmentsSchema as z };
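DatabaseOptions and MigrationOptions are now part of the public type surface (note the n and s aliases in the export map above), so callers can type their own option objects. A short sketch, again assuming the db subpath export:

import type { DatabaseOptions, MigrationOptions } from "@llmops/core/db";

// Both interfaces currently expose a single optional field.
const dbOptions: DatabaseOptions = { schema: "llmops" };         // sets search_path on connect
const migrationOptions: MigrationOptions = { schema: "llmops" }; // created if missing during migration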
package/dist/{index-9HhrwwZi.d.cts → index-DbVk2Vpc.d.mts}
CHANGED
@@ -1,5 +1,5 @@
 import { ColumnType, Generated, Kysely, MssqlDialect, MysqlDialect, PostgresDialect, SqliteDialect } from "kysely";
-import * as
+import * as zod302 from "zod";
 import { z } from "zod";
 
 //#region src/db/schema.d.ts
@@ -520,29 +520,29 @@ declare const schemas: {
 * Validate data against table schema
 * Useful for runtime validation before inserting/updating
 */
-declare function validateTableData<T extends TableName>(table: T, data: unknown):
+declare function validateTableData<T extends TableName>(table: T, data: unknown): zod302.ZodSafeParseSuccess<{
 slug: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
 name?: string | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 slug: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
 name?: string | undefined;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 name: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseError<{
 name: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 variantId: string;
 version: number;
 provider: string;
@@ -551,7 +551,7 @@ declare function validateTableData<T extends TableName>(table: T, data: unknown)
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseError<{
 variantId: string;
 version: number;
 provider: string;
@@ -560,33 +560,33 @@ declare function validateTableData<T extends TableName>(table: T, data: unknown)
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 environmentId: string;
 keyName: string;
 keyValue: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseError<{
 environmentId: string;
 keyName: string;
 keyValue: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 configId: string;
 variantId: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseError<{
 configId: string;
 variantId: string;
 id: string;
 createdAt: Date;
 updatedAt: Date;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 environmentId: string;
 configId: string;
 configVariantId: string;
@@ -598,7 +598,7 @@ declare function validateTableData<T extends TableName>(table: T, data: unknown)
 createdAt: Date;
 updatedAt: Date;
 variantVersionId?: string | null | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 environmentId: string;
 configId: string;
 configVariantId: string;
@@ -614,17 +614,17 @@ declare function validateTableData<T extends TableName>(table: T, data: unknown)
 /**
 * Validate partial data (for updates)
 */
-declare function validatePartialTableData<T extends TableName>(table: T, data: unknown):
+declare function validatePartialTableData<T extends TableName>(table: T, data: unknown): zod302.ZodSafeParseSuccess<{
 name?: string | undefined;
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 name?: string | undefined;
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 variantId?: string | undefined;
 version?: number | undefined;
 provider?: string | undefined;
@@ -633,7 +633,7 @@ declare function validatePartialTableData<T extends TableName>(table: T, data: u
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 variantId?: string | undefined;
 version?: number | undefined;
 provider?: string | undefined;
@@ -642,33 +642,33 @@ declare function validatePartialTableData<T extends TableName>(table: T, data: u
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 environmentId?: string | undefined;
 keyName?: string | undefined;
 keyValue?: string | undefined;
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 environmentId?: string | undefined;
 keyName?: string | undefined;
 keyValue?: string | undefined;
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 configId?: string | undefined;
 variantId?: string | undefined;
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 configId?: string | undefined;
 variantId?: string | undefined;
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseSuccess<{
 environmentId?: string | undefined;
 configId?: string | undefined;
 configVariantId?: string | undefined;
@@ -680,7 +680,7 @@ declare function validatePartialTableData<T extends TableName>(table: T, data: u
 id?: string | undefined;
 createdAt?: Date | undefined;
 updatedAt?: Date | undefined;
-}> |
+}> | zod302.ZodSafeParseError<{
 environmentId?: string | undefined;
 configId?: string | undefined;
 configVariantId?: string | undefined;
@@ -788,6 +788,16 @@ declare function parsePartialTableData<T extends TableName>(table: T, data: unkn
 //#endregion
 //#region src/db/migrations.d.ts
 type DatabaseType$1 = 'postgres' | 'mysql' | 'sqlite' | 'mssql';
+/**
+* Options for migration operations
+*/
+interface MigrationOptions {
+/**
+* PostgreSQL schema name to use.
+* If provided, the schema will be created if it doesn't exist.
+*/
+schema?: string;
+}
 declare function matchType(columnDataType: string, fieldType: string, dbType: DatabaseType$1): boolean;
 interface MigrationResult {
 toBeCreated: Array<{
@@ -805,15 +815,16 @@ interface MigrationResult {
 migrations: any[];
 needsMigration: boolean;
 }
-declare function getMigrations(db: Kysely<Database>, dbType: DatabaseType$1): Promise<MigrationResult>;
+declare function getMigrations(db: Kysely<Database>, dbType: DatabaseType$1, options?: MigrationOptions): Promise<MigrationResult>;
 /**
 * Run migrations if needed based on autoMigrate config
 * @param db - Kysely database instance
 * @param dbType - Database type
 * @param autoMigrate - Auto-migrate configuration
+* @param options - Migration options (schema, etc.)
 * @returns true if migrations were run, false otherwise
 */
-declare function runAutoMigrations(db: Kysely<Database>, dbType: DatabaseType$1, autoMigrate: boolean | 'development'): Promise<{
+declare function runAutoMigrations(db: Kysely<Database>, dbType: DatabaseType$1, autoMigrate: boolean | 'development', options?: MigrationOptions): Promise<{
 ran: boolean;
 tables: string[];
 fields: string[];
@@ -824,6 +835,16 @@ declare function runAutoMigrations(db: Kysely<Database>, dbType: DatabaseType$1,
 * Supported database types
 */
 type DatabaseType = 'postgres' | 'mysql' | 'sqlite' | 'mssql';
+/**
+* Options for creating a database connection
+*/
+interface DatabaseOptions {
+/**
+* PostgreSQL schema name (sets search_path).
+* Defaults to 'llmops'.
+*/
+schema?: string;
+}
 /**
 * Database connection options
 */
@@ -853,7 +874,10 @@ declare function createDatabase(connection: DatabaseConnection): Kysely<Database
 declare function detectDatabaseType(db: unknown): DatabaseType | null;
 /**
 * Create database from raw connection
+*
+* @param rawConnection - The raw database connection (pg Pool, sqlite Database, etc.)
+* @param options - Optional configuration (schema for PostgreSQL)
 */
-declare function createDatabaseFromConnection(rawConnection: any): Promise<Kysely<Database> | null>;
+declare function createDatabaseFromConnection(rawConnection: any, options?: DatabaseOptions): Promise<Kysely<Database> | null>;
 //#endregion
-export {
+export { TargetingRulesTable as A, schemas as B, EnvironmentSecretsTable as C, Selectable as D, SCHEMA_METADATA as E, VariantsTable as F, variantVersionsSchema as H, configVariantsSchema as I, configsSchema as L, Variant as M, VariantVersion as N, TableName as O, VariantVersionsTable as P, environmentSecretsSchema as R, EnvironmentSecret as S, Insertable as T, variantsSchema as U, targetingRulesSchema as V, ConfigVariant as _, createDatabaseFromConnection as a, Database as b, MigrationResult as c, runAutoMigrations as d, parsePartialTableData as f, Config as g, validateTableData as h, createDatabase as i, Updateable as j, TargetingRule as k, getMigrations as l, validatePartialTableData as m, DatabaseOptions as n, detectDatabaseType as o, parseTableData as p, DatabaseType as r, MigrationOptions as s, DatabaseConnection as t, matchType as u, ConfigVariantsTable as v, EnvironmentsTable as w, Environment as x, ConfigsTable as y, environmentsSchema as z };
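The declaration changes above only swap the safeParse result types over to the zod302 namespace; the success/error union callers pattern-match on is unchanged. A small consumption sketch (the "configs" table name and sample data are assumed from the configsSchema export, not spelled out in this diff):

import { validateTableData } from "@llmops/core/db";

const result = validateTableData("configs", {
  slug: "welcome-email",
  id: "cfg_1",
  createdAt: new Date(),
  updatedAt: new Date(),
});

// Narrowing on `success` works exactly as with any zod safeParse result.
if (result.success) {
  console.log("valid row", result.data);
} else {
  console.error(result.error.issues);
}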
package/dist/index.cjs
CHANGED
@@ -1,4 +1,4 @@
-const require_db = require('./db-
+const require_db = require('./db-gMDwCuHR.cjs');
 let __llmops_gateway = require("@llmops/gateway");
 __llmops_gateway = require_db.__toESM(__llmops_gateway);
 let __better_auth_utils_random = require("@better-auth/utils/random");
@@ -449,7 +449,8 @@ const llmopsConfigSchema = require_db.object({
 auth: authSchema,
 basePath: require_db.string().min(1, "Base path is required and cannot be empty").refine((path) => path.startsWith("/"), "Base path must start with a forward slash"),
 providers: providersSchema,
-autoMigrate: require_db.union([require_db.boolean(), require_db.literal("development")]).optional().default(false)
+autoMigrate: require_db.union([require_db.boolean(), require_db.literal("development")]).optional().default(false),
+schema: require_db.string().optional().default("llmops")
 });
 function validateLLMOpsConfig(config) {
 const result = llmopsConfigSchema.safeParse(config);
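The last hunk adds a top-level schema field (defaulting to "llmops") to llmopsConfigSchema, next to autoMigrate. A hedged sketch of the relevant slice of a config object (auth, basePath and providers are still required by the schema, but their shapes are outside this diff and the values below are illustrative):

// Only the fields touched by this release are shown.
const llmopsConfig = {
  basePath: "/llmops",
  autoMigrate: "development" as const, // boolean | "development"; defaults to false
  schema: "llmops",                    // new in 0.1.1; defaults to "llmops"
  // auth: ..., providers: ... (unchanged, omitted here)
};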