@llmops/core 0.4.8-beta.3 → 0.4.8-beta.5

This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registries.
@@ -1,4 +1,4 @@
- const require_db = require('./db-B9jJgYNw.cjs');
+ const require_db = require('./db-CEt1ZD2V.cjs');
  let kysely = require("kysely");

  //#region src/db/bun-sqlite-dialect.ts
package/dist/db/index.cjs CHANGED
@@ -1,5 +1,5 @@
- const require_db = require('../db-B9jJgYNw.cjs');
- const require_neon_dialect = require('../neon-dialect-BBWePgOv.cjs');
+ const require_db = require('../db-CEt1ZD2V.cjs');
+ const require_neon_dialect = require('../neon-dialect-sUVNvVYh.cjs');

  exports.SCHEMA_METADATA = require_db.SCHEMA_METADATA;
  exports.configVariantsSchema = require_db.configVariantsSchema;
@@ -7,6 +7,7 @@ exports.configsSchema = require_db.configsSchema;
  exports.createDatabase = require_db.createDatabase;
  exports.createDatabaseFromConnection = require_db.createDatabaseFromConnection;
  exports.createNeonDialect = require_neon_dialect.createNeonDialect;
+ exports.createNeonSqlFunction = require_db.createNeonSqlFunction;
  exports.datasetRecordsSchema = require_db.datasetRecordsSchema;
  exports.datasetVersionRecordsSchema = require_db.datasetVersionRecordsSchema;
  exports.datasetVersionsSchema = require_db.datasetVersionsSchema;
@@ -15,6 +16,7 @@ exports.detectDatabaseType = require_db.detectDatabaseType;
  exports.environmentSecretsSchema = require_db.environmentSecretsSchema;
  exports.environmentsSchema = require_db.environmentsSchema;
  exports.executeWithSchema = require_neon_dialect.executeWithSchema;
+ exports.generatePostgresSchemaSQL = require_db.generatePostgresSchemaSQL;
  exports.getMigrations = require_db.getMigrations;
  exports.guardrailConfigsSchema = require_db.guardrailConfigsSchema;
  exports.llmRequestsSchema = require_db.llmRequestsSchema;
@@ -28,6 +30,7 @@ exports.playgroundsSchema = require_db.playgroundsSchema;
  exports.providerConfigsSchema = require_db.providerConfigsSchema;
  exports.providerGuardrailOverridesSchema = require_db.providerGuardrailOverridesSchema;
  exports.runAutoMigrations = require_db.runAutoMigrations;
+ exports.runSchemaSQL = require_db.runSchemaSQL;
  exports.schemas = require_db.schemas;
  exports.targetingRulesSchema = require_db.targetingRulesSchema;
  exports.validatePartialTableData = require_db.validatePartialTableData;
@@ -1,2 +1,2 @@
- import { $ as SCHEMA_METADATA, A as DatasetsTable, At as variantsSchema, B as LLMRequest, C as Dataset, Ct as playgroundRunsSchema, D as DatasetVersionRecord, Dt as schemas, E as DatasetVersion, Et as providerGuardrailOverridesSchema, F as GuardrailConfig, G as PlaygroundResultsTable, H as Playground, I as GuardrailConfigsTable, J as PlaygroundsTable, K as PlaygroundRun, L as GuardrailResult, M as EnvironmentSecret, N as EnvironmentSecretsTable, O as DatasetVersionRecordsTable, Ot as targetingRulesSchema, P as EnvironmentsTable, Q as ProviderGuardrailOverridesTable, R as GuardrailResults, S as Database, St as playgroundResultsSchema, T as DatasetRecordsTable, Tt as providerConfigsSchema, U as PlaygroundColumn, V as LLMRequestsTable, W as PlaygroundResult, X as ProviderConfigsTable, Y as ProviderConfig, Z as ProviderGuardrailOverride, _ as validateTableData, _t as environmentSecretsSchema, a as createDatabaseFromConnection, at as Variant, b as ConfigVariantsTable, bt as llmRequestsSchema, c as executeWithSchema, ct as VariantsTable, d as getMigrations, dt as configVariantsSchema, et as Selectable, f as matchType, ft as configsSchema, g as validatePartialTableData, gt as datasetsSchema, h as parseTableData, ht as datasetVersionsSchema, i as createDatabase, it as Updateable, j as Environment, jt as workspaceSettingsSchema, k as DatasetVersionsTable, kt as variantVersionsSchema, l as MigrationOptions, lt as WorkspaceSettings, m as parsePartialTableData, mt as datasetVersionRecordsSchema, n as DatabaseOptions, nt as TargetingRule, o as detectDatabaseType, ot as VariantVersion, p as runAutoMigrations, pt as datasetRecordsSchema, q as PlaygroundRunsTable, r as DatabaseType, rt as TargetingRulesTable, s as createNeonDialect, st as VariantVersionsTable, t as DatabaseConnection, tt as TableName, u as MigrationResult, ut as WorkspaceSettingsTable, v as Config, vt as environmentsSchema, w as DatasetRecord, wt as playgroundsSchema, x as ConfigsTable, xt as playgroundColumnSchema, y as ConfigVariant, yt as guardrailConfigsSchema, z as Insertable } from "../index-BtSgIKup.cjs";
- export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseOptions, DatabaseType, Dataset, DatasetRecord, DatasetRecordsTable, DatasetVersion, DatasetVersionRecord, DatasetVersionRecordsTable, DatasetVersionsTable, DatasetsTable, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, GuardrailConfig, GuardrailConfigsTable, GuardrailResult, GuardrailResults, Insertable, LLMRequest, LLMRequestsTable, MigrationOptions, MigrationResult, Playground, PlaygroundColumn, PlaygroundResult, PlaygroundResultsTable, PlaygroundRun, PlaygroundRunsTable, PlaygroundsTable, ProviderConfig, ProviderConfigsTable, ProviderGuardrailOverride, ProviderGuardrailOverridesTable, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, WorkspaceSettings, WorkspaceSettingsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, createNeonDialect, datasetRecordsSchema, datasetVersionRecordsSchema, datasetVersionsSchema, datasetsSchema, detectDatabaseType, environmentSecretsSchema, environmentsSchema, executeWithSchema, getMigrations, guardrailConfigsSchema, llmRequestsSchema, matchType, parsePartialTableData, parseTableData, playgroundColumnSchema, playgroundResultsSchema, playgroundRunsSchema, playgroundsSchema, providerConfigsSchema, providerGuardrailOverridesSchema, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema, workspaceSettingsSchema };
+ import { $ as ProviderConfigsTable, A as DatasetVersionRecord, At as schemas, B as GuardrailResult, C as ConfigVariantsTable, Ct as llmRequestsSchema, D as DatasetRecord, Dt as playgroundsSchema, E as Dataset, Et as playgroundRunsSchema, F as EnvironmentSecret, G as Playground, H as Insertable, I as EnvironmentSecretsTable, J as PlaygroundResultsTable, K as PlaygroundColumn, L as EnvironmentsTable, M as DatasetVersionsTable, Mt as variantVersionsSchema, N as DatasetsTable, Nt as variantsSchema, O as DatasetRecordsTable, Ot as providerConfigsSchema, P as Environment, Pt as workspaceSettingsSchema, Q as ProviderConfig, R as GuardrailConfig, S as ConfigVariant, St as guardrailConfigsSchema, T as Database, Tt as playgroundResultsSchema, U as LLMRequest, V as GuardrailResults, W as LLMRequestsTable, X as PlaygroundRunsTable, Y as PlaygroundRun, Z as PlaygroundsTable, _ as validateTableData, _t as datasetVersionRecordsSchema, a as createDatabaseFromConnection, at as TargetingRule, b as runSchemaSQL, bt as environmentSecretsSchema, c as executeWithSchema, ct as Variant, d as getMigrations, dt as VariantsTable, et as ProviderGuardrailOverride, f as matchType, ft as WorkspaceSettings, g as validatePartialTableData, gt as datasetRecordsSchema, h as parseTableData, ht as configsSchema, i as createDatabase, it as TableName, j as DatasetVersionRecordsTable, jt as targetingRulesSchema, k as DatasetVersion, kt as providerGuardrailOverridesSchema, l as MigrationOptions, lt as VariantVersion, m as parsePartialTableData, mt as configVariantsSchema, n as DatabaseOptions, nt as SCHEMA_METADATA, o as detectDatabaseType, ot as TargetingRulesTable, p as runAutoMigrations, pt as WorkspaceSettingsTable, q as PlaygroundResult, r as DatabaseType, rt as Selectable, s as createNeonDialect, st as Updateable, t as DatabaseConnection, tt as ProviderGuardrailOverridesTable, u as MigrationResult, ut as VariantVersionsTable, v as createNeonSqlFunction, vt as datasetVersionsSchema, w as ConfigsTable, wt as playgroundColumnSchema, x as Config, xt as environmentsSchema, y as generatePostgresSchemaSQL, yt as datasetsSchema, z as GuardrailConfigsTable } from "../index-Bk_flDhH.cjs";
+ export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseOptions, DatabaseType, Dataset, DatasetRecord, DatasetRecordsTable, DatasetVersion, DatasetVersionRecord, DatasetVersionRecordsTable, DatasetVersionsTable, DatasetsTable, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, GuardrailConfig, GuardrailConfigsTable, GuardrailResult, GuardrailResults, Insertable, LLMRequest, LLMRequestsTable, MigrationOptions, MigrationResult, Playground, PlaygroundColumn, PlaygroundResult, PlaygroundResultsTable, PlaygroundRun, PlaygroundRunsTable, PlaygroundsTable, ProviderConfig, ProviderConfigsTable, ProviderGuardrailOverride, ProviderGuardrailOverridesTable, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, WorkspaceSettings, WorkspaceSettingsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, createNeonDialect, createNeonSqlFunction, datasetRecordsSchema, datasetVersionRecordsSchema, datasetVersionsSchema, datasetsSchema, detectDatabaseType, environmentSecretsSchema, environmentsSchema, executeWithSchema, generatePostgresSchemaSQL, getMigrations, guardrailConfigsSchema, llmRequestsSchema, matchType, parsePartialTableData, parseTableData, playgroundColumnSchema, playgroundResultsSchema, playgroundRunsSchema, playgroundsSchema, providerConfigsSchema, providerGuardrailOverridesSchema, runAutoMigrations, runSchemaSQL, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema, workspaceSettingsSchema };
@@ -1,2 +1,2 @@
- import { $ as SCHEMA_METADATA, A as DatasetsTable, At as variantsSchema, B as LLMRequest, C as Dataset, Ct as playgroundRunsSchema, D as DatasetVersionRecord, Dt as schemas, E as DatasetVersion, Et as providerGuardrailOverridesSchema, F as GuardrailConfig, G as PlaygroundResultsTable, H as Playground, I as GuardrailConfigsTable, J as PlaygroundsTable, K as PlaygroundRun, L as GuardrailResult, M as EnvironmentSecret, N as EnvironmentSecretsTable, O as DatasetVersionRecordsTable, Ot as targetingRulesSchema, P as EnvironmentsTable, Q as ProviderGuardrailOverridesTable, R as GuardrailResults, S as Database, St as playgroundResultsSchema, T as DatasetRecordsTable, Tt as providerConfigsSchema, U as PlaygroundColumn, V as LLMRequestsTable, W as PlaygroundResult, X as ProviderConfigsTable, Y as ProviderConfig, Z as ProviderGuardrailOverride, _ as validateTableData, _t as environmentSecretsSchema, a as createDatabaseFromConnection, at as Variant, b as ConfigVariantsTable, bt as llmRequestsSchema, c as executeWithSchema, ct as VariantsTable, d as getMigrations, dt as configVariantsSchema, et as Selectable, f as matchType, ft as configsSchema, g as validatePartialTableData, gt as datasetsSchema, h as parseTableData, ht as datasetVersionsSchema, i as createDatabase, it as Updateable, j as Environment, jt as workspaceSettingsSchema, k as DatasetVersionsTable, kt as variantVersionsSchema, l as MigrationOptions, lt as WorkspaceSettings, m as parsePartialTableData, mt as datasetVersionRecordsSchema, n as DatabaseOptions, nt as TargetingRule, o as detectDatabaseType, ot as VariantVersion, p as runAutoMigrations, pt as datasetRecordsSchema, q as PlaygroundRunsTable, r as DatabaseType, rt as TargetingRulesTable, s as createNeonDialect, st as VariantVersionsTable, t as DatabaseConnection, tt as TableName, u as MigrationResult, ut as WorkspaceSettingsTable, v as Config, vt as environmentsSchema, w as DatasetRecord, wt as playgroundsSchema, x as ConfigsTable, xt as playgroundColumnSchema, y as ConfigVariant, yt as guardrailConfigsSchema, z as Insertable } from "../index-LRmy4sz9.mjs";
- export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseOptions, DatabaseType, Dataset, DatasetRecord, DatasetRecordsTable, DatasetVersion, DatasetVersionRecord, DatasetVersionRecordsTable, DatasetVersionsTable, DatasetsTable, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, GuardrailConfig, GuardrailConfigsTable, GuardrailResult, GuardrailResults, Insertable, LLMRequest, LLMRequestsTable, MigrationOptions, MigrationResult, Playground, PlaygroundColumn, PlaygroundResult, PlaygroundResultsTable, PlaygroundRun, PlaygroundRunsTable, PlaygroundsTable, ProviderConfig, ProviderConfigsTable, ProviderGuardrailOverride, ProviderGuardrailOverridesTable, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, WorkspaceSettings, WorkspaceSettingsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, createNeonDialect, datasetRecordsSchema, datasetVersionRecordsSchema, datasetVersionsSchema, datasetsSchema, detectDatabaseType, environmentSecretsSchema, environmentsSchema, executeWithSchema, getMigrations, guardrailConfigsSchema, llmRequestsSchema, matchType, parsePartialTableData, parseTableData, playgroundColumnSchema, playgroundResultsSchema, playgroundRunsSchema, playgroundsSchema, providerConfigsSchema, providerGuardrailOverridesSchema, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema, workspaceSettingsSchema };
+ import { $ as ProviderConfigsTable, A as DatasetVersionRecord, At as schemas, B as GuardrailResult, C as ConfigVariantsTable, Ct as llmRequestsSchema, D as DatasetRecord, Dt as playgroundsSchema, E as Dataset, Et as playgroundRunsSchema, F as EnvironmentSecret, G as Playground, H as Insertable, I as EnvironmentSecretsTable, J as PlaygroundResultsTable, K as PlaygroundColumn, L as EnvironmentsTable, M as DatasetVersionsTable, Mt as variantVersionsSchema, N as DatasetsTable, Nt as variantsSchema, O as DatasetRecordsTable, Ot as providerConfigsSchema, P as Environment, Pt as workspaceSettingsSchema, Q as ProviderConfig, R as GuardrailConfig, S as ConfigVariant, St as guardrailConfigsSchema, T as Database, Tt as playgroundResultsSchema, U as LLMRequest, V as GuardrailResults, W as LLMRequestsTable, X as PlaygroundRunsTable, Y as PlaygroundRun, Z as PlaygroundsTable, _ as validateTableData, _t as datasetVersionRecordsSchema, a as createDatabaseFromConnection, at as TargetingRule, b as runSchemaSQL, bt as environmentSecretsSchema, c as executeWithSchema, ct as Variant, d as getMigrations, dt as VariantsTable, et as ProviderGuardrailOverride, f as matchType, ft as WorkspaceSettings, g as validatePartialTableData, gt as datasetRecordsSchema, h as parseTableData, ht as configsSchema, i as createDatabase, it as TableName, j as DatasetVersionRecordsTable, jt as targetingRulesSchema, k as DatasetVersion, kt as providerGuardrailOverridesSchema, l as MigrationOptions, lt as VariantVersion, m as parsePartialTableData, mt as configVariantsSchema, n as DatabaseOptions, nt as SCHEMA_METADATA, o as detectDatabaseType, ot as TargetingRulesTable, p as runAutoMigrations, pt as WorkspaceSettingsTable, q as PlaygroundResult, r as DatabaseType, rt as Selectable, s as createNeonDialect, st as Updateable, t as DatabaseConnection, tt as ProviderGuardrailOverridesTable, u as MigrationResult, ut as VariantVersionsTable, v as createNeonSqlFunction, vt as datasetVersionsSchema, w as ConfigsTable, wt as playgroundColumnSchema, x as Config, xt as environmentsSchema, y as generatePostgresSchemaSQL, yt as datasetsSchema, z as GuardrailConfigsTable } from "../index-BvrZuRon.mjs";
+ export { Config, ConfigVariant, ConfigVariantsTable, ConfigsTable, Database, DatabaseConnection, DatabaseOptions, DatabaseType, Dataset, DatasetRecord, DatasetRecordsTable, DatasetVersion, DatasetVersionRecord, DatasetVersionRecordsTable, DatasetVersionsTable, DatasetsTable, Environment, EnvironmentSecret, EnvironmentSecretsTable, EnvironmentsTable, GuardrailConfig, GuardrailConfigsTable, GuardrailResult, GuardrailResults, Insertable, LLMRequest, LLMRequestsTable, MigrationOptions, MigrationResult, Playground, PlaygroundColumn, PlaygroundResult, PlaygroundResultsTable, PlaygroundRun, PlaygroundRunsTable, PlaygroundsTable, ProviderConfig, ProviderConfigsTable, ProviderGuardrailOverride, ProviderGuardrailOverridesTable, SCHEMA_METADATA, Selectable, TableName, TargetingRule, TargetingRulesTable, Updateable, Variant, VariantVersion, VariantVersionsTable, VariantsTable, WorkspaceSettings, WorkspaceSettingsTable, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, createNeonDialect, createNeonSqlFunction, datasetRecordsSchema, datasetVersionRecordsSchema, datasetVersionsSchema, datasetsSchema, detectDatabaseType, environmentSecretsSchema, environmentsSchema, executeWithSchema, generatePostgresSchemaSQL, getMigrations, guardrailConfigsSchema, llmRequestsSchema, matchType, parsePartialTableData, parseTableData, playgroundColumnSchema, playgroundResultsSchema, playgroundRunsSchema, playgroundsSchema, providerConfigsSchema, providerGuardrailOverridesSchema, runAutoMigrations, runSchemaSQL, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema, workspaceSettingsSchema };
package/dist/db/index.mjs CHANGED
@@ -1,4 +1,4 @@
- import { A as schemas, C as llmRequestsSchema, D as playgroundsSchema, E as playgroundRunsSchema, M as variantVersionsSchema, N as variantsSchema, O as providerConfigsSchema, P as workspaceSettingsSchema, S as guardrailConfigsSchema, T as playgroundResultsSchema, _ as datasetVersionRecordsSchema, a as matchType, b as environmentSecretsSchema, d as validatePartialTableData, f as validateTableData, g as datasetRecordsSchema, h as configsSchema, i as getMigrations, j as targetingRulesSchema, k as providerGuardrailOverridesSchema, l as parsePartialTableData, m as configVariantsSchema, n as createDatabaseFromConnection, o as runAutoMigrations, p as SCHEMA_METADATA, r as detectDatabaseType, t as createDatabase, u as parseTableData, v as datasetVersionsSchema, w as playgroundColumnSchema, x as environmentsSchema, y as datasetsSchema } from "../db-DX_QaIkx.mjs";
+ import { A as playgroundsSchema, C as environmentSecretsSchema, D as playgroundColumnSchema, E as llmRequestsSchema, F as variantVersionsSchema, I as variantsSchema, L as workspaceSettingsSchema, M as providerGuardrailOverridesSchema, N as schemas, O as playgroundResultsSchema, P as targetingRulesSchema, S as datasetsSchema, T as guardrailConfigsSchema, _ as configVariantsSchema, a as matchType, b as datasetVersionRecordsSchema, d as validatePartialTableData, f as validateTableData, g as SCHEMA_METADATA, h as runSchemaSQL, i as getMigrations, j as providerConfigsSchema, k as playgroundRunsSchema, l as parsePartialTableData, m as generatePostgresSchemaSQL, n as createDatabaseFromConnection, o as runAutoMigrations, p as createNeonSqlFunction, r as detectDatabaseType, t as createDatabase, u as parseTableData, v as configsSchema, w as environmentsSchema, x as datasetVersionsSchema, y as datasetRecordsSchema } from "../db-BN99c_b5.mjs";
  import { n as executeWithSchema, t as createNeonDialect } from "../neon-dialect-DySGBYUi.mjs";
 
- export { SCHEMA_METADATA, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, createNeonDialect, datasetRecordsSchema, datasetVersionRecordsSchema, datasetVersionsSchema, datasetsSchema, detectDatabaseType, environmentSecretsSchema, environmentsSchema, executeWithSchema, getMigrations, guardrailConfigsSchema, llmRequestsSchema, matchType, parsePartialTableData, parseTableData, playgroundColumnSchema, playgroundResultsSchema, playgroundRunsSchema, playgroundsSchema, providerConfigsSchema, providerGuardrailOverridesSchema, runAutoMigrations, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema, workspaceSettingsSchema };
+ export { SCHEMA_METADATA, configVariantsSchema, configsSchema, createDatabase, createDatabaseFromConnection, createNeonDialect, createNeonSqlFunction, datasetRecordsSchema, datasetVersionRecordsSchema, datasetVersionsSchema, datasetsSchema, detectDatabaseType, environmentSecretsSchema, environmentsSchema, executeWithSchema, generatePostgresSchemaSQL, getMigrations, guardrailConfigsSchema, llmRequestsSchema, matchType, parsePartialTableData, parseTableData, playgroundColumnSchema, playgroundResultsSchema, playgroundRunsSchema, playgroundsSchema, providerConfigsSchema, providerGuardrailOverridesSchema, runAutoMigrations, runSchemaSQL, schemas, targetingRulesSchema, validatePartialTableData, validateTableData, variantVersionsSchema, variantsSchema, workspaceSettingsSchema };
@@ -13915,6 +13915,230 @@ const schemas = {
  llm_requests: llmRequestsSchema
  };

+ //#endregion
+ //#region src/db/schema-sql.ts
+ /**
+ * Idempotent SQL Schema Generator
+ *
+ * Generates fully idempotent PostgreSQL schema SQL that can be run on every
+ * server restart. Works in edge environments (no file system access needed).
+ *
+ * This is the programmatic version of the generate-schema-sql.ts script.
+ */
+ function toSnakeCase(str) {
+ return str.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
+ }
+ function getDefaultValue(fieldType, defaultValue) {
+ if (defaultValue === void 0) return null;
+ if (defaultValue === "now()") return "NOW()";
+ if (typeof defaultValue === "boolean") return defaultValue ? "TRUE" : "FALSE";
+ if (typeof defaultValue === "number") return String(defaultValue);
+ if (typeof defaultValue === "string") {
+ if (defaultValue === "{}") return "'{}'::jsonb";
+ return `'${defaultValue}'`;
+ }
+ return null;
+ }
+ const TYPE_MAPPINGS = {
+ uuid: "UUID",
+ text: "TEXT",
+ integer: "INTEGER",
+ boolean: "BOOLEAN",
+ timestamp: "TIMESTAMP WITH TIME ZONE",
+ jsonb: "JSONB"
+ };
+ /**
+ * Generate idempotent PostgreSQL schema SQL
+ *
+ * @param schemaName - Optional PostgreSQL schema name (e.g., 'llmops').
+ * If not provided, tables are created in the current search_path.
+ * @returns SQL string that can be executed to create/update the schema
+ */
+ function generatePostgresSchemaSQL(schemaName) {
+ const lines = [];
+ const schemaPrefix = schemaName ? `"${schemaName}".` : "";
+ lines.push(`-- LLMOps Database Schema (PostgreSQL)`);
+ lines.push(`-- This SQL is fully idempotent and safe to run on every server restart.`);
+ lines.push("");
+ if (schemaName) {
+ lines.push(`-- Create schema if not exists`);
+ lines.push(`CREATE SCHEMA IF NOT EXISTS "${schemaName}";`);
+ lines.push("");
+ }
+ lines.push(`-- Enable UUID extension`);
+ lines.push(`CREATE EXTENSION IF NOT EXISTS "pgcrypto" SCHEMA public;`);
+ lines.push("");
+ const sortedTables = Object.entries(SCHEMA_METADATA.tables).map(([name, meta$2]) => ({
+ name,
+ meta: meta$2
+ })).sort((a, b) => a.meta.order - b.meta.order);
+ lines.push(`-- STEP 1: Create tables (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const fullTableName = schemaPrefix + toSnakeCase(name);
+ if (!Object.entries(meta$2.fields).find(([_, f]) => f.primaryKey)) continue;
+ lines.push(`CREATE TABLE IF NOT EXISTS ${fullTableName} (`);
+ lines.push(` id UUID PRIMARY KEY DEFAULT gen_random_uuid()`);
+ lines.push(`);`);
+ }
+ lines.push("");
+ lines.push(`-- STEP 2: Add columns (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const fullTableName = schemaPrefix + toSnakeCase(name);
+ for (const [columnName, field] of Object.entries(meta$2.fields)) {
+ if (field.primaryKey) continue;
+ const snakeColumn = toSnakeCase(columnName);
+ const sqlType = TYPE_MAPPINGS[field.type] || "TEXT";
+ const defaultVal = getDefaultValue(field.type, field.default);
+ let colDef = `${snakeColumn} ${sqlType}`;
+ if (defaultVal) colDef += ` DEFAULT ${defaultVal}`;
+ lines.push(`ALTER TABLE ${fullTableName} ADD COLUMN IF NOT EXISTS ${colDef};`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 3: Add unique constraints (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ for (const [columnName, field] of Object.entries(meta$2.fields)) if (field.unique) {
+ const snakeColumn = toSnakeCase(columnName);
+ const constraintName = `uq_${snakeTable}_${snakeColumn}`;
+ lines.push(`DO $$`);
+ lines.push(`BEGIN`);
+ lines.push(` IF NOT EXISTS (`);
+ lines.push(` SELECT 1 FROM pg_constraint WHERE conname = '${constraintName}'`);
+ lines.push(` ) THEN`);
+ lines.push(` ALTER TABLE ${fullTableName} ADD CONSTRAINT ${constraintName} UNIQUE (${snakeColumn});`);
+ lines.push(` END IF;`);
+ lines.push(`END $$;`);
+ }
+ if (meta$2.uniqueConstraints) for (const constraint of meta$2.uniqueConstraints) {
+ const cols = constraint.columns.map(toSnakeCase).join(", ");
+ const constraintName = `uq_${snakeTable}_${constraint.columns.map(toSnakeCase).join("_")}`;
+ lines.push(`DO $$`);
+ lines.push(`BEGIN`);
+ lines.push(` IF NOT EXISTS (`);
+ lines.push(` SELECT 1 FROM pg_constraint WHERE conname = '${constraintName}'`);
+ lines.push(` ) THEN`);
+ lines.push(` ALTER TABLE ${fullTableName} ADD CONSTRAINT ${constraintName} UNIQUE (${cols});`);
+ lines.push(` END IF;`);
+ lines.push(`END $$;`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 4: Add foreign keys (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ for (const [columnName, field] of Object.entries(meta$2.fields)) if (field.references) {
+ const snakeColumn = toSnakeCase(columnName);
+ const refTable = schemaPrefix + toSnakeCase(field.references.table);
+ const refColumn = toSnakeCase(field.references.column);
+ const constraintName = `fk_${snakeTable}_${snakeColumn}`;
+ lines.push(`DO $$`);
+ lines.push(`BEGIN`);
+ lines.push(` IF NOT EXISTS (`);
+ lines.push(` SELECT 1 FROM pg_constraint WHERE conname = '${constraintName}'`);
+ lines.push(` ) THEN`);
+ lines.push(` ALTER TABLE ${fullTableName} ADD CONSTRAINT ${constraintName}`);
+ lines.push(` FOREIGN KEY (${snakeColumn}) REFERENCES ${refTable}(${refColumn}) ON DELETE CASCADE;`);
+ lines.push(` END IF;`);
+ lines.push(`END $$;`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 5: Create indexes (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ for (const [columnName, field] of Object.entries(meta$2.fields)) if (field.references) {
+ const snakeColumn = toSnakeCase(columnName);
+ lines.push(`CREATE INDEX IF NOT EXISTS idx_${snakeTable}_${snakeColumn} ON ${fullTableName}(${snakeColumn});`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 6: Create updated_at triggers`);
+ const functionName = `${schemaName ? `"${schemaName}".` : "public."}update_updated_at_column`;
+ lines.push(`CREATE OR REPLACE FUNCTION ${functionName}()`);
+ lines.push(`RETURNS TRIGGER AS $$`);
+ lines.push(`BEGIN`);
+ lines.push(` NEW.updated_at = NOW();`);
+ lines.push(` RETURN NEW;`);
+ lines.push(`END;`);
+ lines.push(`$$ language 'plpgsql';`);
+ for (const { name } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ lines.push(`DROP TRIGGER IF EXISTS update_${snakeTable}_updated_at ON ${fullTableName};`);
+ lines.push(`CREATE TRIGGER update_${snakeTable}_updated_at BEFORE UPDATE ON ${fullTableName} FOR EACH ROW EXECUTE FUNCTION ${functionName}();`);
+ }
+ lines.push("");
+ return lines.join("\n");
+ }
+ /**
+ * Execute the schema SQL using a Neon SQL function
+ *
+ * @param sql - Neon sql function (from `neon()` or passed connection)
+ * @param schemaName - Optional PostgreSQL schema name
+ */
+ async function runSchemaSQL(sql$1, schemaName) {
+ const statements = splitSQLStatements(generatePostgresSchemaSQL(schemaName));
+ for (const statement of statements) {
+ const trimmed = statement.trim();
+ if (trimmed && !trimmed.startsWith("--")) await sql$1(trimmed);
+ }
+ }
+ /**
+ * Split SQL into individual statements, keeping DO blocks together
+ */
+ function splitSQLStatements(sql$1) {
+ const statements = [];
+ let current = "";
+ let inDoBlock = false;
+ const lines = sql$1.split("\n");
+ for (const line of lines) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith("--")) continue;
+ if (trimmed === "DO $$") {
+ inDoBlock = true;
+ current = line + "\n";
+ continue;
+ }
+ if (inDoBlock) {
+ current += line + "\n";
+ if (trimmed === "END $$;") {
+ statements.push(current.trim());
+ current = "";
+ inDoBlock = false;
+ }
+ continue;
+ }
+ current += line + "\n";
+ if (trimmed.endsWith(";")) {
+ statements.push(current.trim());
+ current = "";
+ }
+ }
+ if (current.trim()) statements.push(current.trim());
+ return statements;
+ }
+ /**
+ * Create a Neon SQL function from various connection types
+ *
+ * @param rawConnection - neon() function, connection string, or undefined (uses env vars)
+ * @returns SQL function that can be used with runSchemaSQL, or null if unable to create
+ */
+ async function createNeonSqlFunction(rawConnection) {
+ if (typeof rawConnection === "function") return rawConnection;
+ if (typeof rawConnection === "string" && rawConnection) {
+ const { neon: neon$1 } = await import("@neondatabase/serverless");
+ return neon$1(rawConnection);
+ }
+ const connectionString = process.env.NEON_CONNECTION_STRING || process.env.NEON_PG_URL || process.env.DATABASE_URL || process.env.POSTGRES_URL || "";
+ if (!connectionString) return null;
+ const { neon } = await import("@neondatabase/serverless");
+ return neon(connectionString);
+ }
+
  //#endregion
  //#region src/db/validation.ts
  /**
@@ -14416,4 +14640,4 @@ async function createDatabaseFromConnection(rawConnection, options) {
  }
 
  //#endregion
- export { schemas as A, literal as B, llmRequestsSchema as C, playgroundsSchema as D, playgroundRunsSchema as E, zod_default as F, union as G, object as H, _enum as I, unknown as K, any as L, variantVersionsSchema as M, variantsSchema as N, providerConfigsSchema as O, workspaceSettingsSchema as P, array as R, guardrailConfigsSchema as S, playgroundResultsSchema as T, record as U, number$1 as V, string$1 as W, datasetVersionRecordsSchema as _, matchType as a, environmentSecretsSchema as b, logger as c, validatePartialTableData as d, validateTableData as f, datasetRecordsSchema as g, configsSchema as h, getMigrations$1 as i, targetingRulesSchema as j, providerGuardrailOverridesSchema as k, parsePartialTableData as l, configVariantsSchema as m, createDatabaseFromConnection as n, runAutoMigrations as o, SCHEMA_METADATA as p, detectDatabaseType as r, getAuthClientOptions as s, createDatabase as t, parseTableData as u, datasetVersionsSchema as v, playgroundColumnSchema as w, environmentsSchema as x, datasetsSchema as y, boolean$1 as z };
+ export { playgroundsSchema as A, any as B, environmentSecretsSchema as C, playgroundColumnSchema as D, llmRequestsSchema as E, variantVersionsSchema as F, object as G, boolean$1 as H, variantsSchema as I, union as J, record as K, workspaceSettingsSchema as L, providerGuardrailOverridesSchema as M, schemas as N, playgroundResultsSchema as O, targetingRulesSchema as P, zod_default as R, datasetsSchema as S, guardrailConfigsSchema as T, literal as U, array as V, number$1 as W, unknown as Y, configVariantsSchema as _, matchType as a, datasetVersionRecordsSchema as b, logger as c, validatePartialTableData as d, validateTableData as f, SCHEMA_METADATA as g, runSchemaSQL as h, getMigrations$1 as i, providerConfigsSchema as j, playgroundRunsSchema as k, parsePartialTableData as l, generatePostgresSchemaSQL as m, createDatabaseFromConnection as n, runAutoMigrations as o, createNeonSqlFunction as p, string$1 as q, detectDatabaseType as r, getAuthClientOptions as s, createDatabase as t, parseTableData as u, configsSchema as v, environmentsSchema as w, datasetVersionsSchema as x, datasetRecordsSchema as y, _enum as z };
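The hunk below appears to add the same src/db/schema-sql.ts region to the CommonJS build of this chunk. For reference, this is the kind of guarded statement the STEP 3 loop above emits and that splitSQLStatements keeps intact as a single DO block (table, column, and constraint names are made-up examples; whitespace is approximate):

    // Illustration only, not a package export.
    const guard = [
      "DO $$",
      "BEGIN",
      " IF NOT EXISTS (",
      " SELECT 1 FROM pg_constraint WHERE conname = 'uq_environments_slug'",
      " ) THEN",
      " ALTER TABLE environments ADD CONSTRAINT uq_environments_slug UNIQUE (slug);",
      " END IF;",
      "END $$;"
    ].join("\n");
    // runSchemaSQL receives this whole block as one statement rather than splitting it at the
    // inner semicolons, because splitSQLStatements only closes a statement at "END $$;" once it
    // has seen the opening "DO $$" line.
    console.log(guard);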
@@ -13939,6 +13939,230 @@ const schemas = {
  llm_requests: llmRequestsSchema
  };

+ //#endregion
+ //#region src/db/schema-sql.ts
+ /**
+ * Idempotent SQL Schema Generator
+ *
+ * Generates fully idempotent PostgreSQL schema SQL that can be run on every
+ * server restart. Works in edge environments (no file system access needed).
+ *
+ * This is the programmatic version of the generate-schema-sql.ts script.
+ */
+ function toSnakeCase(str) {
+ return str.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`);
+ }
+ function getDefaultValue(fieldType, defaultValue) {
+ if (defaultValue === void 0) return null;
+ if (defaultValue === "now()") return "NOW()";
+ if (typeof defaultValue === "boolean") return defaultValue ? "TRUE" : "FALSE";
+ if (typeof defaultValue === "number") return String(defaultValue);
+ if (typeof defaultValue === "string") {
+ if (defaultValue === "{}") return "'{}'::jsonb";
+ return `'${defaultValue}'`;
+ }
+ return null;
+ }
+ const TYPE_MAPPINGS = {
+ uuid: "UUID",
+ text: "TEXT",
+ integer: "INTEGER",
+ boolean: "BOOLEAN",
+ timestamp: "TIMESTAMP WITH TIME ZONE",
+ jsonb: "JSONB"
+ };
+ /**
+ * Generate idempotent PostgreSQL schema SQL
+ *
+ * @param schemaName - Optional PostgreSQL schema name (e.g., 'llmops').
+ * If not provided, tables are created in the current search_path.
+ * @returns SQL string that can be executed to create/update the schema
+ */
+ function generatePostgresSchemaSQL(schemaName) {
+ const lines = [];
+ const schemaPrefix = schemaName ? `"${schemaName}".` : "";
+ lines.push(`-- LLMOps Database Schema (PostgreSQL)`);
+ lines.push(`-- This SQL is fully idempotent and safe to run on every server restart.`);
+ lines.push("");
+ if (schemaName) {
+ lines.push(`-- Create schema if not exists`);
+ lines.push(`CREATE SCHEMA IF NOT EXISTS "${schemaName}";`);
+ lines.push("");
+ }
+ lines.push(`-- Enable UUID extension`);
+ lines.push(`CREATE EXTENSION IF NOT EXISTS "pgcrypto" SCHEMA public;`);
+ lines.push("");
+ const sortedTables = Object.entries(SCHEMA_METADATA.tables).map(([name, meta$2]) => ({
+ name,
+ meta: meta$2
+ })).sort((a, b) => a.meta.order - b.meta.order);
+ lines.push(`-- STEP 1: Create tables (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const fullTableName = schemaPrefix + toSnakeCase(name);
+ if (!Object.entries(meta$2.fields).find(([_, f]) => f.primaryKey)) continue;
+ lines.push(`CREATE TABLE IF NOT EXISTS ${fullTableName} (`);
+ lines.push(` id UUID PRIMARY KEY DEFAULT gen_random_uuid()`);
+ lines.push(`);`);
+ }
+ lines.push("");
+ lines.push(`-- STEP 2: Add columns (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const fullTableName = schemaPrefix + toSnakeCase(name);
+ for (const [columnName, field] of Object.entries(meta$2.fields)) {
+ if (field.primaryKey) continue;
+ const snakeColumn = toSnakeCase(columnName);
+ const sqlType = TYPE_MAPPINGS[field.type] || "TEXT";
+ const defaultVal = getDefaultValue(field.type, field.default);
+ let colDef = `${snakeColumn} ${sqlType}`;
+ if (defaultVal) colDef += ` DEFAULT ${defaultVal}`;
+ lines.push(`ALTER TABLE ${fullTableName} ADD COLUMN IF NOT EXISTS ${colDef};`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 3: Add unique constraints (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ for (const [columnName, field] of Object.entries(meta$2.fields)) if (field.unique) {
+ const snakeColumn = toSnakeCase(columnName);
+ const constraintName = `uq_${snakeTable}_${snakeColumn}`;
+ lines.push(`DO $$`);
+ lines.push(`BEGIN`);
+ lines.push(` IF NOT EXISTS (`);
+ lines.push(` SELECT 1 FROM pg_constraint WHERE conname = '${constraintName}'`);
+ lines.push(` ) THEN`);
+ lines.push(` ALTER TABLE ${fullTableName} ADD CONSTRAINT ${constraintName} UNIQUE (${snakeColumn});`);
+ lines.push(` END IF;`);
+ lines.push(`END $$;`);
+ }
+ if (meta$2.uniqueConstraints) for (const constraint of meta$2.uniqueConstraints) {
+ const cols = constraint.columns.map(toSnakeCase).join(", ");
+ const constraintName = `uq_${snakeTable}_${constraint.columns.map(toSnakeCase).join("_")}`;
+ lines.push(`DO $$`);
+ lines.push(`BEGIN`);
+ lines.push(` IF NOT EXISTS (`);
+ lines.push(` SELECT 1 FROM pg_constraint WHERE conname = '${constraintName}'`);
+ lines.push(` ) THEN`);
+ lines.push(` ALTER TABLE ${fullTableName} ADD CONSTRAINT ${constraintName} UNIQUE (${cols});`);
+ lines.push(` END IF;`);
+ lines.push(`END $$;`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 4: Add foreign keys (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ for (const [columnName, field] of Object.entries(meta$2.fields)) if (field.references) {
+ const snakeColumn = toSnakeCase(columnName);
+ const refTable = schemaPrefix + toSnakeCase(field.references.table);
+ const refColumn = toSnakeCase(field.references.column);
+ const constraintName = `fk_${snakeTable}_${snakeColumn}`;
+ lines.push(`DO $$`);
+ lines.push(`BEGIN`);
+ lines.push(` IF NOT EXISTS (`);
+ lines.push(` SELECT 1 FROM pg_constraint WHERE conname = '${constraintName}'`);
+ lines.push(` ) THEN`);
+ lines.push(` ALTER TABLE ${fullTableName} ADD CONSTRAINT ${constraintName}`);
+ lines.push(` FOREIGN KEY (${snakeColumn}) REFERENCES ${refTable}(${refColumn}) ON DELETE CASCADE;`);
+ lines.push(` END IF;`);
+ lines.push(`END $$;`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 5: Create indexes (if not exist)`);
+ for (const { name, meta: meta$2 } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ for (const [columnName, field] of Object.entries(meta$2.fields)) if (field.references) {
+ const snakeColumn = toSnakeCase(columnName);
+ lines.push(`CREATE INDEX IF NOT EXISTS idx_${snakeTable}_${snakeColumn} ON ${fullTableName}(${snakeColumn});`);
+ }
+ }
+ lines.push("");
+ lines.push(`-- STEP 6: Create updated_at triggers`);
+ const functionName = `${schemaName ? `"${schemaName}".` : "public."}update_updated_at_column`;
+ lines.push(`CREATE OR REPLACE FUNCTION ${functionName}()`);
+ lines.push(`RETURNS TRIGGER AS $$`);
+ lines.push(`BEGIN`);
+ lines.push(` NEW.updated_at = NOW();`);
+ lines.push(` RETURN NEW;`);
+ lines.push(`END;`);
+ lines.push(`$$ language 'plpgsql';`);
+ for (const { name } of sortedTables) {
+ const snakeTable = toSnakeCase(name);
+ const fullTableName = schemaPrefix + snakeTable;
+ lines.push(`DROP TRIGGER IF EXISTS update_${snakeTable}_updated_at ON ${fullTableName};`);
+ lines.push(`CREATE TRIGGER update_${snakeTable}_updated_at BEFORE UPDATE ON ${fullTableName} FOR EACH ROW EXECUTE FUNCTION ${functionName}();`);
+ }
+ lines.push("");
+ return lines.join("\n");
+ }
+ /**
+ * Execute the schema SQL using a Neon SQL function
+ *
+ * @param sql - Neon sql function (from `neon()` or passed connection)
+ * @param schemaName - Optional PostgreSQL schema name
+ */
+ async function runSchemaSQL(sql$1, schemaName) {
+ const statements = splitSQLStatements(generatePostgresSchemaSQL(schemaName));
+ for (const statement of statements) {
+ const trimmed = statement.trim();
+ if (trimmed && !trimmed.startsWith("--")) await sql$1(trimmed);
+ }
+ }
+ /**
+ * Split SQL into individual statements, keeping DO blocks together
+ */
+ function splitSQLStatements(sql$1) {
+ const statements = [];
+ let current = "";
+ let inDoBlock = false;
+ const lines = sql$1.split("\n");
+ for (const line of lines) {
+ const trimmed = line.trim();
+ if (!trimmed || trimmed.startsWith("--")) continue;
+ if (trimmed === "DO $$") {
+ inDoBlock = true;
+ current = line + "\n";
+ continue;
+ }
+ if (inDoBlock) {
+ current += line + "\n";
+ if (trimmed === "END $$;") {
+ statements.push(current.trim());
+ current = "";
+ inDoBlock = false;
+ }
+ continue;
+ }
+ current += line + "\n";
+ if (trimmed.endsWith(";")) {
+ statements.push(current.trim());
+ current = "";
+ }
+ }
+ if (current.trim()) statements.push(current.trim());
+ return statements;
+ }
+ /**
+ * Create a Neon SQL function from various connection types
+ *
+ * @param rawConnection - neon() function, connection string, or undefined (uses env vars)
+ * @returns SQL function that can be used with runSchemaSQL, or null if unable to create
+ */
+ async function createNeonSqlFunction(rawConnection) {
+ if (typeof rawConnection === "function") return rawConnection;
+ if (typeof rawConnection === "string" && rawConnection) {
+ const { neon: neon$1 } = await import("@neondatabase/serverless");
+ return neon$1(rawConnection);
+ }
+ const connectionString = process.env.NEON_CONNECTION_STRING || process.env.NEON_PG_URL || process.env.DATABASE_URL || process.env.POSTGRES_URL || "";
+ if (!connectionString) return null;
+ const { neon } = await import("@neondatabase/serverless");
+ return neon(connectionString);
+ }
+
  //#endregion
  //#region src/db/validation.ts
  /**
@@ -14397,12 +14621,12 @@ async function createDatabaseFromConnection(rawConnection, options) {
  case "sqlite":
  if ("aggregate" in rawConnection && !("createSession" in rawConnection)) dialect = new kysely.SqliteDialect({ database: rawConnection });
  else if ("fileControl" in rawConnection) {
- const { BunSqliteDialect } = await Promise.resolve().then(() => require("./bun-sqlite-dialect-DQa87s1D.cjs"));
+ const { BunSqliteDialect } = await Promise.resolve().then(() => require("./bun-sqlite-dialect-lNniX_lj.cjs"));
  dialect = new BunSqliteDialect({ database: rawConnection });
  } else if ("createSession" in rawConnection && typeof window === "undefined") try {
  const { DatabaseSync } = await import("node:sqlite");
  if (rawConnection instanceof DatabaseSync) {
- const { NodeSqliteDialect } = await Promise.resolve().then(() => require("./node-sqlite-dialect-DLKHH0RE.cjs"));
+ const { NodeSqliteDialect } = await Promise.resolve().then(() => require("./node-sqlite-dialect-BPGejxOC.cjs"));
  dialect = new NodeSqliteDialect({ database: rawConnection });
  }
  } catch {}
@@ -14420,7 +14644,7 @@ async function createDatabaseFromConnection(rawConnection, options) {
  break;
  case "neon": {
  if (typeof rawConnection === "function") {
- const { createNeonDialect: createNeonDialect$1 } = await Promise.resolve().then(() => require("./neon-dialect-BBGTCnek.cjs"));
+ const { createNeonDialect: createNeonDialect$1 } = await Promise.resolve().then(() => require("./neon-dialect-C9_0GrsB.cjs"));
  dialect = createNeonDialect$1(rawConnection);
  break;
  }
@@ -14428,7 +14652,7 @@ async function createDatabaseFromConnection(rawConnection, options) {
  if (!connectionString) throw new Error("Neon connection string is required. Pass it directly as the database option or set one of: NEON_CONNECTION_STRING, NEON_PG_URL, DATABASE_URL, POSTGRES_URL");
  const { neon } = await import("@neondatabase/serverless");
  const sql$1 = neon(connectionString);
- const { createNeonDialect } = await Promise.resolve().then(() => require("./neon-dialect-BBGTCnek.cjs"));
+ const { createNeonDialect } = await Promise.resolve().then(() => require("./neon-dialect-C9_0GrsB.cjs"));
  dialect = createNeonDialect(sql$1);
  break;
  }
@@ -14500,6 +14724,12 @@ Object.defineProperty(exports, 'createDatabaseFromConnection', {
  return createDatabaseFromConnection;
  }
  });
+ Object.defineProperty(exports, 'createNeonSqlFunction', {
+ enumerable: true,
+ get: function () {
+ return createNeonSqlFunction;
+ }
+ });
  Object.defineProperty(exports, 'datasetRecordsSchema', {
  enumerable: true,
  get: function () {
@@ -14542,6 +14772,12 @@ Object.defineProperty(exports, 'environmentsSchema', {
  return environmentsSchema;
  }
  });
+ Object.defineProperty(exports, 'generatePostgresSchemaSQL', {
+ enumerable: true,
+ get: function () {
+ return generatePostgresSchemaSQL;
+ }
+ });
  Object.defineProperty(exports, 'getAuthClientOptions', {
  enumerable: true,
  get: function () {
@@ -14656,6 +14892,12 @@ Object.defineProperty(exports, 'runAutoMigrations', {
  return runAutoMigrations;
  }
  });
+ Object.defineProperty(exports, 'runSchemaSQL', {
+ enumerable: true,
+ get: function () {
+ return runSchemaSQL;
+ }
+ });
  Object.defineProperty(exports, 'schemas', {
  enumerable: true,
  get: function () {