@llmops/core 0.1.0-beta.7 → 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,5 @@
- import { Kysely, MssqlDialect, MysqlDialect, PostgresDialect, SqliteDialect } from "kysely";
+ import { Kysely, MssqlDialect, MysqlDialect, PostgresDialect, SqliteDialect, sql } from "kysely";
+ import pino from "pino";
 
  //#region rolldown:runtime
  var __defProp = Object.defineProperty;
@@ -9428,7 +9429,7 @@ function initializeContext(params) {
  external: params?.external ?? void 0
  };
  }
- function process(schema, ctx, _params = {
+ function process$1(schema, ctx, _params = {
  path: [],
  schemaPath: []
  }) {
@@ -9458,7 +9459,7 @@ function process(schema, ctx, _params = {
  const parent = schema._zod.parent;
  if (parent) {
  result.ref = parent;
- process(parent, ctx, params);
+ process$1(parent, ctx, params);
  ctx.seen.get(parent).isParent = true;
  } else if (schema._zod.processJSONSchema) schema._zod.processJSONSchema(ctx, result.schema, params);
  else {
@@ -9648,7 +9649,7 @@ const createToJSONSchemaMethod = (schema, processors = {}) => (params) => {
  ...params,
  processors
  });
- process(schema, ctx);
+ process$1(schema, ctx);
  extractDefs(ctx, schema);
  return finalize(ctx, schema);
  };
@@ -9660,7 +9661,7 @@ const createStandardJSONSchemaMethod = (schema, io) => (params) => {
  io,
  processors: {}
  });
- process(schema, ctx);
+ process$1(schema, ctx);
  extractDefs(ctx, schema);
  return finalize(ctx, schema);
  };
@@ -9833,7 +9834,7 @@ const arrayProcessor = (schema, ctx, _json, params) => {
  if (typeof minimum === "number") json$1.minItems = minimum;
  if (typeof maximum === "number") json$1.maxItems = maximum;
  json$1.type = "array";
- json$1.items = process(def.element, ctx, {
+ json$1.items = process$1(def.element, ctx, {
  ...params,
  path: [...params.path, "items"]
  });
@@ -9844,7 +9845,7 @@ const objectProcessor = (schema, ctx, _json, params) => {
  json$1.type = "object";
  json$1.properties = {};
  const shape = def.shape;
- for (const key in shape) json$1.properties[key] = process(shape[key], ctx, {
+ for (const key in shape) json$1.properties[key] = process$1(shape[key], ctx, {
  ...params,
  path: [
  ...params.path,
@@ -9862,7 +9863,7 @@ const objectProcessor = (schema, ctx, _json, params) => {
  if (def.catchall?._zod.def.type === "never") json$1.additionalProperties = false;
  else if (!def.catchall) {
  if (ctx.io === "output") json$1.additionalProperties = false;
- } else if (def.catchall) json$1.additionalProperties = process(def.catchall, ctx, {
+ } else if (def.catchall) json$1.additionalProperties = process$1(def.catchall, ctx, {
  ...params,
  path: [...params.path, "additionalProperties"]
  });
@@ -9870,7 +9871,7 @@ const objectProcessor = (schema, ctx, _json, params) => {
  const unionProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
  const isExclusive = def.inclusive === false;
- const options = def.options.map((x, i) => process(x, ctx, {
+ const options = def.options.map((x, i) => process$1(x, ctx, {
  ...params,
  path: [
  ...params.path,
@@ -9883,7 +9884,7 @@ const unionProcessor = (schema, ctx, json$1, params) => {
  };
  const intersectionProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
- const a = process(def.left, ctx, {
+ const a = process$1(def.left, ctx, {
  ...params,
  path: [
  ...params.path,
@@ -9891,7 +9892,7 @@ const intersectionProcessor = (schema, ctx, json$1, params) => {
  0
  ]
  });
- const b = process(def.right, ctx, {
+ const b = process$1(def.right, ctx, {
  ...params,
  path: [
  ...params.path,
@@ -9908,7 +9909,7 @@ const tupleProcessor = (schema, ctx, _json, params) => {
  json$1.type = "array";
  const prefixPath = ctx.target === "draft-2020-12" ? "prefixItems" : "items";
  const restPath = ctx.target === "draft-2020-12" ? "items" : ctx.target === "openapi-3.0" ? "items" : "additionalItems";
- const prefixItems = def.items.map((x, i) => process(x, ctx, {
+ const prefixItems = def.items.map((x, i) => process$1(x, ctx, {
  ...params,
  path: [
  ...params.path,
@@ -9916,7 +9917,7 @@ const tupleProcessor = (schema, ctx, _json, params) => {
  i
  ]
  }));
- const rest = def.rest ? process(def.rest, ctx, {
+ const rest = def.rest ? process$1(def.rest, ctx, {
  ...params,
  path: [
  ...params.path,
@@ -9944,18 +9945,18 @@ const recordProcessor = (schema, ctx, _json, params) => {
  const json$1 = _json;
  const def = schema._zod.def;
  json$1.type = "object";
- if (ctx.target === "draft-07" || ctx.target === "draft-2020-12") json$1.propertyNames = process(def.keyType, ctx, {
+ if (ctx.target === "draft-07" || ctx.target === "draft-2020-12") json$1.propertyNames = process$1(def.keyType, ctx, {
  ...params,
  path: [...params.path, "propertyNames"]
  });
- json$1.additionalProperties = process(def.valueType, ctx, {
+ json$1.additionalProperties = process$1(def.valueType, ctx, {
  ...params,
  path: [...params.path, "additionalProperties"]
  });
  };
  const nullableProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
- const inner = process(def.innerType, ctx, params);
+ const inner = process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  if (ctx.target === "openapi-3.0") {
  seen.ref = def.innerType;
@@ -9964,27 +9965,27 @@ const nullableProcessor = (schema, ctx, json$1, params) => {
  };
  const nonoptionalProcessor = (schema, ctx, _json, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  };
  const defaultProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  json$1.default = JSON.parse(JSON.stringify(def.defaultValue));
  };
  const prefaultProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  if (ctx.io === "input") json$1._prefault = JSON.parse(JSON.stringify(def.defaultValue));
  };
  const catchProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  let catchValue;
@@ -9998,32 +9999,32 @@ const catchProcessor = (schema, ctx, json$1, params) => {
  const pipeProcessor = (schema, ctx, _json, params) => {
  const def = schema._zod.def;
  const innerType = ctx.io === "input" ? def.in._zod.def.type === "transform" ? def.out : def.in : def.out;
- process(innerType, ctx, params);
+ process$1(innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = innerType;
  };
  const readonlyProcessor = (schema, ctx, json$1, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  json$1.readOnly = true;
  };
  const promiseProcessor = (schema, ctx, _json, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  };
  const optionalProcessor = (schema, ctx, _json, params) => {
  const def = schema._zod.def;
- process(def.innerType, ctx, params);
+ process$1(def.innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = def.innerType;
  };
  const lazyProcessor = (schema, ctx, _json, params) => {
  const innerType = schema._zod.innerType;
- process(innerType, ctx, params);
+ process$1(innerType, ctx, params);
  const seen = ctx.seen.get(schema);
  seen.ref = innerType;
  };
@@ -10078,7 +10079,7 @@ function toJSONSchema(input, params) {
  const defs = {};
  for (const entry of registry$1._idmap.entries()) {
  const [_, schema] = entry;
- process(schema, ctx$1);
+ process$1(schema, ctx$1);
  }
  const schemas$1 = {};
  ctx$1.external = {
@@ -10098,7 +10099,7 @@ function toJSONSchema(input, params) {
  ...params,
  processors: allProcessors
  });
- process(input, ctx);
+ process$1(input, ctx);
  extractDefs(ctx, input);
  return finalize(ctx, input);
  }
@@ -10175,7 +10176,7 @@ var JSONSchemaGenerator = class {
  path: [],
  schemaPath: []
  }) {
- return process(schema, this.ctx, _params);
+ return process$1(schema, this.ctx, _params);
  }
  /**
  * Emit the final JSON Schema after processing.
@@ -10458,7 +10459,7 @@ var core_exports = /* @__PURE__ */ __export({
  parse: () => parse$1,
  parseAsync: () => parseAsync$1,
  prettifyError: () => prettifyError,
- process: () => process,
+ process: () => process$1,
  regexes: () => regexes_exports,
  registry: () => registry,
  safeDecode: () => safeDecode$1,
@@ -12680,6 +12681,275 @@ function parsePartialTableData(table, data) {
  return schemas[table].partial().parse(data);
  }
 
+ //#endregion
+ //#region src/utils/logger.ts
+ const logger = pino();
+
+ //#endregion
+ //#region src/db/migrations.ts
+ const typeMap = {
+ postgres: {
+ uuid: [
+ "character varying",
+ "varchar",
+ "text",
+ "uuid"
+ ],
+ text: [
+ "character varying",
+ "varchar",
+ "text"
+ ],
+ timestamp: [
+ "timestamptz",
+ "timestamp",
+ "date"
+ ],
+ jsonb: ["json", "jsonb"],
+ integer: [
+ "integer",
+ "int4",
+ "int",
+ "smallint",
+ "bigint",
+ "int2",
+ "int8"
+ ],
+ boolean: ["boolean", "bool"]
+ },
+ mysql: {
+ text: ["varchar", "text"],
+ timestamp: [
+ "timestamp",
+ "datetime",
+ "date"
+ ],
+ jsonb: ["json"]
+ },
+ sqlite: {
+ text: ["TEXT"],
+ date: ["DATE", "INTEGER"],
+ integer: ["INTEGER"],
+ boolean: [
+ "INTEGER",
+ "BOOLEAN",
+ "TEXT"
+ ],
+ jsonb: ["TEXT"]
+ },
+ mssql: {
+ varchar: [
+ "varchar",
+ "nvarchar",
+ "uniqueidentifier"
+ ],
+ datetime2: [
+ "datetime2",
+ "date",
+ "datetime"
+ ],
+ jsonb: ["varchar", "nvarchar"]
+ }
+ };
+ function matchType(columnDataType, fieldType, dbType) {
+ const normalize = (type) => type.toLowerCase().split("(")[0].trim();
+ const types = typeMap[dbType];
+ for (const [expectedType, variants] of Object.entries(types)) if (fieldType.toLowerCase().includes(expectedType.toLowerCase())) return variants.some((variant) => variant.toLowerCase() === normalize(columnDataType));
+ return false;
+ }
+ /**
+ * Get the current PostgreSQL schema (search_path) for the database connection
+ */
+ async function getPostgresSchema(db) {
+ try {
+ const result = await sql`SHOW search_path`.execute(db);
+ if (result.rows[0]?.search_path) return result.rows[0].search_path.split(",").map((s) => s.trim()).map((s) => s.replace(/^["']|["']$/g, "")).filter((s) => !s.startsWith("$"))[0] || "public";
+ } catch {}
+ return "public";
+ }
+ async function getMigrations(db, dbType) {
+ let currentSchema = "public";
+ if (dbType === "postgres") {
+ currentSchema = await getPostgresSchema(db);
+ logger.debug(`PostgreSQL migration: Using schema '${currentSchema}'`);
+ }
+ const allTableMetadata = await db.introspection.getTables();
+ let tableMetadata = allTableMetadata;
+ if (dbType === "postgres") try {
+ const tablesInSchema = await sql`
+ SELECT table_name
+ FROM information_schema.tables
+ WHERE table_schema = ${currentSchema}
+ AND table_type = 'BASE TABLE'
+ `.execute(db);
+ const tableNamesInSchema = new Set(tablesInSchema.rows.map((row) => row.table_name));
+ tableMetadata = allTableMetadata.filter((table) => table.schema === currentSchema && tableNamesInSchema.has(table.name));
+ logger.debug(`Found ${tableMetadata.length} table(s) in schema '${currentSchema}'`);
+ } catch (error$45) {
+ logger.warn("Could not filter tables by schema. Using all discovered tables.");
+ }
+ const schema = SCHEMA_METADATA.tables;
+ const toBeCreated = [];
+ const toBeAdded = [];
+ for (const [tableName, tableConfig] of Object.entries(schema)) {
+ const existingTable = tableMetadata.find((t) => t.name === tableName);
+ if (!existingTable) {
+ toBeCreated.push({
+ table: tableName,
+ fields: tableConfig.fields,
+ order: tableConfig.order
+ });
+ continue;
+ }
+ const missingFields = {};
+ for (const [fieldName, fieldConfig] of Object.entries(tableConfig.fields)) {
+ const existingColumn = existingTable.columns.find((c) => c.name === fieldName);
+ if (!existingColumn) {
+ missingFields[fieldName] = fieldConfig;
+ continue;
+ }
+ if (!matchType(existingColumn.dataType, fieldConfig.type, dbType)) logger.warn(`Field ${fieldName} in table ${tableName} has a different type. Expected ${fieldConfig.type} but got ${existingColumn.dataType}.`);
+ }
+ if (Object.keys(missingFields).length > 0) toBeAdded.push({
+ table: tableName,
+ fields: missingFields,
+ order: tableConfig.order
+ });
+ }
+ toBeCreated.sort((a, b) => a.order - b.order);
+ toBeAdded.sort((a, b) => a.order - b.order);
+ const migrations = [];
+ function getColumnType(fieldConfig, fieldName) {
+ const { type } = fieldConfig;
+ return {
+ uuid: {
+ postgres: "uuid",
+ mysql: "varchar(36)",
+ sqlite: "text",
+ mssql: "varchar(36)"
+ },
+ text: {
+ postgres: "text",
+ mysql: fieldConfig.unique ? "varchar(255)" : "text",
+ sqlite: "text",
+ mssql: fieldConfig.unique ? "varchar(255)" : "varchar(8000)"
+ },
+ timestamp: {
+ postgres: "timestamptz",
+ mysql: "timestamp(3)",
+ sqlite: "date",
+ mssql: sql`datetime2(3)`
+ },
+ jsonb: {
+ postgres: "jsonb",
+ mysql: "json",
+ sqlite: "text",
+ mssql: "varchar(8000)"
+ },
+ boolean: {
+ postgres: "boolean",
+ mysql: "boolean",
+ sqlite: "integer",
+ mssql: sql`bit`
+ },
+ integer: {
+ postgres: "integer",
+ mysql: "integer",
+ sqlite: "integer",
+ mssql: "integer"
+ }
+ }[type]?.[dbType] || "text";
+ }
+ for (const table of toBeCreated) {
+ let builder = db.schema.createTable(table.table);
+ for (const [fieldName, fieldConfig] of Object.entries(table.fields)) {
+ const type = getColumnType(fieldConfig, fieldName);
+ builder = builder.addColumn(fieldName, type, (col) => {
+ let c = col;
+ if (fieldName === "id") if (dbType === "postgres") c = c.primaryKey().defaultTo(sql`gen_random_uuid()`).notNull();
+ else c = c.primaryKey().notNull();
+ else if (!fieldConfig.nullable) c = c.notNull();
+ if (fieldConfig.references && fieldName !== "id") {
+ const refTable = fieldConfig.references.table;
+ const refColumn = fieldConfig.references.column;
+ c = c.references(`${refTable}.${refColumn}`).onDelete("cascade");
+ }
+ if (fieldConfig.unique && fieldName !== "id") c = c.unique();
+ if (fieldConfig.default === "now()" && fieldName !== "id" && dbType !== "sqlite") if (dbType === "mysql") c = c.defaultTo(sql`CURRENT_TIMESTAMP(3)`);
+ else c = c.defaultTo(sql`CURRENT_TIMESTAMP`);
+ return c;
+ });
+ }
+ migrations.push(builder);
+ }
+ for (const table of toBeAdded) for (const [fieldName, fieldConfig] of Object.entries(table.fields)) {
+ const type = getColumnType(fieldConfig, fieldName);
+ const builder = db.schema.alterTable(table.table).addColumn(fieldName, type, (col) => {
+ let c = col;
+ if (!fieldConfig.nullable) c = c.notNull();
+ if (fieldConfig.references) {
+ const refTable = fieldConfig.references.table;
+ const refColumn = fieldConfig.references.column;
+ c = c.references(`${refTable}.${refColumn}`).onDelete("cascade");
+ }
+ if (fieldConfig.unique) c = c.unique();
+ if (fieldConfig.default === "now()" && dbType !== "sqlite") if (dbType === "mysql") c = c.defaultTo(sql`CURRENT_TIMESTAMP(3)`);
+ else c = c.defaultTo(sql`CURRENT_TIMESTAMP`);
+ return c;
+ });
+ migrations.push(builder);
+ }
+ async function runMigrations() {
+ for (const migration of migrations) await migration.execute();
+ }
+ async function compileMigrations() {
+ return migrations.map((m) => m.compile().sql).join(";\n\n") + ";";
+ }
+ return {
+ toBeCreated,
+ toBeAdded,
+ runMigrations,
+ compileMigrations,
+ migrations,
+ needsMigration: toBeCreated.length > 0 || toBeAdded.length > 0
+ };
+ }
+ /**
+ * Run migrations if needed based on autoMigrate config
+ * @param db - Kysely database instance
+ * @param dbType - Database type
+ * @param autoMigrate - Auto-migrate configuration
+ * @returns true if migrations were run, false otherwise
+ */
+ async function runAutoMigrations(db, dbType, autoMigrate) {
+ if (!(autoMigrate === true || autoMigrate === "development" && process.env.NODE_ENV === "development")) return {
+ ran: false,
+ tables: [],
+ fields: []
+ };
+ const { toBeCreated, toBeAdded, runMigrations, needsMigration } = await getMigrations(db, dbType);
+ if (!needsMigration) {
+ logger.debug("Auto-migration: No migrations needed");
+ return {
+ ran: false,
+ tables: [],
+ fields: []
+ };
+ }
+ const tables = toBeCreated.map((t) => t.table);
+ const fields = toBeAdded.flatMap((t) => Object.keys(t.fields).map((f) => `${t.table}.${f}`));
+ logger.info(`Auto-migration: Running migrations for ${tables.length} table(s) and ${fields.length} field(s)`);
+ if (tables.length > 0) logger.debug(`Auto-migration: Creating tables: ${tables.join(", ")}`);
+ if (fields.length > 0) logger.debug(`Auto-migration: Adding fields: ${fields.join(", ")}`);
+ await runMigrations();
+ logger.info("Auto-migration: Completed successfully");
+ return {
+ ran: true,
+ tables,
+ fields
+ };
+ }
+
  //#endregion
  //#region src/db/index.ts
  /**
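
The hunk above is the substantive change in this release: a new src/db/migrations.ts module whose getMigrations(db, dbType) introspects the database via Kysely and returns pending DDL builders along with runMigrations, compileMigrations, and a needsMigration flag. The sketch below is editorial and minimal, assuming a Kysely instance obtained from this package's createDatabase/createDatabaseFromConnection helpers; the db variable name is illustrative, and "postgres" is one of the dbType values handled in the code above.

```ts
// Sketch only: preview pending migrations as SQL, then apply them.
// Assumes `db` is a Kysely instance; dbType is "postgres" | "mysql" | "sqlite" | "mssql".
const { needsMigration, toBeCreated, toBeAdded, compileMigrations, runMigrations } =
  await getMigrations(db, "postgres");

if (needsMigration) {
  console.log("Tables to create:", toBeCreated.map((t) => t.table));
  console.log("Columns to add:", toBeAdded.map((t) => Object.keys(t.fields)));
  console.log(await compileMigrations()); // compiled SQL statements, joined with ";"
  await runMigrations();                  // executes each builder in order
}
```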
@@ -12742,4 +13012,4 @@ async function createDatabaseFromConnection(rawConnection) {
  }
 
  //#endregion
- export { number$1 as C, union as D, string$1 as E, unknown as O, literal as S, record as T, zod_default as _, parseTableData as a, array as b, SCHEMA_METADATA as c, environmentSecretsSchema as d, environmentsSchema as f, variantsSchema as g, variantVersionsSchema as h, parsePartialTableData as i, configVariantsSchema as l, targetingRulesSchema as m, createDatabaseFromConnection as n, validatePartialTableData as o, schemas as p, detectDatabaseType as r, validateTableData as s, createDatabase as t, configsSchema as u, _enum as v, object as w, boolean$1 as x, any as y };
+ export { string$1 as A, any as C, number$1 as D, literal as E, unknown as M, object as O, _enum as S, boolean$1 as T, schemas as _, matchType as a, variantsSchema as b, parsePartialTableData as c, validateTableData as d, SCHEMA_METADATA as f, environmentsSchema as g, environmentSecretsSchema as h, getMigrations as i, union as j, record as k, parseTableData as l, configsSchema as m, createDatabaseFromConnection as n, runAutoMigrations as o, configVariantsSchema as p, detectDatabaseType as r, logger as s, createDatabase as t, validatePartialTableData as u, targetingRulesSchema as v, array as w, zod_default as x, variantVersionsSchema as y };
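
The new export map also exposes runAutoMigrations (re-exported above as o) and logger (as s). Per the code added in this version, runAutoMigrations(db, dbType, autoMigrate) only runs when autoMigrate is true, or when it is "development" and NODE_ENV is "development", and it resolves to { ran, tables, fields }. A hedged usage sketch, reusing the illustrative db handle from the previous example:

```ts
// Sketch only: gate auto-migration on configuration, then report what ran.
// autoMigrate === "development" only takes effect when NODE_ENV === "development".
const result = await runAutoMigrations(db, "postgres", "development");
if (result.ran) {
  console.log(`Created tables: ${result.tables.join(", ")}`);
  console.log(`Added fields: ${result.fields.join(", ")}`);
} else {
  console.log("No auto-migration performed");
}
```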