pecunia-root 0.2.7 → 0.2.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
  import { DBFieldAttribute, DBFieldType, KyselyDatabaseDialectType, PecuniaOptions } from "pecunia-core";
 
  //#region src/db/migrations/index.d.ts
- declare function matchType(columnDataType: string, fieldType: DBFieldType, dbType: KyselyDatabaseDialectType): boolean;
+ declare function matchType(columnDataType: string, fieldType: DBFieldType, dbType: KyselyDatabaseDialectType): any;
  declare function getMigrations(config: PecuniaOptions): Promise<{
  toBeCreated: {
  table: string;
@@ -86,17 +86,14 @@ function matchType(columnDataType, fieldType, dbType) {
  function normalize(type) {
  return type.toLowerCase().split("(")[0].trim();
  }
- if (fieldType === "string[]" || fieldType === "number[]") {
- const normalized = columnDataType.toLowerCase();
- if (normalized.includes("[]")) return true;
- return normalized.includes("json");
- }
- let effectiveType;
- if (fieldType === "uuid") effectiveType = "string";
- else if (Array.isArray(fieldType)) effectiveType = "string";
- else effectiveType = fieldType;
- return (map[dbType][effectiveType]?.map((t) => t.toLowerCase()))?.includes(normalize(columnDataType)) ?? false;
+ if (fieldType === "string[]" || fieldType === "number[]") return columnDataType.toLowerCase().includes("json");
+ const types = map[dbType];
+ return (Array.isArray(fieldType) ? types["string"].map((t) => t.toLowerCase()) : types[fieldType].map((t) => t.toLowerCase())).includes(normalize(columnDataType));
  }
+ /**
+ * Get the current PostgreSQL schema (search_path) for the database connection
+ * Returns the first schema in the search_path, defaulting to 'public' if not found
+ */
  async function getPostgresSchema(db) {
  try {
  const result = await sql`SHOW search_path`.execute(db);
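
Note on the rewritten matchType above: under 0.2.7, "string[]"/"number[]" fields matched both native Postgres array columns and JSON columns, and a "uuid" field type was folded into the string bucket; under 0.2.8 only JSON-ish column types match for array fields, and the per-dialect buckets are indexed directly. A minimal sketch of the resulting behaviour, inferred from the implementation shown in this diff (the import path is illustrative, not confirmed by the package docs):

```ts
// Behaviour implied by the 0.2.8 matchType shown above (illustrative import path).
import { matchType } from "pecunia-root";

// Array fields now only match JSON-ish column types.
matchType("jsonb", "string[]", "postgres");   // true  ("jsonb" contains "json")
matchType("text[]", "string[]", "postgres");  // false (native arrays no longer match)

// Scalar fields are matched against the per-dialect buckets, with any
// length suffix stripped: "varchar(255)" normalizes to "varchar".
matchType("varchar(255)", "string", "mysql"); // true
matchType("timestamptz", "date", "postgres"); // true
```
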
@@ -105,7 +102,7 @@ async function getPostgresSchema(db) {
  return "public";
  }
  async function getMigrations(config) {
- const billingEngineSchema = getSchema(config);
+ const pecuniaSchema = getSchema(config);
  let { kysely: db, databaseType: dbType } = await createKyselyAdapter(config);
  if (!dbType) {
  console.warn("Could not determine database type, defaulting to sqlite. Please provide a type in the database options to avoid this.");
@@ -119,42 +116,52 @@ async function getMigrations(config) {
  if (dbType === "postgres") {
  currentSchema = await getPostgresSchema(db);
  console.debug(`PostgreSQL migration: Using schema '${currentSchema}' (from search_path)`);
+ try {
+ if (!(await sql`
+ SELECT schema_name
+ FROM information_schema.schemata
+ WHERE schema_name = ${currentSchema}
+ `.execute(db)).rows[0]) console.warn(`Schema '${currentSchema}' does not exist. Tables will be inspected from available schemas. Consider creating the schema first or checking your database configuration.`);
+ } catch (error) {
+ console.debug(`Could not verify schema existence: ${error instanceof Error ? error.message : String(error)}`);
+ }
  }
  const allTableMetadata = await db.introspection.getTables();
  let tableMetadata = allTableMetadata;
  if (dbType === "postgres") try {
  const tablesInSchema = await sql`
- SELECT table_name
- FROM information_schema.tables
- WHERE table_schema = ${currentSchema}
- AND table_type = 'BASE TABLE'
- `.execute(db);
+ SELECT table_name
+ FROM information_schema.tables
+ WHERE table_schema = ${currentSchema}
+ AND table_type = 'BASE TABLE'
+ `.execute(db);
  const tableNamesInSchema = new Set(tablesInSchema.rows.map((row) => row.table_name));
  tableMetadata = allTableMetadata.filter((table) => table.schema === currentSchema && tableNamesInSchema.has(table.name));
+ console.debug(`Found ${tableMetadata.length} table(s) in schema '${currentSchema}': ${tableMetadata.map((t) => t.name).join(", ") || "(none)"}`);
  } catch (error) {
  console.warn(`Could not filter tables by schema. Using all discovered tables. Error: ${error instanceof Error ? error.message : String(error)}`);
  }
  const toBeCreated = [];
  const toBeAdded = [];
- for (const [key, value] of Object.entries(billingEngineSchema)) {
+ for (const [key, value] of Object.entries(pecuniaSchema)) {
  const table = tableMetadata.find((t) => t.name === key);
  if (!table) {
- const existing = toBeCreated.findIndex((t) => t.table === key);
+ const tIndex = toBeCreated.findIndex((t) => t.table === key);
  const tableData = {
  table: key,
  fields: value.fields,
  order: value.order || Infinity
  };
  const insertIndex = toBeCreated.findIndex((t) => (t.order || Infinity) > tableData.order);
- if (insertIndex === -1) if (existing === -1) toBeCreated.push(tableData);
- else toBeCreated[existing].fields = {
- ...toBeCreated[existing].fields,
+ if (insertIndex === -1) if (tIndex === -1) toBeCreated.push(tableData);
+ else toBeCreated[tIndex].fields = {
+ ...toBeCreated[tIndex].fields,
  ...value.fields
  };
  else toBeCreated.splice(insertIndex, 0, tableData);
  continue;
  }
- const toBeAddedFields = {};
+ let toBeAddedFields = {};
  for (const [fieldName, field] of Object.entries(value.fields)) {
  const column = table.columns.find((c) => c.name === fieldName);
  if (!column) {
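
The schema-existence check added above only logs a warning; getMigrations still proceeds against whatever schemas are visible. If the migration should target a specific schema, it has to be created (and put on the search_path) before calling getMigrations. A minimal sketch using Kysely, with a placeholder schema name and connection:

```ts
import { Kysely, PostgresDialect, sql } from "kysely";
import { Pool } from "pg";

// Illustrative connection; any Kysely instance passed to the migration code works.
const db = new Kysely<unknown>({
  dialect: new PostgresDialect({
    pool: new Pool({ connectionString: process.env.DATABASE_URL }),
  }),
});

// Create the target schema up front so the warning above never fires,
// then make it the first entry on the search_path for this connection.
await sql`CREATE SCHEMA IF NOT EXISTS pecunia`.execute(db);
await sql`SET search_path TO pecunia, public`.execute(db);
```
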
@@ -162,7 +169,7 @@ async function getMigrations(config) {
  continue;
  }
  if (matchType(column.dataType, field.type, dbType)) continue;
- console.warn(`Field ${fieldName} in table ${key} has a different type in the database. Expected ${field.type} but got ${column.dataType}.`);
+ else console.warn(`Field ${fieldName} in table ${key} has a different type in the database. Expected ${field.type} but got ${column.dataType}.`);
  }
  if (Object.keys(toBeAddedFields).length > 0) toBeAdded.push({
  table: key,
@@ -205,12 +212,6 @@ async function getMigrations(config) {
  mysql: "json",
  mssql: "varchar(8000)"
  },
- uuid: {
- postgres: "uuid",
- mysql: "varchar(36)",
- mssql: "varchar(36)",
- sqlite: "text"
- },
  id: {
  postgres: "uuid",
  mysql: "varchar(36)",
@@ -225,13 +226,13 @@ async function getMigrations(config) {
  },
  "string[]": {
  sqlite: "text",
- postgres: sql`text[]`,
+ postgres: "jsonb",
  mysql: "json",
  mssql: "varchar(8000)"
  },
  "number[]": {
  sqlite: "text",
- postgres: sql`integer[]`,
+ postgres: "jsonb",
  mysql: "json",
  mssql: "varchar(8000)"
  }
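
Note that getMigrations only warns on type mismatches and never alters existing columns, so databases created with 0.2.7 (where "string[]"/"number[]" mapped to text[]/integer[]) keep their native array columns and will now be reported as mismatches by matchType. Converting them to the new jsonb mapping would be a manual step; a possible sketch (my assumption, not something the package performs), with placeholder table and column names:

```ts
import { sql, type Kysely } from "kysely";

// Hypothetical one-off conversion for a column created as text[] under 0.2.7.
// "subscription" and "tags" are placeholders for your own table/column names.
async function convertArrayColumnToJsonb(db: Kysely<unknown>) {
  await sql`
    ALTER TABLE subscription
    ALTER COLUMN tags TYPE jsonb
    USING to_jsonb(tags)
  `.execute(db);
}
```
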
@@ -241,7 +242,7 @@ async function getMigrations(config) {
  return typeMap.foreignKeyId[provider];
  }
  if (Array.isArray(type)) return "text";
- if (!(type in typeMap)) throw new Error(`Unsupported field type '${String(type)}' for field '${fieldName}'.`);
+ if (!(type in typeMap)) throw new Error(`Unsupported field type '${String(type)}' for field '${fieldName}'. Allowed types are: string, number, boolean, date, string[], number[]. If you need to store structured data, store it as a JSON string (type: "string") or split it into primitive fields. See https://better-auth.com/docs/advanced/schema#additional-fields`);
  return typeMap[type][provider];
  }
  const getModelName = initGetModelName({
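
Together with the uuid entry dropped from typeMap earlier in this diff, the expanded error message now enumerates the accepted field types. A hedged sketch of field definitions under 0.2.8 (names are made up; the attribute shape follows the properties this file reads off each field):

```ts
import type { DBFieldAttribute } from "pecunia-core";

// Accepted per the new error message: string, number, boolean, date, string[], number[].
const fields: Record<string, DBFieldAttribute> = {
  externalId: { type: "string", required: true, unique: true }, // postgres: text, mysql: varchar(255)
  amount: { type: "number", bigint: true },                     // bigint on every dialect
  tags: { type: "string[]" },                                   // postgres: jsonb (was text[] in 0.2.7)
};

// A field declared as { type: "uuid" } would now hit the "Unsupported field type"
// error above; UUID-shaped values are stored as type: "string" instead, while
// "id" columns and *_id references still use the id/foreignKeyId typeMap entries.
```
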
@@ -262,19 +263,24 @@ async function getMigrations(config) {
  return `${model}.${field}`;
  }
  }
- const applyColumnOptions = (field, col) => {
- col = field.required === true ? col.notNull() : col;
- if (field.references) col = col.references(getReferencePath(field.references.model, field.references.field)).onDelete(field.references.onDelete || "no action");
- if (field.unique) col = col.unique();
- if (field.type === "date" && typeof field.defaultValue === "function" && (dbType === "postgres" || dbType === "mysql" || dbType === "mssql")) col = dbType === "mysql" ? col.defaultTo(sql`CURRENT_TIMESTAMP(3)`) : col.defaultTo(sql`CURRENT_TIMESTAMP`);
- return col;
- };
  if (toBeAdded.length) for (const table of toBeAdded) for (const [fieldName, field] of Object.entries(table.fields)) {
  const type = getType(field, fieldName);
- if (field.index) migrations.push(db.schema.alterTable(table.table).addIndex(`${table.table}_${fieldName}_idx`));
- migrations.push(db.schema.alterTable(table.table).addColumn(fieldName, type, (col) => applyColumnOptions(field, col)));
+ let builder = db.schema.alterTable(table.table);
+ if (field.index) {
+ const index = db.schema.alterTable(table.table).addIndex(`${table.table}_${fieldName}_idx`);
+ migrations.push(index);
+ }
+ const built = builder.addColumn(fieldName, type, (col) => {
+ col = field.required !== false ? col.notNull() : col;
+ if (field.references) col = col.references(getReferencePath(field.references.model, field.references.field)).onDelete(field.references.onDelete || "cascade");
+ if (field.unique) col = col.unique();
+ if (field.type === "date" && typeof field.defaultValue === "function" && (dbType === "postgres" || dbType === "mysql" || dbType === "mssql")) if (dbType === "mysql") col = col.defaultTo(sql`CURRENT_TIMESTAMP(3)`);
+ else col = col.defaultTo(sql`CURRENT_TIMESTAMP`);
+ return col;
+ });
+ migrations.push(built);
  }
- const toBeIndexed = [];
+ let toBeIndexed = [];
  if (toBeCreated.length) for (const table of toBeCreated) {
  const idType = getType({ type: "string" }, "id");
  let dbT = db.schema.createTable(table.table).addColumn("id", idType, (col) => {
@@ -283,15 +289,22 @@ async function getMigrations(config) {
  });
  for (const [fieldName, field] of Object.entries(table.fields)) {
  const type = getType(field, fieldName);
- dbT = dbT.addColumn(fieldName, type, (col) => applyColumnOptions(field, col));
+ dbT = dbT.addColumn(fieldName, type, (col) => {
+ col = field.required !== false ? col.notNull() : col;
+ if (field.references) col = col.references(getReferencePath(field.references.model, field.references.field)).onDelete(field.references.onDelete || "cascade");
+ if (field.unique) col = col.unique();
+ if (field.type === "date" && typeof field.defaultValue === "function" && (dbType === "postgres" || dbType === "mysql" || dbType === "mssql")) if (dbType === "mysql") col = col.defaultTo(sql`CURRENT_TIMESTAMP(3)`);
+ else col = col.defaultTo(sql`CURRENT_TIMESTAMP`);
+ return col;
+ });
  if (field.index) {
- const idx = db.schema.createIndex(`${table.table}_${fieldName}_${field.unique ? "uidx" : "idx"}`).on(table.table).columns([fieldName]);
- toBeIndexed.push(field.unique ? idx.unique() : idx);
+ const builder = db.schema.createIndex(`${table.table}_${fieldName}_${field.unique ? "uidx" : "idx"}`).on(table.table).columns([fieldName]);
+ toBeIndexed.push(field.unique ? builder.unique() : builder);
  }
  }
  migrations.push(dbT);
  }
- for (const index of toBeIndexed) migrations.push(index);
+ if (toBeIndexed.length) for (const index of toBeIndexed) migrations.push(index);
  async function runMigrations() {
  for (const migration of migrations) await migration.execute();
  }
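
Relative to the 0.2.7 applyColumnOptions helper removed above, the inlined column builder changes two defaults: a column is now NOT NULL unless required is explicitly false (previously only when required === true), and foreign keys default to onDelete "cascade" (previously "no action"). A sketch of the effect on a hypothetical referencing field:

```ts
// Hypothetical field definition; model and field names are placeholders.
const customerId = {
  type: "string",
  references: { model: "customer", field: "id" }, // no explicit required / onDelete
};

// 0.2.7 (applyColumnOptions): nullable column, ON DELETE NO ACTION
// 0.2.8 (inlined builder):    NOT NULL column,  ON DELETE CASCADE

// To keep the 0.2.7 behaviour under 0.2.8, spell both options out:
const customerIdExplicit = {
  type: "string",
  required: false,
  references: { model: "customer", field: "id", onDelete: "no action" },
};
```
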
@@ -1 +1 @@
- {"version":3,"file":"index.mjs","names":[],"sources":["../../../src/db/migrations/index.ts"],"sourcesContent":["import type { PecuniaOptions } from \"pecunia-core\";\nimport type { DBFieldAttribute, DBFieldType } from \"pecunia-core\";\nimport { initGetFieldName, initGetModelName } from \"pecunia-core\";\nimport type {\n AlterTableBuilder,\n AlterTableColumnAlteringBuilder,\n ColumnDataType,\n CreateIndexBuilder,\n CreateTableBuilder,\n Kysely,\n RawBuilder,\n} from \"kysely\";\nimport { sql } from \"kysely\";\nimport { createKyselyAdapter } from \"../../adapters/kysely-adapter/dialect\";\nimport type { KyselyDatabaseDialectType } from \"pecunia-core\";\nimport { getSchema } from \"../schema/get-schema\";\nimport { getPaymentTables } from \"pecunia-core\";\n\ntype DbTypeBuckets = Record<\n \"string\" | \"number\" | \"boolean\" | \"date\" | \"json\",\n string[]\n>;\n\nconst postgresMap = {\n string: [\"character varying\", \"varchar\", \"text\", \"uuid\"],\n number: [\n \"int4\",\n \"integer\",\n \"bigint\",\n \"smallint\",\n \"numeric\",\n \"real\",\n \"double precision\",\n ],\n boolean: [\"bool\", \"boolean\"],\n date: [\"timestamptz\", \"timestamp\", \"date\"],\n json: [\"json\", \"jsonb\"],\n};\n\nconst mysqlMap = {\n string: [\"varchar\", \"text\", \"uuid\"],\n number: [\n \"integer\",\n \"int\",\n \"bigint\",\n \"smallint\",\n \"decimal\",\n \"float\",\n \"double\",\n ],\n boolean: [\"boolean\", \"tinyint\"],\n date: [\"timestamp\", \"datetime\", \"date\"],\n json: [\"json\"],\n};\n\nconst sqliteMap = {\n string: [\"TEXT\"],\n number: [\"INTEGER\", \"REAL\"],\n boolean: [\"INTEGER\", \"BOOLEAN\"],\n date: [\"DATE\", \"INTEGER\"],\n json: [\"TEXT\"],\n};\n\nconst mssqlMap = {\n string: [\"varchar\", \"nvarchar\", \"uniqueidentifier\"],\n number: [\"int\", \"bigint\", \"smallint\", \"decimal\", \"float\", \"double\"],\n boolean: [\"bit\", \"smallint\"],\n date: [\"datetime2\", \"date\", \"datetime\"],\n json: [\"varchar\", \"nvarchar\"],\n};\n\nconst map = {\n postgres: postgresMap,\n mysql: mysqlMap,\n sqlite: sqliteMap,\n mssql: mssqlMap,\n};\n\nexport function matchType(\n columnDataType: string,\n fieldType: DBFieldType,\n dbType: KyselyDatabaseDialectType,\n) {\n function normalize(type: string) {\n return type.toLowerCase().split(\"(\")[0]!.trim();\n }\n\n if (fieldType === \"string[]\" || fieldType === \"number[]\") {\n const normalized = columnDataType.toLowerCase();\n if (normalized.includes(\"[]\")) return true;\n return normalized.includes(\"json\");\n }\n\n // uuid should be treated as string for matching purposes\n // Also handle array types (which are already filtered above, but TypeScript doesn't know)\n let effectiveType: keyof DbTypeBuckets;\n if (fieldType === \"uuid\") {\n effectiveType = \"string\";\n } else if (Array.isArray(fieldType)) {\n effectiveType = \"string\";\n } else {\n effectiveType = fieldType as keyof DbTypeBuckets;\n }\n\n const types: Partial<DbTypeBuckets> = map[dbType]!;\n const expected = types[effectiveType]?.map((t: string) => t.toLowerCase());\n\n return expected?.includes(normalize(columnDataType)) ?? 
false;\n}\n\nasync function getPostgresSchema(db: Kysely<unknown>): Promise<string> {\n try {\n const result = await sql<{ search_path: string }>`SHOW search_path`.execute(\n db,\n );\n\n if (result.rows[0]?.search_path) {\n const schemas = result.rows[0].search_path\n .split(\",\")\n .map((s) => s.trim())\n .map((s) => s.replace(/^[\"']|[\"']$/g, \"\"))\n .filter((s) => !s.startsWith(\"$\"));\n\n return schemas[0] || \"public\";\n }\n } catch {\n // fall back\n }\n\n return \"public\";\n}\n\nexport async function getMigrations(config: PecuniaOptions) {\n const billingEngineSchema = getSchema(config);\n\n let { kysely: db, databaseType: dbType } = await createKyselyAdapter(config);\n\n if (!dbType) {\n console.warn(\n \"Could not determine database type, defaulting to sqlite. Please provide a type in the database options to avoid this.\",\n );\n dbType = \"sqlite\";\n }\n\n if (!db) {\n console.error(\n \"Only kysely adapter is supported for migrations. You can use `generate` command to generate the schema, if you're using a different adapter.\",\n );\n process.exit(1);\n }\n\n let currentSchema = \"public\";\n if (dbType === \"postgres\") {\n currentSchema = await getPostgresSchema(db);\n console.debug(\n `PostgreSQL migration: Using schema '${currentSchema}' (from search_path)`,\n );\n }\n\n const allTableMetadata = await db.introspection.getTables();\n\n let tableMetadata = allTableMetadata;\n if (dbType === \"postgres\") {\n try {\n const tablesInSchema = await sql<{ table_name: string }>`\n SELECT table_name\n FROM information_schema.tables\n WHERE table_schema = ${currentSchema}\n AND table_type = 'BASE TABLE'\n `.execute(db);\n\n const tableNamesInSchema = new Set(\n tablesInSchema.rows.map((row) => row.table_name),\n );\n\n tableMetadata = allTableMetadata.filter(\n (table) =>\n table.schema === currentSchema && tableNamesInSchema.has(table.name),\n );\n } catch (error) {\n console.warn(\n `Could not filter tables by schema. Using all discovered tables. Error: ${\n error instanceof Error ? error.message : String(error)\n }`,\n );\n }\n }\n\n const toBeCreated: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[] = [];\n\n const toBeAdded: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[] = [];\n\n for (const [key, value] of Object.entries(billingEngineSchema)) {\n const table = tableMetadata.find((t) => t.name === key);\n\n if (!table) {\n const existing = toBeCreated.findIndex((t) => t.table === key);\n const tableData = {\n table: key,\n fields: value.fields,\n order: value.order || Infinity,\n };\n\n const insertIndex = toBeCreated.findIndex(\n (t) => (t.order || Infinity) > tableData.order,\n );\n\n if (insertIndex === -1) {\n if (existing === -1) toBeCreated.push(tableData);\n else {\n toBeCreated[existing]!.fields = {\n ...toBeCreated[existing]!.fields,\n ...value.fields,\n };\n }\n } else {\n toBeCreated.splice(insertIndex, 0, tableData);\n }\n\n continue;\n }\n\n const toBeAddedFields: Record<string, DBFieldAttribute> = {};\n\n for (const [fieldName, field] of Object.entries(value.fields)) {\n const column = table.columns.find((c) => c.name === fieldName);\n\n if (!column) {\n toBeAddedFields[fieldName] = field;\n continue;\n }\n\n if (matchType(column.dataType, field.type, dbType)) continue;\n\n console.warn(\n `Field ${fieldName} in table ${key} has a different type in the database. 
Expected ${field.type} but got ${column.dataType}.`,\n );\n }\n\n if (Object.keys(toBeAddedFields).length > 0) {\n toBeAdded.push({\n table: key,\n fields: toBeAddedFields,\n order: value.order || Infinity,\n });\n }\n }\n\n const migrations: (\n | AlterTableColumnAlteringBuilder\n | ReturnType<AlterTableBuilder[\"addIndex\"]>\n | CreateTableBuilder<string, string>\n | CreateIndexBuilder\n )[] = [];\n\n const useUUIDs = true;\n\n function getType(field: DBFieldAttribute, fieldName: string) {\n const type = field.type;\n const provider = dbType || \"sqlite\";\n\n type StringOnlyUnion<T> = T extends string ? T : never;\n\n const typeMap: Record<\n StringOnlyUnion<DBFieldType> | \"id\" | \"foreignKeyId\",\n Record<KyselyDatabaseDialectType, ColumnDataType | RawBuilder<unknown>>\n > = {\n string: {\n sqlite: \"text\",\n postgres: \"text\",\n mysql: field.unique\n ? \"varchar(255)\"\n : field.references\n ? \"varchar(36)\"\n : field.sortable\n ? \"varchar(255)\"\n : field.index\n ? \"varchar(255)\"\n : \"text\",\n mssql:\n field.unique || field.sortable\n ? \"varchar(255)\"\n : field.references\n ? \"varchar(36)\"\n : \"varchar(8000)\",\n },\n boolean: {\n sqlite: \"integer\",\n postgres: \"boolean\",\n mysql: \"boolean\",\n mssql: \"smallint\",\n },\n number: {\n sqlite: field.bigint ? \"bigint\" : \"integer\",\n postgres: field.bigint ? \"bigint\" : \"integer\",\n mysql: field.bigint ? \"bigint\" : \"integer\",\n mssql: field.bigint ? \"bigint\" : \"integer\",\n },\n date: {\n sqlite: \"date\",\n postgres: \"timestamptz\",\n mysql: \"timestamp(3)\",\n mssql: sql`datetime2(3)`,\n },\n json: {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n uuid: {\n postgres: \"uuid\",\n mysql: \"varchar(36)\",\n mssql: \"varchar(36)\",\n sqlite: \"text\",\n },\n id: {\n postgres: useUUIDs ? \"uuid\" : \"text\",\n mysql: \"varchar(36)\",\n mssql: \"varchar(36)\",\n sqlite: \"text\",\n },\n foreignKeyId: {\n postgres: useUUIDs ? \"uuid\" : \"text\",\n mysql: \"varchar(36)\",\n mssql: \"varchar(36)\",\n sqlite: \"text\",\n },\n \"string[]\": {\n sqlite: \"text\",\n postgres: sql`text[]`,\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n \"number[]\": {\n sqlite: \"text\",\n postgres: sql`integer[]`,\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n } as const;\n\n if (fieldName === \"id\" || field.references?.field === \"id\") {\n if (fieldName === \"id\") return typeMap.id[provider];\n return typeMap.foreignKeyId[provider];\n }\n\n if (Array.isArray(type)) return \"text\";\n if (!(type in typeMap)) {\n throw new Error(\n `Unsupported field type '${String(type)}' for field '${fieldName}'.`,\n );\n }\n\n return typeMap[type][provider];\n }\n\n const getModelName = initGetModelName({\n schema: getPaymentTables(config),\n usePlural: false,\n });\n\n const getFieldName = initGetFieldName({\n schema: getPaymentTables(config),\n usePlural: false,\n });\n\n function getReferencePath(model: string, field: string): string {\n try {\n const modelName = getModelName(model);\n const fieldName = getFieldName({ model, field });\n return `${modelName}.${fieldName}`;\n } catch {\n return `${model}.${field}`;\n }\n }\n\n const applyColumnOptions = (\n field: DBFieldAttribute,\n col: any, // Kysely column builder type is gnarly; keep minimal changes\n ) => {\n // Align with Drizzle generator: only NOT NULL if explicitly required: true\n col = field.required === true ? 
col.notNull() : col;\n\n if (field.references) {\n col = col\n .references(\n getReferencePath(field.references.model, field.references.field),\n )\n // Safer default: don't cascade unless explicitly requested\n .onDelete(field.references.onDelete || \"no action\");\n }\n\n if (field.unique) col = col.unique();\n\n if (\n field.type === \"date\" &&\n typeof field.defaultValue === \"function\" &&\n (dbType === \"postgres\" || dbType === \"mysql\" || dbType === \"mssql\")\n ) {\n col =\n dbType === \"mysql\"\n ? col.defaultTo(sql`CURRENT_TIMESTAMP(3)`)\n : col.defaultTo(sql`CURRENT_TIMESTAMP`);\n }\n\n return col;\n };\n\n if (toBeAdded.length) {\n for (const table of toBeAdded) {\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const type = getType(field, fieldName);\n\n if (field.index) {\n migrations.push(\n db.schema\n .alterTable(table.table)\n .addIndex(`${table.table}_${fieldName}_idx`),\n );\n }\n\n migrations.push(\n db.schema\n .alterTable(table.table)\n .addColumn(fieldName, type, (col) => applyColumnOptions(field, col)),\n );\n }\n }\n }\n\n const toBeIndexed: CreateIndexBuilder[] = [];\n\n if (toBeCreated.length) {\n for (const table of toBeCreated) {\n const idType = getType({ type: \"string\" }, \"id\");\n\n let dbT = db.schema\n .createTable(table.table)\n .addColumn(\"id\", idType, (col) => {\n if (dbType === \"postgres\") {\n return col\n .primaryKey()\n .defaultTo(sql`pg_catalog.gen_random_uuid()`)\n .notNull();\n }\n return col.primaryKey().notNull();\n });\n\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const type = getType(field, fieldName);\n\n dbT = dbT.addColumn(fieldName, type, (col) =>\n applyColumnOptions(field, col),\n );\n\n if (field.index) {\n const idx = db.schema\n .createIndex(\n `${table.table}_${fieldName}_${field.unique ? \"uidx\" : \"idx\"}`,\n )\n .on(table.table)\n .columns([fieldName]);\n\n toBeIndexed.push(field.unique ? idx.unique() : idx);\n }\n }\n\n migrations.push(dbT);\n }\n }\n\n for (const index of toBeIndexed) migrations.push(index);\n\n async function runMigrations() {\n for (const migration of migrations) {\n await migration.execute();\n }\n }\n\n async function compileMigrations() {\n // Debug SQL string only. 
For exact reproduction across dialects, include parameters too.\n const compiled = migrations.map((m) => m.compile().sql);\n return compiled.join(\";\\n\\n\") + \";\";\n }\n\n return { toBeCreated, toBeAdded, runMigrations, compileMigrations };\n}"],"mappings":";;;;;;AAuEA,MAAM,MAAM;CACV,UAjDkB;EAClB,QAAQ;GAAC;GAAqB;GAAW;GAAQ;GAAO;EACxD,QAAQ;GACN;GACA;GACA;GACA;GACA;GACA;GACA;GACD;EACD,SAAS,CAAC,QAAQ,UAAU;EAC5B,MAAM;GAAC;GAAe;GAAa;GAAO;EAC1C,MAAM,CAAC,QAAQ,QAAQ;EACxB;CAoCC,OAlCe;EACf,QAAQ;GAAC;GAAW;GAAQ;GAAO;EACnC,QAAQ;GACN;GACA;GACA;GACA;GACA;GACA;GACA;GACD;EACD,SAAS,CAAC,WAAW,UAAU;EAC/B,MAAM;GAAC;GAAa;GAAY;GAAO;EACvC,MAAM,CAAC,OAAO;EACf;CAqBC,QAnBgB;EAChB,QAAQ,CAAC,OAAO;EAChB,QAAQ,CAAC,WAAW,OAAO;EAC3B,SAAS,CAAC,WAAW,UAAU;EAC/B,MAAM,CAAC,QAAQ,UAAU;EACzB,MAAM,CAAC,OAAO;EACf;CAcC,OAZe;EACf,QAAQ;GAAC;GAAW;GAAY;GAAmB;EACnD,QAAQ;GAAC;GAAO;GAAU;GAAY;GAAW;GAAS;GAAS;EACnE,SAAS,CAAC,OAAO,WAAW;EAC5B,MAAM;GAAC;GAAa;GAAQ;GAAW;EACvC,MAAM,CAAC,WAAW,WAAW;EAC9B;CAOA;AAED,SAAgB,UACd,gBACA,WACA,QACA;CACA,SAAS,UAAU,MAAc;AAC/B,SAAO,KAAK,aAAa,CAAC,MAAM,IAAI,CAAC,GAAI,MAAM;;AAGjD,KAAI,cAAc,cAAc,cAAc,YAAY;EACxD,MAAM,aAAa,eAAe,aAAa;AAC/C,MAAI,WAAW,SAAS,KAAK,CAAE,QAAO;AACtC,SAAO,WAAW,SAAS,OAAO;;CAKpC,IAAI;AACJ,KAAI,cAAc,OAChB,iBAAgB;UACP,MAAM,QAAQ,UAAU,CACjC,iBAAgB;KAEhB,iBAAgB;AAMlB,SAHsC,IAAI,QACnB,gBAAgB,KAAK,MAAc,EAAE,aAAa,CAAC,GAEzD,SAAS,UAAU,eAAe,CAAC,IAAI;;AAG1D,eAAe,kBAAkB,IAAsC;AACrE,KAAI;EACF,MAAM,SAAS,MAAM,GAA4B,mBAAmB,QAClE,GACD;AAED,MAAI,OAAO,KAAK,IAAI,YAOlB,QANgB,OAAO,KAAK,GAAG,YAC5B,MAAM,IAAI,CACV,KAAK,MAAM,EAAE,MAAM,CAAC,CACpB,KAAK,MAAM,EAAE,QAAQ,gBAAgB,GAAG,CAAC,CACzC,QAAQ,MAAM,CAAC,EAAE,WAAW,IAAI,CAAC,CAErB,MAAM;SAEjB;AAIR,QAAO;;AAGT,eAAsB,cAAc,QAAwB;CAC1D,MAAM,sBAAsB,UAAU,OAAO;CAE7C,IAAI,EAAE,QAAQ,IAAI,cAAc,WAAW,MAAM,oBAAoB,OAAO;AAE5E,KAAI,CAAC,QAAQ;AACX,UAAQ,KACN,wHACD;AACD,WAAS;;AAGX,KAAI,CAAC,IAAI;AACP,UAAQ,MACN,+IACD;AACD,UAAQ,KAAK,EAAE;;CAGjB,IAAI,gBAAgB;AACpB,KAAI,WAAW,YAAY;AACzB,kBAAgB,MAAM,kBAAkB,GAAG;AAC3C,UAAQ,MACN,uCAAuC,cAAc,sBACtD;;CAGH,MAAM,mBAAmB,MAAM,GAAG,cAAc,WAAW;CAE3D,IAAI,gBAAgB;AACpB,KAAI,WAAW,WACb,KAAI;EACF,MAAM,iBAAiB,MAAM,GAA2B;;;+BAG/B,cAAc;;QAErC,QAAQ,GAAG;EAEb,MAAM,qBAAqB,IAAI,IAC7B,eAAe,KAAK,KAAK,QAAQ,IAAI,WAAW,CACjD;AAED,kBAAgB,iBAAiB,QAC9B,UACC,MAAM,WAAW,iBAAiB,mBAAmB,IAAI,MAAM,KAAK,CACvE;UACM,OAAO;AACd,UAAQ,KACN,0EACE,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAEzD;;CAIL,MAAM,cAIA,EAAE;CAER,MAAM,YAIA,EAAE;AAER,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,oBAAoB,EAAE;EAC9D,MAAM,QAAQ,cAAc,MAAM,MAAM,EAAE,SAAS,IAAI;AAEvD,MAAI,CAAC,OAAO;GACV,MAAM,WAAW,YAAY,WAAW,MAAM,EAAE,UAAU,IAAI;GAC9D,MAAM,YAAY;IAChB,OAAO;IACP,QAAQ,MAAM;IACd,OAAO,MAAM,SAAS;IACvB;GAED,MAAM,cAAc,YAAY,WAC7B,OAAO,EAAE,SAAS,YAAY,UAAU,MAC1C;AAED,OAAI,gBAAgB,GAClB,KAAI,aAAa,GAAI,aAAY,KAAK,UAAU;OAE9C,aAAY,UAAW,SAAS;IAC9B,GAAG,YAAY,UAAW;IAC1B,GAAG,MAAM;IACV;OAGH,aAAY,OAAO,aAAa,GAAG,UAAU;AAG/C;;EAGF,MAAM,kBAAoD,EAAE;AAE5D,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;GAC7D,MAAM,SAAS,MAAM,QAAQ,MAAM,MAAM,EAAE,SAAS,UAAU;AAE9D,OAAI,CAAC,QAAQ;AACX,oBAAgB,aAAa;AAC7B;;AAGF,OAAI,UAAU,OAAO,UAAU,MAAM,MAAM,OAAO,CAAE;AAEpD,WAAQ,KACN,SAAS,UAAU,YAAY,IAAI,kDAAkD,MAAM,KAAK,WAAW,OAAO,SAAS,GAC5H;;AAGH,MAAI,OAAO,KAAK,gBAAgB,CAAC,SAAS,EACxC,WAAU,KAAK;GACb,OAAO;GACP,QAAQ;GACR,OAAO,MAAM,SAAS;GACvB,CAAC;;CAIN,MAAM,aAKA,EAAE;CAIR,SAAS,QAAQ,OAAyB,WAAmB;EAC3D,MAAM,OAAO,MAAM;EACnB,MAAM,WAAW,UAAU;EAI3B,MAAM,UAGF;GACF,QAAQ;IACN,QAAQ;IACR,UAAU;IACV,OAAO,MAAM,SACT,iBACA,MAAM,aACJ,gBACA,MAAM,WACJ,iBACA,MAAM,QACJ,iBACA;IACV,OACE,MAAM,UAAU,MAAM,WAClB,iBACA,MAAM,aACJ,gBACA;IACT;GACD,SAAS;IACP,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,QAAQ;IACN,QAAQ,MAAM,SAAS
,WAAW;IAClC,UAAU,MAAM,SAAS,WAAW;IACpC,OAAO,MAAM,SAAS,WAAW;IACjC,OAAO,MAAM,SAAS,WAAW;IAClC;GACD,MAAM;IACJ,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO,GAAG;IACX;GACD,MAAM;IACJ,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,MAAM;IACJ,UAAU;IACV,OAAO;IACP,OAAO;IACP,QAAQ;IACT;GACD,IAAI;IACF,UAAqB;IACrB,OAAO;IACP,OAAO;IACP,QAAQ;IACT;GACD,cAAc;IACZ,UAAqB;IACrB,OAAO;IACP,OAAO;IACP,QAAQ;IACT;GACD,YAAY;IACV,QAAQ;IACR,UAAU,GAAG;IACb,OAAO;IACP,OAAO;IACR;GACD,YAAY;IACV,QAAQ;IACR,UAAU,GAAG;IACb,OAAO;IACP,OAAO;IACR;GACF;AAED,MAAI,cAAc,QAAQ,MAAM,YAAY,UAAU,MAAM;AAC1D,OAAI,cAAc,KAAM,QAAO,QAAQ,GAAG;AAC1C,UAAO,QAAQ,aAAa;;AAG9B,MAAI,MAAM,QAAQ,KAAK,CAAE,QAAO;AAChC,MAAI,EAAE,QAAQ,SACZ,OAAM,IAAI,MACR,2BAA2B,OAAO,KAAK,CAAC,eAAe,UAAU,IAClE;AAGH,SAAO,QAAQ,MAAM;;CAGvB,MAAM,eAAe,iBAAiB;EACpC,QAAQ,iBAAiB,OAAO;EAChC,WAAW;EACZ,CAAC;CAEF,MAAM,eAAe,iBAAiB;EACpC,QAAQ,iBAAiB,OAAO;EAChC,WAAW;EACZ,CAAC;CAEF,SAAS,iBAAiB,OAAe,OAAuB;AAC9D,MAAI;AAGF,UAAO,GAFW,aAAa,MAAM,CAEjB,GADF,aAAa;IAAE;IAAO;IAAO,CAAC;UAE1C;AACN,UAAO,GAAG,MAAM,GAAG;;;CAIvB,MAAM,sBACJ,OACA,QACG;AAEH,QAAM,MAAM,aAAa,OAAO,IAAI,SAAS,GAAG;AAEhD,MAAI,MAAM,WACR,OAAM,IACH,WACC,iBAAiB,MAAM,WAAW,OAAO,MAAM,WAAW,MAAM,CACjE,CAEA,SAAS,MAAM,WAAW,YAAY,YAAY;AAGvD,MAAI,MAAM,OAAQ,OAAM,IAAI,QAAQ;AAEpC,MACE,MAAM,SAAS,UACf,OAAO,MAAM,iBAAiB,eAC7B,WAAW,cAAc,WAAW,WAAW,WAAW,SAE3D,OACE,WAAW,UACP,IAAI,UAAU,GAAG,uBAAuB,GACxC,IAAI,UAAU,GAAG,oBAAoB;AAG7C,SAAO;;AAGT,KAAI,UAAU,OACZ,MAAK,MAAM,SAAS,UAClB,MAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;EAC7D,MAAM,OAAO,QAAQ,OAAO,UAAU;AAEtC,MAAI,MAAM,MACR,YAAW,KACT,GAAG,OACA,WAAW,MAAM,MAAM,CACvB,SAAS,GAAG,MAAM,MAAM,GAAG,UAAU,MAAM,CAC/C;AAGH,aAAW,KACT,GAAG,OACA,WAAW,MAAM,MAAM,CACvB,UAAU,WAAW,OAAO,QAAQ,mBAAmB,OAAO,IAAI,CAAC,CACvE;;CAKP,MAAM,cAAoC,EAAE;AAE5C,KAAI,YAAY,OACd,MAAK,MAAM,SAAS,aAAa;EAC/B,MAAM,SAAS,QAAQ,EAAE,MAAM,UAAU,EAAE,KAAK;EAEhD,IAAI,MAAM,GAAG,OACV,YAAY,MAAM,MAAM,CACxB,UAAU,MAAM,SAAS,QAAQ;AAChC,OAAI,WAAW,WACb,QAAO,IACJ,YAAY,CACZ,UAAU,GAAG,+BAA+B,CAC5C,SAAS;AAEd,UAAO,IAAI,YAAY,CAAC,SAAS;IACjC;AAEJ,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;GAC7D,MAAM,OAAO,QAAQ,OAAO,UAAU;AAEtC,SAAM,IAAI,UAAU,WAAW,OAAO,QACpC,mBAAmB,OAAO,IAAI,CAC/B;AAED,OAAI,MAAM,OAAO;IACf,MAAM,MAAM,GAAG,OACZ,YACC,GAAG,MAAM,MAAM,GAAG,UAAU,GAAG,MAAM,SAAS,SAAS,QACxD,CACA,GAAG,MAAM,MAAM,CACf,QAAQ,CAAC,UAAU,CAAC;AAEvB,gBAAY,KAAK,MAAM,SAAS,IAAI,QAAQ,GAAG,IAAI;;;AAIvD,aAAW,KAAK,IAAI;;AAIxB,MAAK,MAAM,SAAS,YAAa,YAAW,KAAK,MAAM;CAEvD,eAAe,gBAAgB;AAC7B,OAAK,MAAM,aAAa,WACtB,OAAM,UAAU,SAAS;;CAI7B,eAAe,oBAAoB;AAGjC,SADiB,WAAW,KAAK,MAAM,EAAE,SAAS,CAAC,IAAI,CACvC,KAAK,QAAQ,GAAG;;AAGlC,QAAO;EAAE;EAAa;EAAW;EAAe;EAAmB"}
+ {"version":3,"file":"index.mjs","names":[],"sources":["../../../src/db/migrations/index.ts"],"sourcesContent":["import type { PecuniaOptions } from \"pecunia-core\";\nimport type { DBFieldAttribute, DBFieldType } from \"pecunia-core\";\nimport { getPaymentTables } from \"pecunia-core\";\nimport { initGetFieldName, initGetModelName } from \"pecunia-core\";\nimport type {\n AlterTableBuilder,\n AlterTableColumnAlteringBuilder,\n ColumnDataType,\n CreateIndexBuilder,\n CreateTableBuilder,\n Kysely,\n RawBuilder,\n} from \"kysely\";\nimport { sql } from \"kysely\";\nimport { createKyselyAdapter } from \"../../adapters/kysely-adapter/dialect\";\nimport type { KyselyDatabaseDialectType } from \"pecunia-core\";\nimport { getSchema } from \"../schema/get-schema\";\n\nconst postgresMap = {\n string: [\"character varying\", \"varchar\", \"text\", \"uuid\"],\n number: [\n \"int4\",\n \"integer\",\n \"bigint\",\n \"smallint\",\n \"numeric\",\n \"real\",\n \"double precision\",\n ],\n boolean: [\"bool\", \"boolean\"],\n date: [\"timestamptz\", \"timestamp\", \"date\"],\n json: [\"json\", \"jsonb\"],\n};\nconst mysqlMap = {\n string: [\"varchar\", \"text\", \"uuid\"],\n number: [\"integer\", \"int\", \"bigint\", \"smallint\", \"decimal\", \"float\", \"double\"],\n boolean: [\"boolean\", \"tinyint\"],\n date: [\"timestamp\", \"datetime\", \"date\"],\n json: [\"json\"],\n};\n\nconst sqliteMap = {\n string: [\"TEXT\"],\n number: [\"INTEGER\", \"REAL\"],\n boolean: [\"INTEGER\", \"BOOLEAN\"], // 0 or 1\n date: [\"DATE\", \"INTEGER\"],\n json: [\"TEXT\"],\n};\n\nconst mssqlMap = {\n string: [\"varchar\", \"nvarchar\", \"uniqueidentifier\"],\n number: [\"int\", \"bigint\", \"smallint\", \"decimal\", \"float\", \"double\"],\n boolean: [\"bit\", \"smallint\"],\n date: [\"datetime2\", \"date\", \"datetime\"],\n json: [\"varchar\", \"nvarchar\"],\n};\n\nconst map = {\n postgres: postgresMap,\n mysql: mysqlMap,\n sqlite: sqliteMap,\n mssql: mssqlMap,\n};\n\nexport function matchType(\n columnDataType: string,\n fieldType: DBFieldType,\n dbType: KyselyDatabaseDialectType,\n) {\n function normalize(type: string) {\n return type.toLowerCase().split(\"(\")[0]!.trim();\n }\n if (fieldType === \"string[]\" || fieldType === \"number[]\") {\n return columnDataType.toLowerCase().includes(\"json\");\n }\n const types = map[dbType]!;\n const expected = Array.isArray(fieldType)\n ? 
types[\"string\"].map((t) => t.toLowerCase())\n : types[fieldType]!.map((t) => t.toLowerCase());\n return expected.includes(normalize(columnDataType));\n}\n\n/**\n * Get the current PostgreSQL schema (search_path) for the database connection\n * Returns the first schema in the search_path, defaulting to 'public' if not found\n */\nasync function getPostgresSchema(db: Kysely<unknown>): Promise<string> {\n try {\n const result = await sql<{ search_path: string }>`SHOW search_path`.execute(\n db,\n );\n if (result.rows[0]?.search_path) {\n // search_path can be a comma-separated list like \"$user, public\" or '\"$user\", public'\n // We want the first non-variable schema\n const schemas = result.rows[0].search_path\n .split(\",\")\n .map((s) => s.trim())\n // Remove quotes and filter out variables like $user\n .map((s) => s.replace(/^[\"']|[\"']$/g, \"\"))\n .filter((s) => !s.startsWith(\"$\"));\n return schemas[0] || \"public\";\n }\n } catch {\n // If query fails, fall back to public schema\n }\n return \"public\";\n}\n\nexport async function getMigrations(config: PecuniaOptions) {\n const pecuniaSchema = getSchema(config);\n\n let { kysely: db, databaseType: dbType } = await createKyselyAdapter(config);\n\n if (!dbType) {\n console.warn(\n \"Could not determine database type, defaulting to sqlite. Please provide a type in the database options to avoid this.\",\n );\n dbType = \"sqlite\";\n }\n\n if (!db) {\n console.error(\n \"Only kysely adapter is supported for migrations. You can use `generate` command to generate the schema, if you're using a different adapter.\",\n );\n process.exit(1);\n }\n\n // For PostgreSQL, detect and log the current schema being used\n let currentSchema = \"public\";\n if (dbType === \"postgres\") {\n currentSchema = await getPostgresSchema(db);\n console.debug(\n `PostgreSQL migration: Using schema '${currentSchema}' (from search_path)`,\n );\n\n // Verify the schema exists\n try {\n const schemaCheck = await sql<{ schema_name: string }>`\n\t\t\t\tSELECT schema_name\n\t\t\t\tFROM information_schema.schemata\n\t\t\t\tWHERE schema_name = ${currentSchema}\n\t\t\t`.execute(db);\n\n if (!schemaCheck.rows[0]) {\n console.warn(\n `Schema '${currentSchema}' does not exist. Tables will be inspected from available schemas. Consider creating the schema first or checking your database configuration.`,\n );\n }\n } catch (error) {\n console.debug(\n `Could not verify schema existence: ${error instanceof Error ? error.message : String(error)}`,\n );\n }\n }\n\n const allTableMetadata = await db.introspection.getTables();\n\n // For PostgreSQL, filter tables to only those in the target schema\n let tableMetadata = allTableMetadata;\n if (dbType === \"postgres\") {\n // Get tables with their schema information\n try {\n const tablesInSchema = await sql<{\n table_name: string;\n }>`\n\t\t\t\tSELECT table_name\n\t\t\t\tFROM information_schema.tables\n\t\t\t\tWHERE table_schema = ${currentSchema}\n\t\t\t\tAND table_type = 'BASE TABLE'\n\t\t\t`.execute(db);\n\n const tableNamesInSchema = new Set(\n tablesInSchema.rows.map((row) => row.table_name),\n );\n\n // Filter to only tables that exist in the target schema\n tableMetadata = allTableMetadata.filter(\n (table) =>\n table.schema === currentSchema && tableNamesInSchema.has(table.name),\n );\n\n console.debug(\n `Found ${tableMetadata.length} table(s) in schema '${currentSchema}': ${tableMetadata.map((t) => t.name).join(\", \") || \"(none)\"}`,\n );\n } catch (error) {\n console.warn(\n `Could not filter tables by schema. 
Using all discovered tables. Error: ${error instanceof Error ? error.message : String(error)}`,\n );\n // Fall back to using all tables if schema filtering fails\n }\n }\n\n const toBeCreated: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[] = [];\n const toBeAdded: {\n table: string;\n fields: Record<string, DBFieldAttribute>;\n order: number;\n }[] = [];\n\n for (const [key, value] of Object.entries(pecuniaSchema)) {\n const table = tableMetadata.find((t) => t.name === key);\n if (!table) {\n const tIndex = toBeCreated.findIndex((t) => t.table === key);\n const tableData = {\n table: key,\n fields: value.fields,\n order: value.order || Infinity,\n };\n\n const insertIndex = toBeCreated.findIndex(\n (t) => (t.order || Infinity) > tableData.order,\n );\n\n if (insertIndex === -1) {\n if (tIndex === -1) {\n toBeCreated.push(tableData);\n } else {\n toBeCreated[tIndex]!.fields = {\n ...toBeCreated[tIndex]!.fields,\n ...value.fields,\n };\n }\n } else {\n toBeCreated.splice(insertIndex, 0, tableData);\n }\n continue;\n }\n\n let toBeAddedFields: Record<string, DBFieldAttribute> = {};\n for (const [fieldName, field] of Object.entries(value.fields)) {\n const column = table.columns.find((c) => c.name === fieldName);\n if (!column) {\n toBeAddedFields[fieldName] = field;\n continue;\n }\n\n if (matchType(column.dataType, field.type, dbType)) {\n continue;\n } else {\n console.warn(\n `Field ${fieldName} in table ${key} has a different type in the database. Expected ${field.type} but got ${column.dataType}.`,\n );\n }\n }\n if (Object.keys(toBeAddedFields).length > 0) {\n toBeAdded.push({\n table: key,\n fields: toBeAddedFields,\n order: value.order || Infinity,\n });\n }\n }\n\n const migrations: (\n | AlterTableColumnAlteringBuilder\n | ReturnType<AlterTableBuilder[\"addIndex\"]>\n | CreateTableBuilder<string, string>\n | CreateIndexBuilder\n )[] = [];\n\n // Default to UUID IDs whenever possible:\n // - Postgres: native UUID + default gen_random_uuid()\n // - MySQL/MSSQL: store UUID as varchar(36) (broad compatibility; Kysely doesn't type uniqueidentifier)\n // - SQLite: store UUID as text\n const useNumberId = false;\n const useUUIDs = true;\n\n function getType(field: DBFieldAttribute, fieldName: string) {\n const type = field.type;\n const provider = dbType || \"sqlite\";\n type StringOnlyUnion<T> = T extends string ? T : never;\n const typeMap: Record<\n StringOnlyUnion<DBFieldType> | \"id\" | \"foreignKeyId\",\n Record<KyselyDatabaseDialectType, ColumnDataType | RawBuilder<unknown>>\n > = {\n string: {\n sqlite: \"text\",\n postgres: \"text\",\n mysql: field.unique\n ? \"varchar(255)\"\n : field.references\n ? \"varchar(36)\"\n : field.sortable\n ? \"varchar(255)\"\n : field.index\n ? \"varchar(255)\"\n : \"text\",\n mssql:\n field.unique || field.sortable\n ? \"varchar(255)\"\n : field.references\n ? \"varchar(36)\"\n : // mssql deprecated `text`, and the alternative is `varchar(max)`.\n // Kysely type interface doesn't support `text`, so we set this to `varchar(8000)` as\n // that's the max length for `varchar`\n \"varchar(8000)\",\n },\n boolean: {\n sqlite: \"integer\",\n postgres: \"boolean\",\n mysql: \"boolean\",\n mssql: \"smallint\",\n },\n number: {\n sqlite: field.bigint ? \"bigint\" : \"integer\",\n postgres: field.bigint ? \"bigint\" : \"integer\",\n mysql: field.bigint ? \"bigint\" : \"integer\",\n mssql: field.bigint ? 
\"bigint\" : \"integer\",\n },\n date: {\n sqlite: \"date\",\n postgres: \"timestamptz\",\n mysql: \"timestamp(3)\",\n mssql: sql`datetime2(3)`,\n },\n json: {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n id: {\n postgres: useNumberId\n ? sql`integer GENERATED BY DEFAULT AS IDENTITY`\n : useUUIDs\n ? \"uuid\"\n : \"text\",\n mysql: useNumberId ? \"integer\" : \"varchar(36)\",\n mssql: useNumberId\n ? \"integer\"\n : \"varchar(36)\" /* should be UNIQUEIDENTIFIER but Kysely doesn't support it */,\n sqlite: useNumberId ? \"integer\" : \"text\",\n },\n foreignKeyId: {\n postgres: useNumberId ? \"integer\" : useUUIDs ? \"uuid\" : \"text\",\n mysql: useNumberId ? \"integer\" : \"varchar(36)\",\n mssql: useNumberId\n ? \"integer\"\n : \"varchar(36)\" /* should be UNIQUEIDENTIFIER but Kysely doesn't support it */,\n sqlite: useNumberId ? \"integer\" : \"text\",\n },\n \"string[]\": {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n \"number[]\": {\n sqlite: \"text\",\n postgres: \"jsonb\",\n mysql: \"json\",\n mssql: \"varchar(8000)\",\n },\n } as const;\n\n if (fieldName === \"id\" || field.references?.field === \"id\") {\n if (fieldName === \"id\") {\n return typeMap.id[provider];\n }\n return typeMap.foreignKeyId[provider];\n }\n\n if (Array.isArray(type)) {\n return \"text\";\n }\n\n if (!(type in typeMap)) {\n throw new Error(\n `Unsupported field type '${String(type)}' for field '${fieldName}'. Allowed types are: string, number, boolean, date, string[], number[]. If you need to store structured data, store it as a JSON string (type: \"string\") or split it into primitive fields. See https://better-auth.com/docs/advanced/schema#additional-fields`,\n );\n }\n\n return typeMap[type][provider];\n }\n\n const getModelName = initGetModelName({\n schema: getPaymentTables(config),\n usePlural: false,\n });\n const getFieldName = initGetFieldName({\n schema: getPaymentTables(config),\n usePlural: false,\n });\n\n // Helper function to safely resolve model and field names, falling back to\n // user-supplied strings for external tables not in the BetterAuth schema\n function getReferencePath(model: string, field: string): string {\n try {\n const modelName = getModelName(model);\n const fieldName = getFieldName({ model, field });\n return `${modelName}.${fieldName}`;\n } catch {\n // If resolution fails (external table), fall back to user-supplied references\n return `${model}.${field}`;\n }\n }\n\n if (toBeAdded.length) {\n for (const table of toBeAdded) {\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const type = getType(field, fieldName);\n let builder = db.schema.alterTable(table.table);\n\n if (field.index) {\n const index = db.schema\n .alterTable(table.table)\n .addIndex(`${table.table}_${fieldName}_idx`);\n migrations.push(index);\n }\n\n const built = builder.addColumn(fieldName, type, (col) => {\n col = field.required !== false ? 
col.notNull() : col;\n if (field.references) {\n col = col\n .references(\n getReferencePath(field.references.model, field.references.field),\n )\n .onDelete(field.references.onDelete || \"cascade\");\n }\n if (field.unique) {\n col = col.unique();\n }\n if (\n field.type === \"date\" &&\n typeof field.defaultValue === \"function\" &&\n (dbType === \"postgres\" || dbType === \"mysql\" || dbType === \"mssql\")\n ) {\n if (dbType === \"mysql\") {\n col = col.defaultTo(sql`CURRENT_TIMESTAMP(3)`);\n } else {\n col = col.defaultTo(sql`CURRENT_TIMESTAMP`);\n }\n }\n return col;\n });\n migrations.push(built);\n }\n }\n }\n\n let toBeIndexed: CreateIndexBuilder[] = [];\n\n if (toBeCreated.length) {\n for (const table of toBeCreated) {\n const idType = getType({ type: \"string\" } as DBFieldAttribute, \"id\");\n let dbT = db.schema\n .createTable(table.table)\n .addColumn(\"id\", idType, (col) => {\n if (dbType === \"postgres\") {\n // Postgres: native UUID primary key with default generator\n return col\n .primaryKey()\n .defaultTo(sql`pg_catalog.gen_random_uuid()`)\n .notNull();\n }\n // Other DBs: String PK, app should generate UUID\n return col.primaryKey().notNull();\n });\n\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const type = getType(field, fieldName);\n dbT = dbT.addColumn(fieldName, type, (col) => {\n col = field.required !== false ? col.notNull() : col;\n if (field.references) {\n col = col\n .references(\n getReferencePath(field.references.model, field.references.field),\n )\n .onDelete(field.references.onDelete || \"cascade\");\n }\n\n if (field.unique) {\n col = col.unique();\n }\n if (\n field.type === \"date\" &&\n typeof field.defaultValue === \"function\" &&\n (dbType === \"postgres\" || dbType === \"mysql\" || dbType === \"mssql\")\n ) {\n if (dbType === \"mysql\") {\n col = col.defaultTo(sql`CURRENT_TIMESTAMP(3)`);\n } else {\n col = col.defaultTo(sql`CURRENT_TIMESTAMP`);\n }\n }\n return col;\n });\n\n if (field.index) {\n const builder = db.schema\n .createIndex(\n `${table.table}_${fieldName}_${field.unique ? \"uidx\" : \"idx\"}`,\n )\n .on(table.table)\n .columns([fieldName]);\n toBeIndexed.push(field.unique ? 
builder.unique() : builder);\n }\n }\n migrations.push(dbT);\n }\n }\n\n // Create indexes after tables\n if (toBeIndexed.length) {\n for (const index of toBeIndexed) {\n migrations.push(index);\n }\n }\n\n async function runMigrations() {\n for (const migration of migrations) {\n await migration.execute();\n }\n }\n\n async function compileMigrations() {\n const compiled = migrations.map((m) => m.compile().sql);\n return compiled.join(\";\\n\\n\") + \";\";\n }\n\n return { toBeCreated, toBeAdded, runMigrations, compileMigrations };\n}"],"mappings":";;;;;;AAyDA,MAAM,MAAM;CACV,UAxCkB;EAClB,QAAQ;GAAC;GAAqB;GAAW;GAAQ;GAAO;EACxD,QAAQ;GACN;GACA;GACA;GACA;GACA;GACA;GACA;GACD;EACD,SAAS,CAAC,QAAQ,UAAU;EAC5B,MAAM;GAAC;GAAe;GAAa;GAAO;EAC1C,MAAM,CAAC,QAAQ,QAAQ;EACxB;CA2BC,OA1Be;EACf,QAAQ;GAAC;GAAW;GAAQ;GAAO;EACnC,QAAQ;GAAC;GAAW;GAAO;GAAU;GAAY;GAAW;GAAS;GAAS;EAC9E,SAAS,CAAC,WAAW,UAAU;EAC/B,MAAM;GAAC;GAAa;GAAY;GAAO;EACvC,MAAM,CAAC,OAAO;EACf;CAqBC,QAnBgB;EAChB,QAAQ,CAAC,OAAO;EAChB,QAAQ,CAAC,WAAW,OAAO;EAC3B,SAAS,CAAC,WAAW,UAAU;EAC/B,MAAM,CAAC,QAAQ,UAAU;EACzB,MAAM,CAAC,OAAO;EACf;CAcC,OAZe;EACf,QAAQ;GAAC;GAAW;GAAY;GAAmB;EACnD,QAAQ;GAAC;GAAO;GAAU;GAAY;GAAW;GAAS;GAAS;EACnE,SAAS,CAAC,OAAO,WAAW;EAC5B,MAAM;GAAC;GAAa;GAAQ;GAAW;EACvC,MAAM,CAAC,WAAW,WAAW;EAC9B;CAOA;AAED,SAAgB,UACd,gBACA,WACA,QACA;CACA,SAAS,UAAU,MAAc;AAC/B,SAAO,KAAK,aAAa,CAAC,MAAM,IAAI,CAAC,GAAI,MAAM;;AAEjD,KAAI,cAAc,cAAc,cAAc,WAC5C,QAAO,eAAe,aAAa,CAAC,SAAS,OAAO;CAEtD,MAAM,QAAQ,IAAI;AAIlB,SAHiB,MAAM,QAAQ,UAAU,GACrC,MAAM,UAAU,KAAK,MAAM,EAAE,aAAa,CAAC,GAC3C,MAAM,WAAY,KAAK,MAAM,EAAE,aAAa,CAAC,EACjC,SAAS,UAAU,eAAe,CAAC;;;;;;AAOrD,eAAe,kBAAkB,IAAsC;AACrE,KAAI;EACF,MAAM,SAAS,MAAM,GAA4B,mBAAmB,QAClE,GACD;AACD,MAAI,OAAO,KAAK,IAAI,YASlB,QANgB,OAAO,KAAK,GAAG,YAC5B,MAAM,IAAI,CACV,KAAK,MAAM,EAAE,MAAM,CAAC,CAEpB,KAAK,MAAM,EAAE,QAAQ,gBAAgB,GAAG,CAAC,CACzC,QAAQ,MAAM,CAAC,EAAE,WAAW,IAAI,CAAC,CACrB,MAAM;SAEjB;AAGR,QAAO;;AAGT,eAAsB,cAAc,QAAwB;CAC1D,MAAM,gBAAgB,UAAU,OAAO;CAEvC,IAAI,EAAE,QAAQ,IAAI,cAAc,WAAW,MAAM,oBAAoB,OAAO;AAE5E,KAAI,CAAC,QAAQ;AACX,UAAQ,KACN,wHACD;AACD,WAAS;;AAGX,KAAI,CAAC,IAAI;AACP,UAAQ,MACN,+IACD;AACD,UAAQ,KAAK,EAAE;;CAIjB,IAAI,gBAAgB;AACpB,KAAI,WAAW,YAAY;AACzB,kBAAgB,MAAM,kBAAkB,GAAG;AAC3C,UAAQ,MACN,uCAAuC,cAAc,sBACtD;AAGD,MAAI;AAOF,OAAI,EANgB,MAAM,GAA4B;;;0BAGlC,cAAc;KACnC,QAAQ,GAAG,EAEO,KAAK,GACpB,SAAQ,KACN,WAAW,cAAc,gJAC1B;WAEI,OAAO;AACd,WAAQ,MACN,sCAAsC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GAC7F;;;CAIL,MAAM,mBAAmB,MAAM,GAAG,cAAc,WAAW;CAG3D,IAAI,gBAAgB;AACpB,KAAI,WAAW,WAEb,KAAI;EACF,MAAM,iBAAiB,MAAM,GAE3B;;;2BAGmB,cAAc;;KAEpC,QAAQ,GAAG;EAEV,MAAM,qBAAqB,IAAI,IAC7B,eAAe,KAAK,KAAK,QAAQ,IAAI,WAAW,CACjD;AAGD,kBAAgB,iBAAiB,QAC9B,UACC,MAAM,WAAW,iBAAiB,mBAAmB,IAAI,MAAM,KAAK,CACvE;AAED,UAAQ,MACN,SAAS,cAAc,OAAO,uBAAuB,cAAc,KAAK,cAAc,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK,IAAI,WACxH;UACM,OAAO;AACd,UAAQ,KACN,0EAA0E,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,GACjI;;CAKL,MAAM,cAIA,EAAE;CACR,MAAM,YAIA,EAAE;AAER,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,cAAc,EAAE;EACxD,MAAM,QAAQ,cAAc,MAAM,MAAM,EAAE,SAAS,IAAI;AACvD,MAAI,CAAC,OAAO;GACV,MAAM,SAAS,YAAY,WAAW,MAAM,EAAE,UAAU,IAAI;GAC5D,MAAM,YAAY;IAChB,OAAO;IACP,QAAQ,MAAM;IACd,OAAO,MAAM,SAAS;IACvB;GAED,MAAM,cAAc,YAAY,WAC7B,OAAO,EAAE,SAAS,YAAY,UAAU,MAC1C;AAED,OAAI,gBAAgB,GAClB,KAAI,WAAW,GACb,aAAY,KAAK,UAAU;OAE3B,aAAY,QAAS,SAAS;IAC5B,GAAG,YAAY,QAAS;IACxB,GAAG,MAAM;IACV;OAGH,aAAY,OAAO,aAAa,GAAG,UAAU;AAE/C;;EAGF,IAAI,kBAAoD,EAAE;AAC1D,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;GAC7D,MAAM,SAAS,MAAM,QAAQ,MAAM,MAAM,EAAE,SAAS,UAAU;AAC9D,OAAI,CAAC,QAAQ;AACX,oBAAgB,aAAa;AAC7B;;AAGF,OAAI,UAAU,OAAO,UAAU,MAAM,MAAM,OAAO,CAChD;OAEA,SAAQ,KACN,SAAS,UAAU,Y
AAY,IAAI,kDAAkD,MAAM,KAAK,WAAW,OAAO,SAAS,GAC5H;;AAGL,MAAI,OAAO,KAAK,gBAAgB,CAAC,SAAS,EACxC,WAAU,KAAK;GACb,OAAO;GACP,QAAQ;GACR,OAAO,MAAM,SAAS;GACvB,CAAC;;CAIN,MAAM,aAKA,EAAE;CASR,SAAS,QAAQ,OAAyB,WAAmB;EAC3D,MAAM,OAAO,MAAM;EACnB,MAAM,WAAW,UAAU;EAE3B,MAAM,UAGF;GACF,QAAQ;IACN,QAAQ;IACR,UAAU;IACV,OAAO,MAAM,SACT,iBACA,MAAM,aACJ,gBACA,MAAM,WACJ,iBACA,MAAM,QACJ,iBACA;IACV,OACE,MAAM,UAAU,MAAM,WAClB,iBACA,MAAM,aACJ,gBAIA;IACT;GACD,SAAS;IACP,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,QAAQ;IACN,QAAQ,MAAM,SAAS,WAAW;IAClC,UAAU,MAAM,SAAS,WAAW;IACpC,OAAO,MAAM,SAAS,WAAW;IACjC,OAAO,MAAM,SAAS,WAAW;IAClC;GACD,MAAM;IACJ,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO,GAAG;IACX;GACD,MAAM;IACJ,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,IAAI;IACF,UAGM;IAEN,OAAiC;IACjC,OAEI;IACJ,QAAkC;IACnC;GACD,cAAc;IACZ,UAA+C;IAC/C,OAAiC;IACjC,OAEI;IACJ,QAAkC;IACnC;GACD,YAAY;IACV,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACD,YAAY;IACV,QAAQ;IACR,UAAU;IACV,OAAO;IACP,OAAO;IACR;GACF;AAED,MAAI,cAAc,QAAQ,MAAM,YAAY,UAAU,MAAM;AAC1D,OAAI,cAAc,KAChB,QAAO,QAAQ,GAAG;AAEpB,UAAO,QAAQ,aAAa;;AAG9B,MAAI,MAAM,QAAQ,KAAK,CACrB,QAAO;AAGT,MAAI,EAAE,QAAQ,SACZ,OAAM,IAAI,MACR,2BAA2B,OAAO,KAAK,CAAC,eAAe,UAAU,iQAClE;AAGH,SAAO,QAAQ,MAAM;;CAGvB,MAAM,eAAe,iBAAiB;EACpC,QAAQ,iBAAiB,OAAO;EAChC,WAAW;EACZ,CAAC;CACF,MAAM,eAAe,iBAAiB;EACpC,QAAQ,iBAAiB,OAAO;EAChC,WAAW;EACZ,CAAC;CAIF,SAAS,iBAAiB,OAAe,OAAuB;AAC9D,MAAI;AAGF,UAAO,GAFW,aAAa,MAAM,CAEjB,GADF,aAAa;IAAE;IAAO;IAAO,CAAC;UAE1C;AAEN,UAAO,GAAG,MAAM,GAAG;;;AAIvB,KAAI,UAAU,OACZ,MAAK,MAAM,SAAS,UAClB,MAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;EAC7D,MAAM,OAAO,QAAQ,OAAO,UAAU;EACtC,IAAI,UAAU,GAAG,OAAO,WAAW,MAAM,MAAM;AAE/C,MAAI,MAAM,OAAO;GACf,MAAM,QAAQ,GAAG,OACd,WAAW,MAAM,MAAM,CACvB,SAAS,GAAG,MAAM,MAAM,GAAG,UAAU,MAAM;AAC9C,cAAW,KAAK,MAAM;;EAGxB,MAAM,QAAQ,QAAQ,UAAU,WAAW,OAAO,QAAQ;AACxD,SAAM,MAAM,aAAa,QAAQ,IAAI,SAAS,GAAG;AACjD,OAAI,MAAM,WACR,OAAM,IACH,WACC,iBAAiB,MAAM,WAAW,OAAO,MAAM,WAAW,MAAM,CACjE,CACA,SAAS,MAAM,WAAW,YAAY,UAAU;AAErD,OAAI,MAAM,OACR,OAAM,IAAI,QAAQ;AAEpB,OACE,MAAM,SAAS,UACf,OAAO,MAAM,iBAAiB,eAC7B,WAAW,cAAc,WAAW,WAAW,WAAW,SAE3D,KAAI,WAAW,QACb,OAAM,IAAI,UAAU,GAAG,uBAAuB;OAE9C,OAAM,IAAI,UAAU,GAAG,oBAAoB;AAG/C,UAAO;IACP;AACF,aAAW,KAAK,MAAM;;CAK5B,IAAI,cAAoC,EAAE;AAE1C,KAAI,YAAY,OACd,MAAK,MAAM,SAAS,aAAa;EAC/B,MAAM,SAAS,QAAQ,EAAE,MAAM,UAAU,EAAsB,KAAK;EACpE,IAAI,MAAM,GAAG,OACV,YAAY,MAAM,MAAM,CACxB,UAAU,MAAM,SAAS,QAAQ;AAChC,OAAI,WAAW,WAEb,QAAO,IACJ,YAAY,CACZ,UAAU,GAAG,+BAA+B,CAC5C,SAAS;AAGd,UAAO,IAAI,YAAY,CAAC,SAAS;IACjC;AAEJ,OAAK,MAAM,CAAC,WAAW,UAAU,OAAO,QAAQ,MAAM,OAAO,EAAE;GAC7D,MAAM,OAAO,QAAQ,OAAO,UAAU;AACtC,SAAM,IAAI,UAAU,WAAW,OAAO,QAAQ;AAC5C,UAAM,MAAM,aAAa,QAAQ,IAAI,SAAS,GAAG;AACjD,QAAI,MAAM,WACR,OAAM,IACH,WACC,iBAAiB,MAAM,WAAW,OAAO,MAAM,WAAW,MAAM,CACjE,CACA,SAAS,MAAM,WAAW,YAAY,UAAU;AAGrD,QAAI,MAAM,OACR,OAAM,IAAI,QAAQ;AAEpB,QACE,MAAM,SAAS,UACf,OAAO,MAAM,iBAAiB,eAC7B,WAAW,cAAc,WAAW,WAAW,WAAW,SAE3D,KAAI,WAAW,QACb,OAAM,IAAI,UAAU,GAAG,uBAAuB;QAE9C,OAAM,IAAI,UAAU,GAAG,oBAAoB;AAG/C,WAAO;KACP;AAEF,OAAI,MAAM,OAAO;IACf,MAAM,UAAU,GAAG,OAChB,YACC,GAAG,MAAM,MAAM,GAAG,UAAU,GAAG,MAAM,SAAS,SAAS,QACxD,CACA,GAAG,MAAM,MAAM,CACf,QAAQ,CAAC,UAAU,CAAC;AACvB,gBAAY,KAAK,MAAM,SAAS,QAAQ,QAAQ,GAAG,QAAQ;;;AAG/D,aAAW,KAAK,IAAI;;AAKxB,KAAI,YAAY,OACd,MAAK,MAAM,SAAS,YAClB,YAAW,KAAK,MAAM;CAI1B,eAAe,gBAAgB;AAC7B,OAAK,MAAM,aAAa,WACtB,OAAM,UAAU,SAAS;;CAI7B,eAAe,oBAAoB;AAEjC,SADiB,WAAW,KAAK,MAAM,EAAE,SAAS,CAAC,IAAI,CACvC,KAAK,QAAQ,GAAG;;AAGlC,QAAO;EAAE;EAAa;EAAW;EAAe;EAAmB"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pecunia-root",
- "version": "0.2.7",
+ "version": "0.2.8",
  "type": "module",
  "license": "MIT",
  "publishConfig": {
@@ -95,7 +95,7 @@
  "jose": "^6.1.0",
  "kysely": "^0.28.5",
  "nanostores": "^1.0.1",
- "pecunia-core": "^0.1.6",
+ "pecunia-core": "^0.1.8",
  "zod": "^4.1.12"
  },
  "devDependencies": {