pecunia-cli 0.1.8 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1268 @@
1
+ import fs, { existsSync } from "node:fs";
2
+ import fs$1 from "node:fs/promises";
3
+ import path from "node:path";
4
+ import { getMigrations, getPaymentTables } from "pecunia-root";
5
+ import { capitalizeFirstLetter, getPaymentTables as getPaymentTables$1, initGetFieldName, initGetModelName } from "pecunia-core";
6
+ import prettier from "prettier";
7
+ import { produceSchema } from "@mrleebo/prisma-ast";
8
+
9
+ //#region src/generators/invariants.ts
10
/**
 * Normalize the invariants declared on each table of `schema` into a flat
 * IR list consumed by the SQL emitters.
 *
 * @param {object} schema - Map of tableKey -> table definition; tables may
 *   carry an `invariants` array.
 * @param {{getModelName: Function, getFieldName: Function}} options - Naming
 *   resolvers; `getFieldName` is forwarded to the logic parser.
 * @returns {Array<object>} One IR entry per declared invariant.
 */
function normalizeInvariants(schema, options) {
  const normalized = [];
  for (const [tableKey, table] of Object.entries(schema)) {
    const declared = table.invariants;
    if (!declared) continue;
    const modelName = options.getModelName(tableKey);
    for (const inv of declared) {
      normalized.push({
        id: inv.id,
        description: inv.description,
        modelName,
        // Raw table name as declared on the table definition.
        tableName: table.modelName,
        appliesTo: inv.appliesTo,
        enforcement: inv.enforcement || {},
        // Structured logic extracted from the free-text description.
        logic: parseInvariantLogic(inv.description, tableKey, table, schema, options)
      });
    }
  }
  return normalized;
}
35
/**
 * Parse an invariant description into structured logic.
 *
 * This is a heuristic, pattern-based parser. It recognizes three phrasings:
 *  1. "<model>.<field> determines ... presence" with "<ENUM> => <field> must
 *     be present|null/absent" clauses  -> field_enum_constraint
 *  2. "... must belong to same ..." / "must equal ... when" with
 *     "<t>.<f> must equal <t2>.<f2>"  -> cross_table_ownership
 *  3. "... must match ... of its <relation>"  -> cross_table_equality
 * Anything unrecognized falls back to `{ type: "raw", description }`.
 *
 * @param {string} description - Human-readable invariant text.
 * @param {string} tableKey - Key of the table in the schema map.
 * @param {object} table - Table definition (fields, modelName, ...).
 * @param {object} schema - Full schema map, used to resolve referenced tables.
 * @param {{getModelName: Function, getFieldName: Function}} options
 * @returns {object} Structured logic IR for the invariant.
 */
function parseInvariantLogic(description, tableKey, table, schema, options) {
  const desc = description.toLowerCase();
  // Pattern 1: an enum field controls whether another field must be present.
  if (desc.includes("determines") && desc.includes("presence")) {
    const modeMatch = description.match(/(\w+)\.(\w+)\s+determines/);
    const subMatch = description.match(/(\w+)\s+must be (present|null\/absent)/);
    // BUGFIX: matchAll returns a single-use iterator. It used to be consumed
    // by the enum-collection loop below, leaving the later `whenPresent`
    // scan empty. Materialize it into an array so both loops see all matches.
    const enumMatches = [...description.matchAll(/(\w+)\s*=>\s*(\w+)\s+must be (present|null\/absent)/g)];
    if (modeMatch && subMatch) {
      const [, , fieldName] = modeMatch;
      const [, conditionalField] = subMatch;
      // Resolve case-insensitively against the declared fields.
      const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
      const conditional = Object.keys(table.fields).find((k) => k.toLowerCase() === conditionalField.toLowerCase());
      if (field && conditional) {
        const fieldAttr = table.fields[field];
        let allowedValues = [];
        if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) {
          // The field itself is declared as an enum: use its values.
          allowedValues = fieldAttr.type;
        } else {
          // Otherwise collect the enum values named in the description text.
          const enumValues = new Set();
          for (const match of enumMatches) {
            const val = match[1]?.trim();
            if (val) enumValues.add(val);
          }
          if (enumValues.size > 0) allowedValues = Array.from(enumValues);
          // Last resort: well-known payment-mode values mentioned in prose.
          else allowedValues = ["PAYMENT", "SUBSCRIPTION"].filter((e) => desc.includes(e.toLowerCase()));
        }
        // Does any "<ENUM> => <field> must be present" clause exist?
        let whenPresent = false;
        for (const match of enumMatches) {
          if (match[1] && match[3] === "present") {
            whenPresent = true;
            break;
          }
        }
        // Fallback heuristic for descriptions without explicit "=>" clauses.
        if (!whenPresent && desc.includes("subscription") && desc.includes("must be present")) whenPresent = true;
        return {
          type: "field_enum_constraint",
          field,
          fieldName: options.getFieldName({
            model: tableKey,
            field
          }),
          allowedValues: allowedValues.length > 0 ? allowedValues : ["PAYMENT", "SUBSCRIPTION"],
          conditionalField: {
            field: conditional,
            fieldName: options.getFieldName({
              model: tableKey,
              field: conditional
            }),
            whenPresent
          }
        };
      }
    }
  }
  // Pattern 2: a referenced row must share an owner with this row.
  if (desc.includes("must belong to same") || (desc.includes("must equal") && desc.includes("when"))) {
    const fieldMatch = description.match(/(\w+)\.(\w+)\s+must/);
    const refMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
    if (fieldMatch && refMatch) {
      const [, , fieldName] = fieldMatch;
      const [, refTable, refField, ownerTable, ownerField] = refMatch;
      const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
      // Only applies to fields that are actual foreign keys.
      if (field && table.fields[field]?.references) {
        // Referenced names may be either schema keys or model names.
        const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
        const ownerTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === ownerTable || k === ownerTable);
        if (refTableKey && ownerTableKey) {
          const ownershipField = Object.keys(table.fields).find((k) => k.toLowerCase() === ownerField.toLowerCase());
          if (ownershipField) return {
            type: "cross_table_ownership",
            field,
            fieldName: options.getFieldName({
              model: tableKey,
              field
            }),
            referencedTable: options.getModelName(refTableKey),
            referencedField: options.getFieldName({
              model: refTableKey,
              field: refField
            }),
            ownershipField,
            ownershipFieldName: options.getFieldName({
              model: tableKey,
              field: ownershipField
            })
          };
        }
      }
    }
  }
  // Pattern 3: a field must equal the corresponding field of a related row.
  if (desc.includes("must match") && desc.includes("of its")) {
    const fieldMatch = description.match(/(\w+)\.(\w+)\s+must match/);
    const refMatch = description.match(/of its\s+(\w+)/);
    const equalityMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
    if (fieldMatch && refMatch && equalityMatch) {
      const [, , equalityFieldName] = fieldMatch;
      const [, refFieldName] = refMatch;
      const [, refTable, refEqualityField] = equalityMatch;
      const equalityField = Object.keys(table.fields).find((k) => k.toLowerCase() === equalityFieldName.toLowerCase());
      const refField = Object.keys(table.fields).find((k) => k.toLowerCase() === refFieldName.toLowerCase());
      if (equalityField && refField && table.fields[refField]?.references) {
        const ref = table.fields[refField].references;
        const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
        if (refTableKey) {
          const refEquality = Object.keys(schema[refTableKey].fields).find((k) => k.toLowerCase() === refEqualityField.toLowerCase());
          if (refEquality) return {
            type: "cross_table_equality",
            field: refField,
            fieldName: options.getFieldName({
              model: tableKey,
              field: refField
            }),
            referencedTable: options.getModelName(refTableKey),
            referencedField: options.getFieldName({
              model: refTableKey,
              field: ref.field
            }),
            equalityField,
            equalityFieldName: options.getFieldName({
              model: tableKey,
              field: equalityField
            }),
            referencedEqualityField: refEquality,
            referencedEqualityFieldName: options.getFieldName({
              model: refTableKey,
              field: refEquality
            })
          };
        }
      }
    }
  }
  // Nothing matched: keep the raw description so callers can still report it.
  return {
    type: "raw",
    description
  };
}
176
+
177
+ //#endregion
178
+ //#region src/generators/invariants-sql.ts
179
/**
 * Generate SQL enforcing invariants on Postgres (and compatible databases).
 *
 * CHECK-enforced invariants are emitted first, then trigger-enforced ones;
 * only invariants whose `appliesTo` includes "postgres" are considered.
 *
 * @param {Array<object>} invariants - IR entries from normalizeInvariants.
 * @param {string} [schemaName="public"] - Target database schema.
 * @returns {string} A single SQL script (or a placeholder comment when
 *   nothing is enforceable).
 */
function emitPostgresInvariantSql(invariants, schemaName = "public") {
  const appliesToPostgres = (inv) => inv.appliesTo.includes("postgres");
  const statements = [];
  // CHECK constraints first.
  for (const inv of invariants) {
    if (!appliesToPostgres(inv) || inv.enforcement.postgres !== "check") continue;
    const stmt = generateCheckConstraint(inv, schemaName);
    if (stmt) statements.push(stmt);
  }
  // Then triggers (each yields a function + trigger pair).
  for (const inv of invariants) {
    if (!appliesToPostgres(inv) || inv.enforcement.postgres !== "trigger") continue;
    const stmts = generateTrigger(inv, schemaName);
    if (stmts) statements.push(...stmts);
  }
  if (statements.length === 0) return "-- No invariant enforcement SQL generated\n";
  const header = "-- Invariant enforcement SQL\n-- Generated from schema invariants\n-- DO NOT EDIT MANUALLY - This file is auto-generated\n\n";
  return `${header}${statements.join("\n\n")}\n`;
}
203
/**
 * Generate a CHECK constraint statement for an invariant.
 *
 * Only `field_enum_constraint` logic is supported; any other logic type
 * yields null so the caller can skip it.
 *
 * @param {object} inv - Invariant IR entry.
 * @param {string} schemaName - Target database schema.
 * @returns {string|null} ALTER TABLE ... ADD CONSTRAINT statement, or null.
 */
function generateCheckConstraint(inv, schemaName) {
  const { logic, tableName, id } = inv;
  if (logic.type !== "field_enum_constraint") return null;
  const { fieldName, allowedValues, conditionalField } = logic;
  const constraintName = `${tableName}_${id}_check`;
  const quotedValues = allowedValues.map((val) => `'${val}'`).join(", ");
  if (!conditionalField) {
    // Plain enum-membership check.
    return `-- ${inv.description}
ALTER TABLE ${schemaName}.${tableName}
ADD CONSTRAINT ${constraintName}
CHECK (${fieldName} IN (${quotedValues}));`;
  }
  // Pick which enum value implies the conditional field must be set: prefer
  // the literal SUBSCRIPTION/PAYMENT names, else fall back positionally.
  const subscriptionValue = allowedValues.find((v) => v === "SUBSCRIPTION") || allowedValues.find((v) => v !== "PAYMENT") || allowedValues[0];
  const paymentValue = allowedValues.find((v) => v === "PAYMENT") || allowedValues[0];
  const conditionalCheck = `(
(${fieldName} = '${subscriptionValue}') = (${conditionalField.fieldName} IS NOT NULL) AND
(${fieldName} = '${paymentValue}') = (${conditionalField.fieldName} IS NULL)
)`;
  return `-- ${inv.description}
ALTER TABLE ${schemaName}.${tableName}
ADD CONSTRAINT ${constraintName}
CHECK (
${fieldName} IN (${quotedValues}) AND
${conditionalCheck}
);`;
}
238
/**
 * Generate trigger SQL for an invariant.
 *
 * Supports two logic types; anything else yields null so callers can skip it:
 *  - "cross_table_ownership": the referenced row must have the same owner
 *    column value as the row being written.
 *  - "cross_table_equality": a local field must equal a field looked up on
 *    the referenced row.
 * Each case returns a two-element array: [plpgsql function, trigger DDL].
 * The trigger is recreated idempotently (DROP TRIGGER IF EXISTS) and fires
 * BEFORE INSERT OR UPDATE, FOR EACH ROW.
 *
 * NOTE(review): table/field identifiers are interpolated unquoted into the
 * SQL; assumes they are trusted, valid SQL identifiers — confirm upstream
 * validation.
 *
 * @param {object} inv - Invariant IR entry (logic, tableName, id, description).
 * @param {string} schemaName - Target database schema.
 * @returns {string[]|null} [function SQL, trigger SQL] or null.
 */
function generateTrigger(inv, schemaName) {
	const { logic, tableName, id } = inv;
	switch (logic.type) {
		// Referenced row must belong to the same owner as NEW.
		case "cross_table_ownership": {
			const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
			const functionName = `${tableName}_${id}_fn`;
			const triggerName = `${tableName}_${id}_trigger`;
			// NULL FKs are allowed: the check only runs when NEW.<fk> is set.
			return [`-- Function to enforce: ${inv.description}
CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
RETURNS TRIGGER AS $$
BEGIN
IF NEW.${fieldName} IS NOT NULL THEN
IF NOT EXISTS (
SELECT 1
FROM ${schemaName}.${referencedTable}
WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
AND ${referencedTable}.${ownershipFieldName} = NEW.${ownershipFieldName}
) THEN
RAISE EXCEPTION 'Invariant violation: % must belong to the same % as the record', '${fieldName}', '${ownershipFieldName}';
END IF;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
CREATE TRIGGER ${triggerName}
BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
FOR EACH ROW
EXECUTE FUNCTION ${schemaName}.${functionName}();`];
		}
		// Local field must equal a field on the referenced row.
		case "cross_table_equality": {
			const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
			const functionName = `${tableName}_${id}_fn`;
			const triggerName = `${tableName}_${id}_trigger`;
			// NOTE(review): the looked-up value is DECLAREd as TEXT — assumes
			// the compared columns are text-compatible; confirm for non-text
			// equality fields.
			return [`-- Function to enforce: ${inv.description}
CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
RETURNS TRIGGER AS $$
DECLARE
ref_${referencedEqualityFieldName} TEXT;
BEGIN
IF NEW.${fieldName} IS NOT NULL THEN
SELECT ${referencedEqualityFieldName} INTO ref_${referencedEqualityFieldName}
FROM ${schemaName}.${referencedTable}
WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
LIMIT 1;

IF ref_${referencedEqualityFieldName} IS NULL THEN
RAISE EXCEPTION 'Invariant violation: Referenced record not found in ${referencedTable}';
END IF;

IF NEW.${equalityFieldName} != ref_${referencedEqualityFieldName} THEN
RAISE EXCEPTION 'Invariant violation: % must equal %.%', '${equalityFieldName}', '${referencedTable}', '${referencedEqualityFieldName}';
END IF;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
CREATE TRIGGER ${triggerName}
BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
FOR EACH ROW
EXECUTE FUNCTION ${schemaName}.${functionName}();`];
		}
		// Unsupported logic (e.g. "raw"): nothing to emit.
		default: return null;
	}
}
308
+
309
+ //#endregion
310
+ //#region src/generators/drizzle.ts
311
/**
 * Convert an identifier to snake_case for database naming. When the adapter
 * is configured for camelCase, the input is returned unchanged.
 *
 * @param {string} str - Identifier to convert.
 * @param {boolean} camelCase - True to skip conversion entirely.
 * @returns {string} snake_case name (or `str` verbatim when camelCase).
 */
function convertToSnakeCase(str, camelCase) {
  if (camelCase) return str;
  // First split acronym boundaries (HTTPServer -> HTTP_Server), then
  // ordinary lower/digit-to-upper boundaries (fooBar -> foo_Bar).
  const withAcronymBreaks = str.replace(/([A-Z]+)([A-Z][a-z])/g, "$1_$2");
  const withWordBreaks = withAcronymBreaks.replace(/([a-z\d])([A-Z])/g, "$1_$2");
  return withWordBreaks.toLowerCase();
}
315
/**
 * Generate a Drizzle ORM schema file (TypeScript source) from the payment
 * tables.
 *
 * Emits one `<dialect>Table(...)` definition per table, `relations(...)`
 * declarations, optional invariant type hints, and — for Postgres only — a
 * side-car `*-invariants.sql` file written next to the schema file.
 *
 * @param {{options: object, file?: string, adapter: object}} args
 *   `adapter.options.provider` must be "pg" | "mysql" | "sqlite".
 * @returns {Promise<{code: string, fileName: string, overwrite: boolean}>}
 *   Prettier-formatted schema source and target file name; `overwrite` is
 *   true when the target file already exists.
 * @throws {Error} when the adapter does not declare a `provider`.
 */
const generateDrizzleSchema = async ({ options, file, adapter }) => {
  const tables = getPaymentTables$1(options);
  const filePath = file || "./payment-schema.ts";
  const databaseType = adapter.options?.provider;
  if (!databaseType) throw new Error("Database provider type is undefined during Drizzle schema generation. Please define a `provider` in the Drizzle adapter config.");
  const fileExist = existsSync(filePath);
  // The generated source starts with the dialect-specific import lines.
  let code = generateImport({
    databaseType,
    tables,
    options
  });
  const getModelName = initGetModelName({
    schema: tables,
    usePlural: adapter.options?.adapterConfig?.usePlural
  });
  const getFieldName = initGetFieldName({
    schema: tables,
    usePlural: adapter.options?.adapterConfig?.usePlural
  });
  // Cache tableKey -> exported model identifier.
  const tableNameMap = /* @__PURE__ */ new Map();
  for (const tableKey in tables) tableNameMap.set(tableKey, getModelName(tableKey));
  // Render the Drizzle column-builder expression (as source text) for one
  // field in the given dialect.
  function getType(name, field, databaseType$1) {
    name = convertToSnakeCase(name, adapter.options?.camelCase);
    // FKs pointing at an `id` column mirror the id column's storage type.
    if (field.references?.field === "id") {
      if (databaseType$1 === "mysql") return `varchar('${name}', { length: 36 })`;
      return `text('${name}')`;
    }
    const type = field.type;
    if (typeof type !== "string") {
      // A string[] type literal denotes an enum column.
      if (Array.isArray(type) && type.every((x) => typeof x === "string")) return {
        sqlite: `text({ enum: [${type.map((x) => `'${x}'`).join(", ")}] })`,
        pg: `text('${name}', { enum: [${type.map((x) => `'${x}'`).join(", ")}] })`,
        mysql: `mysqlEnum([${type.map((x) => `'${x}'`).join(", ")}])`
      }[databaseType$1];
      throw new TypeError(`Invalid field type for field ${name}`);
    }
    const dbTypeMap = {
      string: {
        sqlite: `text('${name}')`,
        pg: `text('${name}')`,
        // MySQL can't index/sort unbounded TEXT, so unique/FK/sortable/
        // indexed string columns become VARCHAR there.
        mysql: field.unique ? `varchar('${name}', { length: 255 })` : field.references ? `varchar('${name}', { length: 36 })` : field.sortable ? `varchar('${name}', { length: 255 })` : field.index ? `varchar('${name}', { length: 255 })` : `text('${name}')`
      },
      boolean: {
        sqlite: `integer('${name}', { mode: 'boolean' })`,
        pg: `boolean('${name}')`,
        mysql: `boolean('${name}')`
      },
      number: {
        sqlite: `integer('${name}')`,
        pg: field.bigint ? `bigint('${name}', { mode: 'number' })` : `integer('${name}')`,
        mysql: field.bigint ? `bigint('${name}', { mode: 'number' })` : `int('${name}')`
      },
      date: {
        sqlite: `integer('${name}', { mode: 'timestamp_ms' })`,
        pg: `timestamp('${name}')`,
        mysql: `timestamp('${name}', { fsp: 3 })`
      },
      // Array types: native arrays on Postgres, JSON text elsewhere.
      "number[]": {
        sqlite: `text('${name}', { mode: "json" })`,
        pg: field.bigint ? `bigint('${name}', { mode: 'number' }).array()` : `integer('${name}').array()`,
        mysql: `text('${name}', { mode: 'json' })`
      },
      "string[]": {
        sqlite: `text('${name}', { mode: "json" })`,
        pg: `text('${name}').array()`,
        mysql: `text('${name}', { mode: "json" })`
      },
      json: {
        sqlite: `text('${name}', { mode: "json" })`,
        pg: `jsonb('${name}')`,
        mysql: `json('${name}', { mode: "json" })`
      },
      uuid: {
        sqlite: `text('${name}')`,
        pg: `uuid('${name}')`,
        mysql: `varchar('${name}', { length: 36 })`
      }
    }[type];
    if (!dbTypeMap) throw new Error(`Unsupported field type '${field.type}' for field '${name}'.`);
    return dbTypeMap[databaseType$1];
  }
  // Pass 1: collect per-table metadata (id column, indexes, FK references).
  const tableDefinitions = [];
  for (const tableKey in tables) {
    const table = tables[tableKey];
    const modelName = getModelName(tableKey);
    const fields = table.fields;
    const idFieldType = table.fields.id?.type;
    let id;
    if (databaseType === "pg" && idFieldType === "uuid") id = `uuid('id').primaryKey()`;
    else if (databaseType === "mysql") id = `varchar('id', { length: 36 }).primaryKey()`;
    else id = `text('id').primaryKey()`;
    const indexes = [];
    const references = [];
    for (const field of Object.keys(fields)) {
      if (field === "id") continue;
      const attr = fields[field];
      const fieldName = attr.fieldName || field;
      if (attr.index && !attr.unique) indexes.push({
        type: "index",
        name: `${modelName}_${fieldName}_idx`,
        on: fieldName
      });
      else if (attr.index && attr.unique) indexes.push({
        type: "uniqueIndex",
        name: `${modelName}_${fieldName}_uidx`,
        on: fieldName
      });
      if (attr.references) {
        const referencedModelName = tableNameMap.get(attr.references.model) || getModelName(attr.references.model);
        references.push({
          fieldName,
          referencedTable: referencedModelName,
          referencedField: getFieldName({
            model: attr.references.model,
            field: attr.references.field
          }),
          // Default FK deletion behavior is cascade.
          onDelete: attr.references.onDelete || "cascade",
          required: attr.required || false,
          originalModel: attr.references.model
        });
      }
    }
    tableDefinitions.push({
      modelName,
      tableKey,
      fields,
      id,
      indexes,
      references
    });
  }
  // References may name a table by schema key, resolved model name, or raw
  // modelName — index all three back to the schema key.
  const modelKeyToTableKey = /* @__PURE__ */ new Map();
  for (const tableKey in tables) {
    const table = tables[tableKey];
    const modelName = getModelName(tableKey);
    modelKeyToTableKey.set(tableKey, tableKey);
    modelKeyToTableKey.set(modelName, tableKey);
    modelKeyToTableKey.set(table.modelName, tableKey);
  }
  // Directed FK graph: "source->target" -> reference metadata.
  const referenceGraph = /* @__PURE__ */ new Map();
  for (const tableDef of tableDefinitions) for (const ref of tableDef.references) {
    const referencedTableKey = modelKeyToTableKey.get(ref.originalModel);
    if (!referencedTableKey) continue;
    const key = `${tableDef.tableKey}->${referencedTableKey}`;
    referenceGraph.set(key, {
      ...ref,
      sourceTable: tableDef.tableKey,
      sourceModelName: tableDef.modelName
    });
  }
  // Break circular FK pairs: when two tables reference each other, drop the
  // optional "set null" side's constraint and keep the stricter one.
  const skipReferences = /* @__PURE__ */ new Set();
  for (const tableDef of tableDefinitions) for (const ref of tableDef.references) {
    const referencedTableKey = modelKeyToTableKey.get(ref.originalModel);
    if (!referencedTableKey) continue;
    const reverseKey = `${referencedTableKey}->${tableDef.tableKey}`;
    const reverseRef = referenceGraph.get(reverseKey);
    if (reverseRef) {
      if (!ref.required && ref.onDelete === "set null" && (reverseRef.required || reverseRef.onDelete !== "set null")) skipReferences.add(`${tableDef.tableKey}.${ref.fieldName}`);
    }
  }
  // Pass 2: emit one table definition per collected tableDef.
  for (const tableDef of tableDefinitions) {
    const { modelName, fields, id, indexes, references } = tableDef;
    // Render the optional third `(table) => [...]` index-builder argument.
    const assignIndexes = (indexesToAssign) => {
      if (!indexesToAssign.length) return "";
      const parts = [`, (table) => [`];
      for (const index of indexesToAssign) parts.push(` ${index.type}("${index.name}").on(table.${index.on}),`);
      parts.push(`]`);
      return parts.join("\n");
    };
    const referenceMap = /* @__PURE__ */ new Map();
    for (const ref of references) referenceMap.set(ref.fieldName, ref);
    const fieldDefinitions = Object.keys(fields).filter((field) => field !== "id").map((field) => {
      const attr = fields[field];
      const fieldName = attr.fieldName || field;
      let type = getType(fieldName, attr, databaseType);
      let comment = "";
      // Default values: functions producing "new Date()" map to DB-side
      // now() defaults; plain values are inlined.
      if (attr.defaultValue !== null && typeof attr.defaultValue !== "undefined") if (typeof attr.defaultValue === "function") {
        if (attr.type === "date" && attr.defaultValue.toString().includes("new Date()")) if (databaseType === "sqlite") type += `.default(sql\`(cast(unixepoch('subsecond') * 1000 as integer))\`)`;
        else type += `.defaultNow()`;
      } else if (typeof attr.defaultValue === "string") type += `.default("${attr.defaultValue}")`;
      else type += `.default(${attr.defaultValue})`;
      if (attr.onUpdate && attr.type === "date") {
        if (typeof attr.onUpdate === "function") type += `.$onUpdate(${attr.onUpdate})`;
      }
      const ref = referenceMap.get(fieldName);
      const shouldSkipReference = skipReferences.has(`${tableDef.tableKey}.${fieldName}`);
      let referenceChain = "";
      if (ref && !shouldSkipReference) referenceChain = `.references(() => ${ref.referencedTable}.${ref.referencedField}, { onDelete: '${ref.onDelete}' })`;
      else if (ref && shouldSkipReference) {
        // Annotate the generated source to explain the missing constraint.
        const reverseKey = `${ref.originalModel}->${tableDef.tableKey}`;
        const reverseRef = referenceGraph.get(reverseKey);
        if (reverseRef) comment = `\n // FK constraint removed to break circular dependency with ${ref.referencedTable}\n // Primary FK: ${reverseRef.sourceModelName}.${reverseRef.fieldName} -> ${modelName}.${fieldName}\n // This field still maintains referential integrity via application logic and Drizzle relations`;
        else comment = `\n // FK constraint removed to break circular dependency with ${ref.referencedTable}\n // This field still maintains referential integrity via application logic and Drizzle relations`;
      }
      const fieldDef = `${fieldName}: ${type}${attr.required ? ".notNull()" : ""}${attr.unique ? ".unique()" : ""}${referenceChain}`;
      return comment ? `${comment}\n ${fieldDef}` : fieldDef;
    });
    const schema = `export const ${modelName} = ${databaseType}Table("${convertToSnakeCase(modelName, adapter.options?.camelCase)}", {
id: ${id},
${fieldDefinitions.join(",\n ")}
}${assignIndexes(indexes)});`;
    code += `\n${schema}\n`;
  }
  // Pass 3: emit Drizzle relations() declarations per table.
  let relationsString = "";
  for (const tableKey in tables) {
    const table = tables[tableKey];
    const modelName = getModelName(tableKey);
    const oneRelations = [];
    const manyRelations = [];
    if (table.relations) for (const [relationName, relationDef] of Object.entries(table.relations)) {
      const referencedModelName = getModelName(relationDef.model);
      const foreignKeyField = table.fields[relationDef.foreignKey];
      if (relationDef.kind === "one") {
        if (foreignKeyField?.references) {
          const fieldRef = `${modelName}.${getFieldName({
            model: tableKey,
            field: relationDef.foreignKey
          })}`;
          const referenceRef = `${referencedModelName}.${getFieldName({
            model: relationDef.model,
            field: foreignKeyField.references.field || "id"
          })}`;
          oneRelations.push({
            key: relationName,
            model: referencedModelName,
            type: "one",
            reference: {
              field: fieldRef,
              references: referenceRef,
              fieldName: relationDef.foreignKey
            }
          });
        }
      } else if (relationDef.kind === "many") {
        // For "many", locate the FK on the *other* table pointing back here.
        const referencedTable = tables[relationDef.model];
        if (referencedTable) {
          const fkField = Object.entries(referencedTable.fields).find(([_, field]) => field.references && (field.references.model === tableKey || field.references.model === getModelName(tableKey)));
          if (fkField) {
            const [fkFieldName] = fkField;
            const fieldRef = `${referencedModelName}.${getFieldName({
              model: relationDef.model,
              field: fkFieldName
            })}`;
            const referenceRef = `${modelName}.${getFieldName({
              model: tableKey,
              field: "id"
            })}`;
            manyRelations.push({
              key: relationName,
              model: referencedModelName,
              type: "many",
              reference: {
                field: fieldRef,
                references: referenceRef,
                fieldName: fkFieldName
              }
            });
          } else manyRelations.push({
            key: relationName,
            model: referencedModelName,
            type: "many"
          });
        }
      }
    }
    // Several "one" relations to the same related key get their own
    // relations() export each (named by FK field) to avoid key collisions.
    const relationsByModel = /* @__PURE__ */ new Map();
    for (const relation of oneRelations) {
      if (!relation.reference) continue;
      const modelKey = relation.key;
      if (!relationsByModel.has(modelKey)) relationsByModel.set(modelKey, []);
      relationsByModel.get(modelKey).push(relation);
    }
    const duplicateRelations = [];
    const singleRelations = [];
    for (const [_modelKey, rels] of relationsByModel.entries()) if (rels.length > 1) duplicateRelations.push(...rels);
    else singleRelations.push(rels[0]);
    for (const relation of duplicateRelations) {
      if (!relation.reference) continue;
      const fieldName = relation.reference.fieldName;
      const tableRelation = `export const ${`${modelName}${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)}Relations`} = relations(${modelName}, ({ one }) => ({
 ${relation.key}: one(${relation.model}, {
 fields: [${relation.reference.field}],
 references: [${relation.reference.references}],
 })
}))`;
      relationsString += `\n${tableRelation}\n`;
    }
    // The remaining relations share one combined relations() export.
    const hasOne = singleRelations.length > 0;
    const hasMany = manyRelations.length > 0;
    if (hasOne && hasMany) {
      const tableRelation = `export const ${modelName}Relations = relations(${modelName}, ({ one, many }) => ({
${singleRelations.map((relation) => relation.reference ? ` ${relation.key}: one(${relation.model}, {
 fields: [${relation.reference.field}],
 references: [${relation.reference.references}],
 })` : "").filter((x) => x !== "").join(",\n ")}${singleRelations.length > 0 && manyRelations.length > 0 ? "," : ""}
${manyRelations.map(({ key, model }) => ` ${key}: many(${model})`).join(",\n ")}
}))`;
      relationsString += `\n${tableRelation}\n`;
    } else if (hasOne) {
      const tableRelation = `export const ${modelName}Relations = relations(${modelName}, ({ one }) => ({
${singleRelations.map((relation) => relation.reference ? ` ${relation.key}: one(${relation.model}, {
 fields: [${relation.reference.field}],
 references: [${relation.reference.references}],
 })` : "").filter((x) => x !== "").join(",\n ")}
}))`;
      relationsString += `\n${tableRelation}\n`;
    } else if (hasMany) {
      const tableRelation = `export const ${modelName}Relations = relations(${modelName}, ({ many }) => ({
${manyRelations.map(({ key, model }) => ` ${key}: many(${model})`).join(",\n ")}
}))`;
      relationsString += `\n${tableRelation}\n`;
    }
  }
  code += `\n${relationsString}`;
  // Optional TS union-type aliases derived from invariants.
  const typeHints = generateInvariantTypeHints(tables, getModelName, getFieldName);
  if (typeHints) code += `\n\n${typeHints}`;
  // Whitespace in the templates above is cosmetic; prettier normalizes it.
  const formattedCode = await prettier.format(code, { parser: "typescript" });
  // Postgres only: write the invariant-enforcement SQL side-car file.
  if (databaseType === "pg") {
    const sql = emitPostgresInvariantSql(normalizeInvariants(tables, {
      getModelName,
      getFieldName
    }), "public");
    const sqlFilePath = filePath.replace(/\.ts$/, "-invariants.sql");
    const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
    await fs$1.mkdir(sqlDir, { recursive: true });
    await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
    console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
  }
  return {
    code: formattedCode,
    fileName: filePath,
    overwrite: fileExist
  };
};
649
/**
 * Emit TypeScript union-type aliases for enum fields that appear in
 * "determines ... presence" invariants, e.g.
 * `export type ChargeModeMode = "PAYMENT" | "SUBSCRIPTION";`.
 *
 * @param {object} tables - Map of tableKey -> table definition.
 * @param {Function} getModelName - Resolves a table key to its model name.
 * @param {Function} getFieldName - Unused here; accepted for signature
 *   parity with the other generators.
 * @returns {string} A commented hint block, or "" when nothing applies.
 */
function generateInvariantTypeHints(tables, getModelName, getFieldName) {
  const lines = [];
  for (const [tableKey, table] of Object.entries(tables)) {
    if (!table.invariants) continue;
    for (const invariant of table.invariants) {
      const lowered = invariant.description.toLowerCase();
      if (!lowered.includes("determines") || !lowered.includes("presence")) continue;
      const modeMatch = invariant.description.match(/(\w+)\.(\w+)\s+determines/);
      if (!modeMatch) continue;
      const rawFieldName = modeMatch[2];
      // Resolve the field case-insensitively against declared fields.
      const fieldKey = Object.keys(table.fields).find((k) => k.toLowerCase() === rawFieldName.toLowerCase());
      if (!fieldKey) continue;
      const { type } = table.fields[fieldKey];
      // Only string-array (enum) field types produce a union alias.
      const isEnum = typeof type !== "string" && Array.isArray(type) && type.every((x) => typeof x === "string");
      if (!isEnum) continue;
      const alias = `${getModelName(tableKey)}${rawFieldName.charAt(0).toUpperCase()}${rawFieldName.slice(1)}Mode`;
      const union = type.map((v) => `"${v}"`).join(" | ");
      lines.push(`// Type hint for invariant: ${invariant.id}`);
      lines.push(`export type ${alias} = ${union};`);
    }
  }
  if (lines.length === 0) return "";
  return `\n// Invariant type hints\n${lines.join("\n")}`;
}
678
/**
 * Build the import statements for a generated Drizzle schema file.
 *
 * Scans every field of every table to decide which column builders
 * (text/varchar, integer/int, bigint, jsonb/json, uuid, enum, index
 * helpers, ...) must be imported for the chosen dialect.
 *
 * @param {{databaseType: string, tables: object}} args - Dialect
 *   ("pg" | "mysql" | "sqlite") and the table map.
 * @returns {string} One or two import lines terminated with "\n".
 */
function generateImport({ databaseType, tables }) {
  const allFields = Object.values(tables).flatMap((table) => Object.values(table.fields));
  const hasBigint = allFields.some((field) => field.bigint);
  const hasJson = allFields.some((field) => field.type === "json");
  const hasUuid = allFields.some((field) => field.type === "uuid");
  const hasPlainNumber = allFields.some((field) => (field.type === "number" || field.type === "number[]") && !field.bigint);
  // Imports from "drizzle-orm": always `relations`; sqlite also needs `sql`
  // when a date field defaults to a `new Date()` factory (raw SQL default).
  const drizzleImports = ["relations"];
  if (databaseType === "sqlite" && allFields.some((field) => field.type === "date" && field.defaultValue && typeof field.defaultValue === "function" && field.defaultValue.toString().includes("new Date()"))) drizzleImports.push("sql");
  // Imports from "drizzle-orm/<dialect>-core", in a stable order.
  const coreImports = [`${databaseType}Table`];
  if (databaseType === "mysql") coreImports.push("varchar, text");
  else coreImports.push("text");
  if (hasBigint && databaseType !== "sqlite") coreImports.push("bigint");
  if (databaseType !== "sqlite") coreImports.push("timestamp, boolean");
  if (databaseType === "mysql") {
    if (hasPlainNumber) coreImports.push("int");
    if (allFields.some((field) => typeof field.type !== "string" && Array.isArray(field.type) && field.type.every((x) => typeof x === "string"))) coreImports.push("mysqlEnum");
  } else if (databaseType === "pg") {
    if (hasPlainNumber) coreImports.push("integer");
    if (hasUuid) coreImports.push("uuid");
  } else coreImports.push("integer");
  if (hasJson) {
    if (databaseType === "pg") coreImports.push("jsonb");
    if (databaseType === "mysql") coreImports.push("json");
  }
  if (allFields.some((field) => field.index && !field.unique)) coreImports.push("index");
  if (allFields.some((field) => field.unique && field.index)) coreImports.push("uniqueIndex");
  const drizzleLine = drizzleImports.length > 0 ? `import { ${drizzleImports.join(", ")} } from "drizzle-orm";\n` : "";
  return `${drizzleLine}import { ${coreImports.join(", ")} } from "drizzle-orm/${databaseType}-core";\n`;
}
714
+
715
+ //#endregion
716
+ //#region src/generators/kysely.ts
717
/**
 * Generate a SQL migration file for the Kysely adapter.
 *
 * Compiles all pending payment-table migrations into a single SQL file and,
 * for the Postgres dialect, additionally writes an `invariants.sql` file
 * alongside it.
 *
 * @param {{ options: any, file?: string, adapter?: any }} ctx - Resolved
 *   options, optional output path override, and the database adapter.
 * @returns {Promise<{ code: string, fileName: string }>} Migration SQL
 *   ("" when there is nothing to run) and the target file path.
 */
const generateKyselySchema = async ({ options, file, adapter }) => {
	// Compile every pending migration into one SQL string.
	const { compileMigrations } = await getMigrations(options);
	const migrations = await compileMigrations();
	// ":" is replaced in the timestamp because it is not a legal filename
	// character on Windows.
	const timestamp = new Date().toISOString().replace(/:/g, "-");
	// NOTE(review): directory name still says "better-auth_migrations" in this
	// pecunia package — presumably inherited; confirm it is intentional.
	const migrationFile = file || `./better-auth_migrations/${timestamp}.sql`;
	// Invariant SQL is only emitted for Postgres (also the default dialect).
	const dialect = adapter?.options?.type || "postgres";
	if (dialect === "postgres") {
		const tables = getPaymentTables(options);
		const invariants = normalizeInvariants(tables, {
			getModelName: initGetModelName({
				schema: tables,
				usePlural: adapter?.options?.adapterConfig?.usePlural
			}),
			getFieldName: initGetFieldName({
				schema: tables,
				usePlural: false
			})
		});
		const sql = emitPostgresInvariantSql(invariants, "public");
		// invariants.sql lives next to the migration file.
		const sqlFilePath = path.join(path.dirname(migrationFile), "invariants.sql");
		const absoluteSqlPath = path.resolve(process.cwd(), sqlFilePath);
		await fs$1.mkdir(path.dirname(absoluteSqlPath), { recursive: true });
		await fs$1.writeFile(absoluteSqlPath, sql);
		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
	}
	// A bare ";" means the compiler produced no statements — emit nothing.
	const code = migrations.trim() === ";" ? "" : migrations;
	return {
		code,
		fileName: migrationFile
	};
};
744
+
745
+ //#endregion
746
+ //#region src/utils/get-package-info.ts
747
/**
 * Read and parse the `package.json` in `cwd` (or the process's current
 * working directory when `cwd` is omitted).
 *
 * @param {string} [cwd] - Directory that contains the package.json.
 * @returns {object} Parsed package.json contents.
 * @throws If the file is missing or contains invalid JSON.
 */
function getPackageInfo(cwd) {
	const manifestPath = cwd ? path.join(cwd, "package.json") : path.join("package.json");
	const raw = fs.readFileSync(manifestPath, "utf-8");
	return JSON.parse(raw);
}
751
/**
 * Determine the installed Prisma major version from package.json.
 *
 * Checks `prisma` then `@prisma/client` in both dependencies and
 * devDependencies and extracts the first run of digits from the version
 * specifier (so "^6.2.1" → 6).
 *
 * @param {string} [cwd] - Directory whose package.json should be inspected.
 * @returns {number|null} The major version, or null when Prisma is not
 *   declared or the manifest cannot be read.
 */
function getPrismaVersion(cwd) {
	try {
		const pkg = getPackageInfo(cwd);
		// First truthy match wins, mirroring the CLI-package-first preference.
		const candidates = [
			pkg.dependencies?.prisma,
			pkg.devDependencies?.prisma,
			pkg.dependencies?.["@prisma/client"],
			pkg.devDependencies?.["@prisma/client"]
		];
		const spec = candidates.find((v) => v);
		if (!spec) return null;
		const digits = /(\d+)/.exec(spec);
		return digits ? parseInt(digits[1], 10) : null;
	} catch {
		// Missing/unreadable package.json is treated as "version unknown".
		return null;
	}
}
762
+
763
+ //#endregion
764
+ //#region src/generators/prisma.ts
765
/**
 * Generate or update a Prisma schema for the configured payment tables.
 *
 * When the schema file already exists it is merged into via `produceSchema`
 * (existing models/fields are never clobbered); otherwise a minimal
 * generator/datasource scaffold is created first. For the "postgresql"
 * provider an `invariants.sql` file is additionally written next to the
 * schema file.
 *
 * @param {{ adapter: any, options: any, file?: string }} ctx - Adapter
 *   configuration, resolved plugin options, and optional output path.
 * @returns {Promise<{ code: string, fileName: string, overwrite: boolean }>}
 *   Updated schema text ("" when unchanged), target path, and whether the
 *   caller should overwrite an existing file.
 */
const generatePrismaSchema = async ({ adapter, options, file }) => {
	const provider = adapter.options?.provider || "postgresql";
	const tables = getPaymentTables$1(options);
	const filePath = file || "./prisma/schema.prisma";
	const schemaPrismaExist = existsSync(path.join(process.cwd(), filePath));
	const getModelName = initGetModelName({
		schema: getPaymentTables$1(options),
		usePlural: adapter.options?.adapterConfig?.usePlural
	});
	const getFieldName = initGetFieldName({
		schema: getPaymentTables$1(options),
		usePlural: false
	});
	// Start from the existing schema, or a fresh scaffold when none exists.
	let schemaPrisma = "";
	if (schemaPrismaExist) schemaPrisma = await fs$1.readFile(path.join(process.cwd(), filePath), "utf-8");
	else schemaPrisma = getNewPrisma(provider, process.cwd());
	// Prisma >= 7 renamed the client generator provider; migrate it in place.
	const prismaVersion = getPrismaVersion(process.cwd());
	if (prismaVersion && prismaVersion >= 7 && schemaPrismaExist) schemaPrisma = produceSchema(schemaPrisma, (builder) => {
		const generator = builder.findByType("generator", { name: "client" });
		if (generator && generator.properties) {
			const providerProp = generator.properties.find((prop) => prop.type === "assignment" && prop.key === "provider");
			if (providerProp && providerProp.value === "\"prisma-client-js\"") providerProp.value = "\"prisma-client\"";
		}
	});
	// referenced model -> models that reference it; drives back-relation fields.
	const manyToManyRelations = /* @__PURE__ */ new Map();
	for (const table in tables) {
		const fields = tables[table]?.fields;
		for (const field in fields) {
			const attr = fields[field];
			if (attr.references) {
				const referencedOriginalModel = attr.references.model;
				const referencedModelNameCap = capitalizeFirstLetter(getModelName(tables[referencedOriginalModel]?.modelName || referencedOriginalModel));
				if (!manyToManyRelations.has(referencedModelNameCap)) manyToManyRelations.set(referencedModelNameCap, /* @__PURE__ */ new Set());
				const currentModelNameCap = capitalizeFirstLetter(getModelName(tables[table]?.modelName || table));
				manyToManyRelations.get(referencedModelNameCap).add(currentModelNameCap);
			}
		}
	}
	// model name -> non-unique indexed fields (unique fields get @@unique instead).
	const indexedFields = /* @__PURE__ */ new Map();
	for (const table in tables) {
		const fields = tables[table]?.fields;
		const modelName = capitalizeFirstLetter(getModelName(tables[table]?.modelName || table));
		indexedFields.set(modelName, []);
		for (const field in fields) {
			const attr = fields[field];
			if (attr.index && !attr.unique) {
				const fieldName = attr.fieldName || field;
				indexedFields.get(modelName).push(fieldName);
			}
		}
	}
	const schema = produceSchema(schemaPrisma, (builder) => {
		// Map a schema field descriptor to the Prisma scalar for this provider.
		// SQLite and MySQL lack native JSON/array columns here, so those
		// collapse to String. Hoisted out of the table loop because it only
		// depends on `provider`.
		function getType({ isBigint, isOptional, type }) {
			if (type === "string") return isOptional ? "String?" : "String";
			if (type === "number" && isBigint) return isOptional ? "BigInt?" : "BigInt";
			if (type === "number") return isOptional ? "Int?" : "Int";
			if (type === "boolean") return isOptional ? "Boolean?" : "Boolean";
			if (type === "date") return isOptional ? "DateTime?" : "DateTime";
			if (type === "json") {
				if (provider === "sqlite" || provider === "mysql") return isOptional ? "String?" : "String";
				return isOptional ? "Json?" : "Json";
			}
			if (type === "string[]") {
				if (provider === "sqlite" || provider === "mysql") return isOptional ? "String?" : "String";
				return "String[]";
			}
			if (type === "number[]") {
				if (provider === "sqlite" || provider === "mysql") return "String";
				return "Int[]";
			}
		}
		for (const table in tables) {
			const originalTableName = table;
			const customModelName = tables[table]?.modelName || table;
			const modelName = capitalizeFirstLetter(getModelName(customModelName));
			const fields = tables[table]?.fields;
			const prismaModel = builder.findByType("model", { name: modelName });
			// Brand-new models always get a String `id` primary key.
			if (!prismaModel) builder.model(modelName).field("id", "String").attribute("id");
			for (const field in fields) {
				const attr = fields[field];
				const fieldName = attr.fieldName || field;
				// Never clobber a field the user already has in their schema.
				if (prismaModel) {
					if (builder.findByType("field", {
						name: fieldName,
						within: prismaModel.properties
					})) continue;
				}
				const fieldBuilder = builder.model(modelName).field(fieldName, getType({
					isBigint: attr?.bigint || false,
					isOptional: !attr?.required,
					// BUG FIX: was hard-coded to "string", which forced every
					// generated column to the String scalar and made every other
					// getType branch (Int/BigInt/Boolean/DateTime/Json/arrays)
					// unreachable — createdAt would even become `String @default(now())`.
					// Non-string type descriptors keep the old String fallback for
					// backward compatibility.
					type: typeof attr.type === "string" ? attr.type : "string"
				}));
				if (field === "id") fieldBuilder.attribute("id");
				if (attr.unique) builder.model(modelName).blockAttribute(`unique([${fieldName}])`);
				if (attr.defaultValue !== void 0) {
					if (Array.isArray(attr.defaultValue)) {
						// JSON defaults are serialized into a quoted, escaped string literal.
						if (attr.type === "json") {
							if (Object.prototype.toString.call(attr.defaultValue[0]) === "[object Object]") {
								fieldBuilder.attribute(`default("${JSON.stringify(attr.defaultValue).replace(/\\/g, "\\\\").replace(/"/g, "\\\"")}")`);
								continue;
							}
							let jsonArray = [];
							for (const value of attr.defaultValue) jsonArray.push(value);
							fieldBuilder.attribute(`default("${JSON.stringify(jsonArray).replace(/"/g, "\\\"")}")`);
							continue;
						}
						if (attr.defaultValue.length === 0) {
							fieldBuilder.attribute(`default([])`);
							continue;
						} else if (typeof attr.defaultValue[0] === "string" && attr.type === "string[]") {
							let valueArray = [];
							for (const value of attr.defaultValue) valueArray.push(JSON.stringify(value));
							fieldBuilder.attribute(`default([${valueArray}])`);
						} else if (typeof attr.defaultValue[0] === "number") {
							let valueArray = [];
							for (const value of attr.defaultValue) valueArray.push(`${value}`);
							fieldBuilder.attribute(`default([${valueArray}])`);
						}
					} else if (typeof attr.defaultValue === "object" && !Array.isArray(attr.defaultValue) && attr.defaultValue !== null) {
						if (Object.entries(attr.defaultValue).length === 0) {
							fieldBuilder.attribute(`default("{}")`);
							continue;
						}
						fieldBuilder.attribute(`default("${JSON.stringify(attr.defaultValue).replace(/\\/g, "\\\\").replace(/"/g, "\\\"")}")`);
					}
					// NOTE(review): createdAt only receives default(now()) when a
					// defaultValue is set on the field — confirm this is intended.
					if (field === "createdAt") fieldBuilder.attribute("default(now())");
					else if (typeof attr.defaultValue === "string" && provider !== "mysql") fieldBuilder.attribute(`default("${attr.defaultValue}")`);
					else if (typeof attr.defaultValue === "boolean" || typeof attr.defaultValue === "number") fieldBuilder.attribute(`default(${attr.defaultValue})`);
					else if (typeof attr.defaultValue === "function") {
						// Function defaults cannot be expressed in Prisma; applied at runtime.
					}
				}
				if (field === "updatedAt" && attr.onUpdate) fieldBuilder.attribute("updatedAt");
				else if (attr.onUpdate) {
					// Non-updatedAt onUpdate hooks have no Prisma equivalent; handled at runtime.
				}
				if (attr.references) {
					const referencedOriginalModelName = getModelName(attr.references.model);
					const referencedCustomModelName = tables[referencedOriginalModelName]?.modelName || referencedOriginalModelName;
					// Map the schema's onDelete keyword to Prisma's referential action.
					let action = "Cascade";
					if (attr.references.onDelete === "no action") action = "NoAction";
					else if (attr.references.onDelete === "set null") action = "SetNull";
					else if (attr.references.onDelete === "set default") action = "SetDefault";
					else if (attr.references.onDelete === "restrict") action = "Restrict";
					const relationField = `relation(fields: [${getFieldName({
						model: originalTableName,
						field: fieldName
					})}], references: [${getFieldName({
						model: attr.references.model,
						field: attr.references.field
					})}], onDelete: ${action})`;
					builder.model(modelName).field(referencedCustomModelName.toLowerCase(), `${capitalizeFirstLetter(referencedCustomModelName)}${!attr.required ? "?" : ""}`).attribute(relationField);
				}
				// MySQL String columns default to VARCHAR(191); use TEXT when the
				// field is neither unique nor a foreign key (those need index-able types).
				if (!attr.unique && !attr.references && provider === "mysql" && attr.type === "string") builder.model(modelName).field(fieldName).attribute("db.Text");
			}
			// Add back-relation fields for models referenced by other models.
			if (manyToManyRelations.has(modelName)) for (const relatedModel of manyToManyRelations.get(modelName)) {
				const relatedTableName = Object.keys(tables).find((key) => capitalizeFirstLetter(tables[key]?.modelName || key) === relatedModel);
				const relatedFields = relatedTableName ? tables[relatedTableName]?.fields : {};
				const [_fieldKey, fkFieldAttr] = Object.entries(relatedFields || {}).find(([_fieldName, fieldAttr]) => fieldAttr.references && getModelName(fieldAttr.references.model) === getModelName(originalTableName)) || [];
				// A unique FK means one-to-one; otherwise it is a list relation.
				const isUnique = fkFieldAttr?.unique === true;
				const fieldName = isUnique || adapter.options?.usePlural === true ? `${relatedModel.toLowerCase()}` : `${relatedModel.toLowerCase()}s`;
				if (!builder.findByType("field", {
					name: fieldName,
					within: prismaModel?.properties
				})) builder.model(modelName).field(fieldName, `${relatedModel}${isUnique ? "?" : "[]"}`);
			}
			// Emit @@index blocks for non-unique indexed fields (skip pre-existing ones).
			const indexedFieldsForModel = indexedFields.get(modelName);
			if (indexedFieldsForModel && indexedFieldsForModel.length > 0) for (const fieldName of indexedFieldsForModel) {
				if (prismaModel) {
					if (prismaModel.properties.some((v) => v.type === "attribute" && v.name === "index" && JSON.stringify(v.args[0]?.value).includes(fieldName))) continue;
				}
				const field = Object.entries(fields).find(([key, attr]) => (attr.fieldName || key) === fieldName)?.[1];
				let indexField = fieldName;
				// MySQL cannot index unbounded TEXT; cap the index prefix length.
				if (provider === "mysql" && field && field.type === "string") indexField = `${fieldName}(length: 191)`;
				builder.model(modelName).blockAttribute(`index([${indexField}])`);
			}
			// Keep the physical table name stable via @@map when a custom model name is used.
			const hasAttribute = builder.findByType("attribute", {
				name: "map",
				within: prismaModel?.properties
			});
			const hasChanged = customModelName !== originalTableName;
			if (!hasAttribute) builder.model(modelName).blockAttribute("map", `${getModelName(hasChanged ? customModelName : originalTableName)}`);
		}
	});
	const schemaChanged = schema.trim() !== schemaPrisma.trim();
	// Postgres additionally gets invariant enforcement SQL next to the schema.
	if (provider === "postgresql") {
		const tables$1 = getPaymentTables$1(options);
		const sql = emitPostgresInvariantSql(normalizeInvariants(tables$1, {
			getModelName: initGetModelName({
				schema: tables$1,
				usePlural: adapter.options?.adapterConfig?.usePlural
			}),
			getFieldName: initGetFieldName({
				schema: tables$1,
				usePlural: false
			})
		}), "public");
		const sqlFilePath = path.join(path.dirname(filePath), "invariants.sql");
		const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
		await fs$1.mkdir(sqlDir, { recursive: true });
		await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
	}
	return {
		code: schemaChanged ? schema : "",
		fileName: filePath,
		overwrite: schemaPrismaExist && schemaChanged
	};
};
970
/**
 * Build a minimal starter `schema.prisma` for a project that has none yet.
 *
 * @param {string} provider - Prisma datasource provider (e.g. "postgresql",
 *   "sqlite", "mysql").
 * @param {string} cwd - Directory whose package.json decides the generator
 *   provider (Prisma >= 7 uses "prisma-client").
 * @returns {string} The scaffold schema text.
 */
const getNewPrisma = (provider, cwd) => {
	const version = getPrismaVersion(cwd);
	// Prisma 7 renamed the generator provider from "prisma-client-js".
	const clientProvider = version && version >= 7 ? "prisma-client" : "prisma-client-js";
	// SQLite gets a local file database; everything else reads DATABASE_URL.
	const url = provider === "sqlite" ? `"file:./dev.db"` : `env("DATABASE_URL")`;
	return `generator client {
  provider = "${clientProvider}"
}

datasource db {
  provider = "${provider}"
  url      = ${url}
}`;
};
981
+
982
+ //#endregion
983
+ //#region src/generators/invariants-mongo.ts
984
/**
 * Build MongoDB collection validators ($jsonSchema documents) from the
 * normalized invariants.
 *
 * Only invariants with `enforcement.mongo === "validator"` that apply to
 * mongo are considered; fragments for the same collection are merged into a
 * single validator document.
 *
 * @param {Array<object>} invariants - Normalized invariants (see normalizeInvariants).
 * @returns {Record<string, { $jsonSchema: object }>} collection name -> validator.
 */
function emitMongoValidators(invariants) {
	const validators = {};
	for (const inv of invariants) {
		const isValidatorEnforced = inv.enforcement.mongo === "validator" && inv.appliesTo.includes("mongo");
		if (!isValidatorEnforced) continue;
		const collection = inv.tableName;
		// Lazily create the base schema document for this collection.
		if (!validators[collection]) {
			validators[collection] = {
				$jsonSchema: {
					bsonType: "object",
					required: [],
					properties: {},
					additionalProperties: true
				}
			};
		}
		const target = validators[collection].$jsonSchema;
		const fragment = generateMongoValidator(inv);
		if (!fragment) continue;
		// Merge the fragment's pieces into the collection-level schema.
		Object.assign(target.properties, fragment.properties || {});
		if (fragment.required) target.required = [...target.required || [], ...fragment.required];
		if (fragment.anyOf) {
			if (!target.anyOf) target.anyOf = [];
			target.anyOf.push(...fragment.anyOf);
		}
	}
	return validators;
}
1010
/**
 * Build the $jsonSchema fragment for one invariant.
 *
 * Only `field_enum_constraint` logic is supported; anything else yields null
 * so the caller skips it.
 *
 * @param {object} inv - A normalized invariant with a `logic` descriptor.
 * @returns {{ properties: object, anyOf?: object[], required?: string[] } | null}
 */
function generateMongoValidator(inv) {
	const { logic } = inv;
	if (logic.type !== "field_enum_constraint") return null;
	const { fieldName, allowedValues, conditionalField } = logic;
	const enumProperty = { [fieldName]: { enum: allowedValues } };
	if (!conditionalField) return { properties: enumProperty };
	// NOTE(review): the branch enum values "PAYMENT"/"SUBSCRIPTION" are
	// hard-coded rather than derived from allowedValues — confirm intended.
	// Exactly one anyOf branch is produced, keyed on whenPresent.
	const branch = conditionalField.whenPresent
		? {
			properties: {
				[fieldName]: { enum: ["SUBSCRIPTION"] },
				[conditionalField.fieldName]: { bsonType: "string" }
			},
			required: [conditionalField.fieldName]
		}
		: {
			properties: {
				[fieldName]: { enum: ["PAYMENT"] },
				[conditionalField.fieldName]: { bsonType: "null" }
			}
		};
	return {
		properties: enumProperty,
		anyOf: [branch]
	};
}
1041
/**
 * Generate the TypeScript guard module for MongoDB "app" enforcement
 * invariants.
 *
 * Each qualifying invariant becomes an exported async guard function
 * (produced by generateMongoGuard); their source texts are concatenated into
 * one module, with any per-guard import lines deduplicated at the top.
 *
 * @param {Array<object>} invariants - Normalized invariants.
 * @returns {string} TypeScript source for the guards module (a stub module
 *   exporting nothing when no invariant needs an app-level guard).
 */
function emitMongoGuards(invariants) {
	const guardInvariants = invariants.filter((inv) => inv.enforcement.mongo === "app" && inv.appliesTo.includes("mongo"));
	if (guardInvariants.length === 0) return `// No MongoDB app-level invariant guards generated
// This file is auto-generated - DO NOT EDIT MANUALLY

export {};
`;
	const guards = [];
	const imports = [];
	for (const inv of guardInvariants) {
		const guard = generateMongoGuard(inv);
		if (guard) {
			// BUG FIX: push the generated source text, not the wrapper object.
			// generateMongoGuard returns { code, imports? }; joining the objects
			// emitted "[object Object]" into the module, which is not valid
			// TypeScript and made the later prettier.format call fail.
			guards.push(guard.code);
			if (guard.imports) imports.push(...guard.imports);
		}
	}
	// Deduplicate import lines contributed by individual guards.
	const uniqueImports = Array.from(new Set(imports));
	return `// MongoDB invariant guards
// Generated from schema invariants
// DO NOT EDIT MANUALLY - This file is auto-generated
//
// These guards should be called before write operations to enforce invariants
// at the application level.

${uniqueImports.length > 0 ? uniqueImports.join("\n") + "\n" : ""}

${guards.join("\n\n")}
`;
}
1073
/**
 * Generate the TypeScript source of one guard function for a single invariant.
 *
 * Supported logic types: "cross_table_ownership", "cross_table_equality",
 * and "raw" (which emits a TODO placeholder). Any other type yields null.
 * The returned object carries the guard source in `code`; callers may also
 * read an optional `imports` array (not produced by these branches).
 *
 * Note: the emitted strings are TypeScript code templates — backticks and
 * `${...}` inside them are escaped (`\`` / `\${`) when they must survive into
 * the generated file rather than being interpolated here.
 *
 * @param {object} inv - Normalized invariant ({ logic, tableName, id, description }).
 * @returns {{ code: string } | null} Guard source, or null for unknown logic.
 */
function generateMongoGuard(inv) {
	// `tableName` is destructured for symmetry but unused by the current branches.
	const { logic, tableName, id, description } = inv;
	switch (logic.type) {
		// Guard: a referenced record must belong to the same owner as the record
		// being written (e.g. a payment method must belong to the same customer).
		case "cross_table_ownership": {
			const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
			return { code: `/**
 * Guard: ${description}
 *
 * @param data - The record being created/updated (use camelCase field names)
 * @param db - MongoDB database instance
 * @returns true if invariant is satisfied, throws error otherwise
 */
export async function ${id}Guard(
  data: { ${fieldName}?: string | null; ${ownershipFieldName}: string },
  db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
): Promise<boolean> {
  if (!data.${fieldName}) {
    return true; // Field is optional/nullable
  }

  const referenced = await db.collection("${referencedTable}").findOne({
    ${referencedField}: data.${fieldName}
  });

  if (!referenced) {
    throw new Error(\`Invariant violation: \${data.${fieldName}} not found in ${referencedTable}\`);
  }

  // Compare ownership field (use database field name for referenced record)
  const dataOwnerValue = data.${ownershipFieldName};
  const refOwnerValue = referenced.${ownershipFieldName};

  if (refOwnerValue !== dataOwnerValue) {
    throw new Error(
      \`Invariant violation: ${fieldName} must belong to the same ${ownershipFieldName} as the record. \` +
      \`Expected \${dataOwnerValue}, got \${refOwnerValue}\`
    );
  }

  return true;
}` };
		}
		// Guard: a field on the record must equal a field on the referenced
		// record; here the reference itself is required, not optional.
		case "cross_table_equality": {
			const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
			return { code: `/**
 * Guard: ${description}
 *
 * @param data - The record being created/updated
 * @param db - MongoDB database instance
 * @returns true if invariant is satisfied, throws error otherwise
 */
export async function ${id}Guard(
  data: { ${fieldName}: string; ${equalityFieldName}: string },
  db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
): Promise<boolean> {
  if (!data.${fieldName}) {
    throw new Error(\`Invariant violation: ${fieldName} is required\`);
  }

  const referenced = await db.collection("${referencedTable}").findOne({
    ${referencedField}: data.${fieldName}
  });

  if (!referenced) {
    throw new Error(\`Invariant violation: Referenced record not found in ${referencedTable}\`);
  }

  if (referenced.${referencedEqualityFieldName} !== data.${equalityFieldName}) {
    throw new Error(
      \`Invariant violation: ${equalityFieldName} must equal ${referencedTable}.${referencedEqualityFieldName}. \` +
      \`Expected \${referenced.${referencedEqualityFieldName}}, got \${data.${equalityFieldName}}\`
    );
  }

  return true;
}` };
		}
		// Unparseable invariants become a warn-only placeholder the user must
		// implement by hand; it always returns true until then.
		case "raw": return { code: `/**
 * Guard: ${description}
 *
 * TODO: Implement this guard based on the invariant description.
 * This is a placeholder - you must implement the actual validation logic.
 *
 * @param data - The record being created/updated
 * @param db - MongoDB database instance
 * @returns true if invariant is satisfied, throws error otherwise
 */
export async function ${id}Guard(
  data: any,
  db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
): Promise<boolean> {
  // TODO: Implement invariant: ${description}
  console.warn("Guard ${id}Guard is not yet implemented");
  return true;
}` };
		default: return null;
	}
}
1174
+
1175
+ //#endregion
1176
+ //#region src/generators/mongodb.ts
1177
/**
 * Generate MongoDB schema artifacts from the payment tables' invariants.
 *
 * Writes two sibling files derived from `file` (default "./mongodb-schema.ts"):
 *   - "<base>-validators.json": $jsonSchema collection validators, and
 *   - "<base>-guards.ts": app-level guard functions (prettier-formatted),
 * then returns an entry-point module that re-exports both.
 *
 * NOTE(review): the sibling paths are derived with `replace(/\.ts$/, ...)` —
 * a `file` that does not end in ".ts" would make validatorsPath/guardsPath
 * collide with filePath; confirm callers always pass a .ts path.
 *
 * @param {{ options: any, file?: string, adapter?: any }} ctx - Resolved
 *   options, optional output path, and the database adapter.
 * @returns {Promise<{ code: string, fileName: string, overwrite: boolean }>}
 *   The formatted entry-point module, its path, and overwrite=true.
 */
const generateMongoDBSchema = async ({ options, file, adapter }) => {
	const tables = getPaymentTables$1(options);
	const filePath = file || "./mongodb-schema.ts";
	// Normalize invariants with the adapter's naming configuration.
	const invariants = normalizeInvariants(tables, {
		getModelName: initGetModelName({
			schema: tables,
			usePlural: adapter?.options?.adapterConfig?.usePlural
		}),
		getFieldName: initGetFieldName({
			schema: tables,
			usePlural: false
		})
	});
	const validators = emitMongoValidators(invariants);
	const validatorsJson = JSON.stringify(validators, null, 2);
	const guards = emitMongoGuards(invariants);
	// Side effect: write the validators JSON next to the schema entry point.
	const validatorsPath = filePath.replace(/\.ts$/, "-validators.json");
	const validatorsDir = path.dirname(path.resolve(process.cwd(), validatorsPath));
	await fs$1.mkdir(validatorsDir, { recursive: true });
	await fs$1.writeFile(path.resolve(process.cwd(), validatorsPath), validatorsJson);
	console.log(`📝 Generated MongoDB validators: ${validatorsPath}`);
	// Side effect: write the prettier-formatted guards module.
	const guardsPath = filePath.replace(/\.ts$/, "-guards.ts");
	const guardsDir = path.dirname(path.resolve(process.cwd(), guardsPath));
	await fs$1.mkdir(guardsDir, { recursive: true });
	const formattedGuards = await prettier.format(guards, { parser: "typescript" });
	await fs$1.writeFile(path.resolve(process.cwd(), guardsPath), formattedGuards);
	console.log(`📝 Generated MongoDB guards: ${guardsPath}`);
	// The entry-point module imports the two generated files by relative base name.
	const validatorsBaseName = path.basename(validatorsPath, ".json");
	const guardsBaseName = path.basename(guardsPath, ".ts");
	const schemaCode = `// MongoDB schema setup
// Generated from schema definitions
// DO NOT EDIT MANUALLY - This file is auto-generated

import validators from "./${validatorsBaseName}.json";
import * as guards from "./${guardsBaseName}";

/**
 * MongoDB collection validators.
 * Apply these using db.createCollection() or db.command({ collMod: ... })
 *
 * Example:
 * \`\`\`
 * await db.createCollection("checkout_session", {
 *   validator: validators.checkout_session
 * });
 * \`\`\`
 */
export { validators };

/**
 * MongoDB invariant guards.
 * Call these before write operations to enforce invariants at the application level.
 *
 * Example:
 * \`\`\`
 * import { customer_payment_method_ownershipGuard } from "./${guardsBaseName}";
 *
 * await customer_payment_method_ownershipGuard(data, db);
 * await db.collection("customer").insertOne(data);
 * \`\`\`
 */
export { guards };
`;
	return {
		code: await prettier.format(schemaCode, { parser: "typescript" }),
		fileName: filePath,
		overwrite: true
	};
};
1246
+
1247
+ //#endregion
1248
+ //#region src/generators/index.ts
1249
// Registry of built-in schema generators keyed by adapter id.
// `generateSchema` consults this map first, before falling back to a custom
// adapter's own `createSchema` implementation.
const adapters = {
	prisma: generatePrismaSchema,
	drizzle: generateDrizzleSchema,
	kysely: generateKyselySchema,
	mongodb: generateMongoDBSchema
};
1255
/**
 * Dispatch schema generation to the right generator for the given adapter.
 *
 * Built-in adapters (see `adapters`) are handled directly; otherwise a custom
 * adapter may provide `createSchema`, whose `path` result is renamed to
 * `fileName` for a uniform return shape.
 *
 * @param {{ adapter: any, options: any, file?: string }} opts - Generation context.
 * @returns {Promise<{ code: string, fileName: string, overwrite?: boolean }>}
 * @throws {Error} When the adapter is neither built-in nor implements createSchema.
 */
const generateSchema = async (opts) => {
	const { adapter } = opts;
	// Prefer a built-in generator keyed by adapter id.
	const builtin = adapter.id in adapters ? adapters[adapter.id] : null;
	if (builtin) return builtin(opts);
	// Fall back to the adapter's own createSchema, normalizing `path` -> `fileName`.
	if (adapter.createSchema) {
		const { code, path: fileName, overwrite } = await adapter.createSchema(opts.options, opts.file);
		return {
			code,
			fileName,
			overwrite
		};
	}
	throw new Error(`${adapter.id} is not supported. If it is a custom adapter, please request the maintainer to implement createSchema`);
};
1266
+
1267
+ //#endregion
1268
+ export { generateKyselySchema as a, getPackageInfo as i, generateSchema as n, generateDrizzleSchema as o, generatePrismaSchema as r, adapters as t };