pecunia-cli 0.2.8 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,1542 +0,0 @@
1
- import fs, { existsSync, statSync } from "node:fs";
2
- import fs$1 from "node:fs/promises";
3
- import path from "node:path";
4
- import { getMigrations, getPaymentTables } from "pecunia-root";
5
- import { capitalizeFirstLetter, getPaymentTables as getPaymentTables$1, initGetFieldName, initGetModelName } from "pecunia-core";
6
- import prettier from "prettier";
7
- import { produceSchema } from "@mrleebo/prisma-ast";
8
-
9
- //#region src/generators/invariants.ts
10
- /**
11
- * Normalize invariants from schema into IR format.
12
- * This parses invariant descriptions and extracts structured logic.
13
- */
14
- function normalizeInvariants(schema, options) {
15
- const invariants = [];
16
- for (const [tableKey, table] of Object.entries(schema)) {
17
- if (!table.invariants) continue;
18
- const modelName = options.getModelName(tableKey);
19
- const tableName = table.modelName;
20
- for (const invariant of table.invariants) {
21
- const logic = parseInvariantLogic(invariant.description, tableKey, table, schema, options);
22
- invariants.push({
23
- id: invariant.id,
24
- description: invariant.description,
25
- modelName,
26
- tableName,
27
- appliesTo: invariant.appliesTo,
28
- enforcement: invariant.enforcement || {},
29
- logic
30
- });
31
- }
32
- }
33
- return invariants;
34
- }
35
- /**
36
- * Parse invariant description into structured logic.
37
- * This is a heuristic parser that extracts common patterns.
38
- */
39
- function parseInvariantLogic(description, tableKey, table, schema, options) {
40
- const desc = description.toLowerCase();
41
- if (desc.includes("determines") && desc.includes("presence")) {
42
- const modeMatch = description.match(/(\w+)\.(\w+)\s+determines/);
43
- const subMatch = description.match(/(\w+)\s+must be (present|null\/absent)/);
44
- const enumMatches = description.matchAll(/(\w+)\s*=>\s*(\w+)\s+must be (present|null\/absent)/g);
45
- if (modeMatch && subMatch) {
46
- const [, modelName, fieldName] = modeMatch;
47
- const [, conditionalField] = subMatch;
48
- const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
49
- const conditional = Object.keys(table.fields).find((k) => k.toLowerCase() === conditionalField.toLowerCase());
50
- if (field && conditional) {
51
- const fieldAttr = table.fields[field];
52
- let allowedValues = [];
53
- if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) allowedValues = fieldAttr.type;
54
- else {
55
- const enumValues = /* @__PURE__ */ new Set();
56
- for (const match of enumMatches) {
57
- const val = match[1]?.trim();
58
- if (val) enumValues.add(val);
59
- }
60
- if (enumValues.size > 0) allowedValues = Array.from(enumValues);
61
- else allowedValues = ["PAYMENT", "SUBSCRIPTION"].filter((e) => desc.includes(e.toLowerCase()));
62
- }
63
- let whenPresent = false;
64
- for (const match of enumMatches) {
65
- const enumVal = match[1];
66
- const requirement = match[3];
67
- if (enumVal && requirement === "present") {
68
- whenPresent = true;
69
- break;
70
- }
71
- }
72
- if (!whenPresent && desc.includes("subscription") && desc.includes("must be present")) whenPresent = true;
73
- return {
74
- type: "field_enum_constraint",
75
- field,
76
- fieldName: options.getFieldName({
77
- model: tableKey,
78
- field
79
- }),
80
- allowedValues: allowedValues.length > 0 ? allowedValues : ["PAYMENT", "SUBSCRIPTION"],
81
- conditionalField: {
82
- field: conditional,
83
- fieldName: options.getFieldName({
84
- model: tableKey,
85
- field: conditional
86
- }),
87
- whenPresent
88
- }
89
- };
90
- }
91
- }
92
- }
93
- if (desc.includes("must belong to same") || desc.includes("must equal") && desc.includes("when")) {
94
- const fieldMatch = description.match(/(\w+)\.(\w+)\s+must/);
95
- const refMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
96
- description.match(/(\w+)\.(\w+)\s+must equal/);
97
- if (fieldMatch && refMatch) {
98
- const [, modelName, fieldName] = fieldMatch;
99
- const [, refTable, refField, ownerTable, ownerField] = refMatch;
100
- const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
101
- if (field && table.fields[field]?.references) {
102
- table.fields[field].references;
103
- const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
104
- const ownerTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === ownerTable || k === ownerTable);
105
- if (refTableKey && ownerTableKey) {
106
- const ownershipField = Object.keys(table.fields).find((k) => k.toLowerCase() === ownerField.toLowerCase());
107
- if (ownershipField) return {
108
- type: "cross_table_ownership",
109
- field,
110
- fieldName: options.getFieldName({
111
- model: tableKey,
112
- field
113
- }),
114
- referencedTable: options.getModelName(refTableKey),
115
- referencedField: options.getFieldName({
116
- model: refTableKey,
117
- field: refField
118
- }),
119
- ownershipField,
120
- ownershipFieldName: options.getFieldName({
121
- model: tableKey,
122
- field: ownershipField
123
- })
124
- };
125
- }
126
- }
127
- }
128
- }
129
- if (desc.includes("must match") && desc.includes("of its")) {
130
- const fieldMatch = description.match(/(\w+)\.(\w+)\s+must match/);
131
- const refMatch = description.match(/of its\s+(\w+)/);
132
- const equalityMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
133
- if (fieldMatch && refMatch && equalityMatch) {
134
- const [, , equalityFieldName] = fieldMatch;
135
- const [, refFieldName] = refMatch;
136
- const [, refTable, refEqualityField, , sourceEqualityField] = equalityMatch;
137
- const equalityField = Object.keys(table.fields).find((k) => k.toLowerCase() === equalityFieldName.toLowerCase());
138
- const refField = Object.keys(table.fields).find((k) => k.toLowerCase() === refFieldName.toLowerCase());
139
- if (equalityField && refField && table.fields[refField]?.references) {
140
- const ref = table.fields[refField].references;
141
- const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
142
- if (refTableKey) {
143
- const refEquality = Object.keys(schema[refTableKey].fields).find((k) => k.toLowerCase() === refEqualityField.toLowerCase());
144
- if (refEquality) return {
145
- type: "cross_table_equality",
146
- field: refField,
147
- fieldName: options.getFieldName({
148
- model: tableKey,
149
- field: refField
150
- }),
151
- referencedTable: options.getModelName(refTableKey),
152
- referencedField: options.getFieldName({
153
- model: refTableKey,
154
- field: ref.field
155
- }),
156
- equalityField,
157
- equalityFieldName: options.getFieldName({
158
- model: tableKey,
159
- field: equalityField
160
- }),
161
- referencedEqualityField: refEquality,
162
- referencedEqualityFieldName: options.getFieldName({
163
- model: refTableKey,
164
- field: refEquality
165
- })
166
- };
167
- }
168
- }
169
- }
170
- }
171
- return {
172
- type: "raw",
173
- description
174
- };
175
- }
176
-
177
- //#endregion
178
- //#region src/generators/invariants-sql.ts
179
- /**
180
- * Generate SQL for enforcing invariants in Postgres (and compatible databases).
181
- * Returns SQL statements for CHECK constraints and triggers.
182
- */
183
- function emitPostgresInvariantSql(invariants, schemaName = "public") {
184
- const statements = [];
185
- const checkInvariants = invariants.filter((inv) => inv.enforcement.postgres === "check" && inv.appliesTo.includes("postgres"));
186
- const triggerInvariants = invariants.filter((inv) => inv.enforcement.postgres === "trigger" && inv.appliesTo.includes("postgres"));
187
- for (const inv of checkInvariants) {
188
- const sql = generateCheckConstraint(inv, schemaName);
189
- if (sql) statements.push(sql);
190
- }
191
- for (const inv of triggerInvariants) {
192
- const sql = generateTrigger(inv, schemaName);
193
- if (sql) statements.push(...sql);
194
- }
195
- if (statements.length === 0) return "-- No invariant enforcement SQL generated\n";
196
- return `-- Invariant enforcement SQL
197
- -- Generated from schema invariants
198
- -- DO NOT EDIT MANUALLY - This file is auto-generated
199
-
200
- ${statements.join("\n\n")}
201
- `;
202
- }
203
- /**
204
- * Generate CHECK constraint SQL for an invariant.
205
- */
206
- function generateCheckConstraint(inv, schemaName) {
207
- const { logic, tableName, id } = inv;
208
- switch (logic.type) {
209
- case "field_enum_constraint": {
210
- const { fieldName, allowedValues, conditionalField } = logic;
211
- if (conditionalField) {
212
- const constraintName$1 = `${tableName}_${id}_check`;
213
- const enumCheck$1 = allowedValues.map((val) => `'${val}'`).join(", ");
214
- const subscriptionValue = allowedValues.find((v) => v === "SUBSCRIPTION") || allowedValues.find((v) => v !== "PAYMENT") || allowedValues[0];
215
- const paymentValue = allowedValues.find((v) => v === "PAYMENT") || allowedValues[0];
216
- const conditionalCheck = `(
217
- (${fieldName} = '${subscriptionValue}') = (${conditionalField.fieldName} IS NOT NULL) AND
218
- (${fieldName} = '${paymentValue}') = (${conditionalField.fieldName} IS NULL)
219
- )`;
220
- return `-- ${inv.description}
221
- ALTER TABLE ${schemaName}.${tableName}
222
- ADD CONSTRAINT ${constraintName$1}
223
- CHECK (
224
- ${fieldName} IN (${enumCheck$1}) AND
225
- ${conditionalCheck}
226
- );`;
227
- }
228
- const constraintName = `${tableName}_${id}_check`;
229
- const enumCheck = allowedValues.map((val) => `'${val}'`).join(", ");
230
- return `-- ${inv.description}
231
- ALTER TABLE ${schemaName}.${tableName}
232
- ADD CONSTRAINT ${constraintName}
233
- CHECK (${fieldName} IN (${enumCheck}));`;
234
- }
235
- default: return null;
236
- }
237
- }
238
- /**
239
- * Generate trigger SQL for an invariant.
240
- * Returns array of SQL statements (function + trigger).
241
- */
242
- function generateTrigger(inv, schemaName) {
243
- const { logic, tableName, id } = inv;
244
- switch (logic.type) {
245
- case "cross_table_ownership": {
246
- const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
247
- const functionName = `${tableName}_${id}_fn`;
248
- const triggerName = `${tableName}_${id}_trigger`;
249
- return [`-- Function to enforce: ${inv.description}
250
- CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
251
- RETURNS TRIGGER AS $$
252
- BEGIN
253
- IF NEW.${fieldName} IS NOT NULL THEN
254
- IF NOT EXISTS (
255
- SELECT 1
256
- FROM ${schemaName}.${referencedTable}
257
- WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
258
- AND ${referencedTable}.${ownershipFieldName} = NEW.${ownershipFieldName}
259
- ) THEN
260
- RAISE EXCEPTION 'Invariant violation: % must belong to the same % as the record', '${fieldName}', '${ownershipFieldName}';
261
- END IF;
262
- END IF;
263
- RETURN NEW;
264
- END;
265
- $$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
266
- DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
267
- CREATE TRIGGER ${triggerName}
268
- BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
269
- FOR EACH ROW
270
- EXECUTE FUNCTION ${schemaName}.${functionName}();`];
271
- }
272
- case "cross_table_equality": {
273
- const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
274
- const functionName = `${tableName}_${id}_fn`;
275
- const triggerName = `${tableName}_${id}_trigger`;
276
- return [`-- Function to enforce: ${inv.description}
277
- CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
278
- RETURNS TRIGGER AS $$
279
- DECLARE
280
- ref_${referencedEqualityFieldName} TEXT;
281
- BEGIN
282
- IF NEW.${fieldName} IS NOT NULL THEN
283
- SELECT ${referencedEqualityFieldName} INTO ref_${referencedEqualityFieldName}
284
- FROM ${schemaName}.${referencedTable}
285
- WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
286
- LIMIT 1;
287
-
288
- IF ref_${referencedEqualityFieldName} IS NULL THEN
289
- RAISE EXCEPTION 'Invariant violation: Referenced record not found in ${referencedTable}';
290
- END IF;
291
-
292
- IF NEW.${equalityFieldName} != ref_${referencedEqualityFieldName} THEN
293
- RAISE EXCEPTION 'Invariant violation: % must equal %.%', '${equalityFieldName}', '${referencedTable}', '${referencedEqualityFieldName}';
294
- END IF;
295
- END IF;
296
- RETURN NEW;
297
- END;
298
- $$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
299
- DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
300
- CREATE TRIGGER ${triggerName}
301
- BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
302
- FOR EACH ROW
303
- EXECUTE FUNCTION ${schemaName}.${functionName}();`];
304
- }
305
- default: return null;
306
- }
307
- }
308
-
309
- //#endregion
310
- //#region src/utils/drizzle-migrations.ts
311
- /**
312
- * Find or create the Drizzle migrations directory.
313
- * Never places migrations under src/ to avoid drizzle-kit crashes.
314
- *
315
- * @param projectRoot - The project root directory (where package.json typically lives)
316
- * @returns The path to the drizzle migrations directory
317
- */
318
- async function getDrizzleMigrationsDir(projectRoot) {
319
- const drizzleDir = path.resolve(projectRoot, "drizzle");
320
- if (existsSync(drizzleDir)) {
321
- if (!statSync(drizzleDir).isDirectory()) throw new Error(`"drizzle" exists but is not a directory. Please remove it or rename it.`);
322
- return drizzleDir;
323
- }
324
- await fs$1.mkdir(drizzleDir, { recursive: true });
325
- return drizzleDir;
326
- }
327
- /**
328
- * Get the path to the Drizzle meta directory.
329
- */
330
- function getDrizzleMetaDir(projectRoot) {
331
- return path.resolve(projectRoot, "drizzle", "meta");
332
- }
333
- /**
334
- * Get the path to the Drizzle journal file.
335
- */
336
- function getJournalPath(projectRoot) {
337
- return path.resolve(projectRoot, "drizzle", "meta", "_journal.json");
338
- }
339
- /**
340
- * Read the Drizzle journal file, or return null if it doesn't exist.
341
- */
342
- async function readJournal(projectRoot) {
343
- const journalPath = getJournalPath(projectRoot);
344
- if (!existsSync(journalPath)) return null;
345
- const content = await fs$1.readFile(journalPath, "utf-8");
346
- return JSON.parse(content);
347
- }
348
- /**
349
- * Write the Drizzle journal file with stable JSON formatting (2-space indent).
350
- */
351
- async function writeJournal(projectRoot, journal) {
352
- const journalPath = getJournalPath(projectRoot);
353
- await fs$1.writeFile(journalPath, JSON.stringify(journal, null, 2) + "\n", "utf-8");
354
- }
355
- /**
356
- * Ensure the Drizzle journal exists. If it doesn't exist, create it with a baseline init migration.
357
- *
358
- * @param projectRoot - The project root directory
359
- * @returns true if journal was created, false if it already existed
360
- */
361
- async function ensureDrizzleJournal(projectRoot) {
362
- if (existsSync(getJournalPath(projectRoot))) return false;
363
- const metaDir = getDrizzleMetaDir(projectRoot);
364
- await fs$1.mkdir(metaDir, { recursive: true });
365
- await writeJournal(projectRoot, {
366
- version: "7",
367
- dialect: "postgresql",
368
- entries: []
369
- });
370
- const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
371
- const initMigrationPath = path.join(migrationsDir, "0000_pecunia_init.sql");
372
- await fs$1.writeFile(initMigrationPath, "-- Initial no-op migration created by pecunia-cli\n", "utf-8");
373
- await upsertJournalEntry(projectRoot, "0000_pecunia_init");
374
- return true;
375
- }
376
- /**
377
- * Upsert a journal entry. If an entry with the same tag exists, do nothing.
378
- * Otherwise, append a new entry with the next available idx.
379
- *
380
- * @param projectRoot - The project root directory
381
- * @param tag - The migration tag (filename base without .sql)
382
- */
383
- async function upsertJournalEntry(projectRoot, tag) {
384
- const journal = await readJournal(projectRoot);
385
- if (!journal) throw new Error("Journal does not exist. Call ensureDrizzleJournal() first.");
386
- if (journal.entries.find((entry) => entry.tag === tag)) return;
387
- const nextIdx = (journal.entries.length > 0 ? Math.max(...journal.entries.map((e) => e.idx)) : -1) + 1;
388
- const hasBreakpoints = journal.entries.some((e) => e.breakpoints === true);
389
- const newEntry = {
390
- tag,
391
- idx: nextIdx,
392
- version: journal.version,
393
- when: Date.now(),
394
- ...hasBreakpoints && { breakpoints: true }
395
- };
396
- journal.entries.push(newEntry);
397
- await writeJournal(projectRoot, journal);
398
- }
399
- /**
400
- * Find existing invariants migration tag in the journal.
401
- *
402
- * @param projectRoot - The project root directory
403
- * @returns The tag if found, or null
404
- */
405
- async function findExistingInvariantsTag(projectRoot) {
406
- const journal = await readJournal(projectRoot);
407
- if (!journal) return null;
408
- const invariantsEntry = journal.entries.find((entry) => entry.tag.endsWith("_pecunia_invariants"));
409
- return invariantsEntry ? invariantsEntry.tag : null;
410
- }
411
- /**
412
- * Determine the next migration number by checking journal entries and existing files.
413
- * Prefers checking journal entries first, then falls back to scanning files.
414
- *
415
- * @param projectRoot - The project root directory
416
- * @returns The next migration number (4-digit string, e.g., "0001")
417
- */
418
- async function nextMigrationNumber(projectRoot) {
419
- const existingInvariantsTag = await findExistingInvariantsTag(projectRoot);
420
- if (existingInvariantsTag) {
421
- const match = existingInvariantsTag.match(/^(\d{4})_/);
422
- if (match) return match[1];
423
- }
424
- const journal = await readJournal(projectRoot);
425
- if (journal && journal.entries.length > 0) {
426
- const numbers$1 = journal.entries.map((entry) => {
427
- const match = entry.tag.match(/^(\d{4})_/);
428
- return match ? parseInt(match[1], 10) : 0;
429
- }).filter((n) => n >= 0);
430
- if (numbers$1.length > 0) {
431
- const nextNumber$1 = Math.max(...numbers$1) + 1;
432
- if (nextNumber$1 >= 9999) return "9999";
433
- return String(nextNumber$1).padStart(4, "0");
434
- }
435
- }
436
- const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
437
- if (!existsSync(migrationsDir)) return "0001";
438
- const migrationFiles = (await fs$1.readdir(migrationsDir)).filter((file) => file.endsWith(".sql") && /^\d{4}_/.test(file));
439
- if (migrationFiles.length === 0) return "0001";
440
- const numbers = migrationFiles.map((file) => {
441
- const match = file.match(/^(\d{4})_/);
442
- return match ? parseInt(match[1], 10) : 0;
443
- }).filter((n) => n > 0);
444
- if (numbers.length === 0) return "0001";
445
- const nextNumber = Math.max(...numbers) + 1;
446
- if (nextNumber >= 9999) return "9999";
447
- return String(nextNumber).padStart(4, "0");
448
- }
449
- /**
450
- * Get the path for the invariants migration file.
451
- * Uses a deterministic name based on migration number to ensure it runs after
452
- * base table creation migrations.
453
- *
454
- * @param migrationsDir - Path to the drizzle migrations directory
455
- * @param migrationNumber - The migration number to use (4-digit string)
456
- * @returns The full path to the invariants migration file
457
- */
458
- function getInvariantsMigrationPath(migrationsDir, migrationNumber) {
459
- return path.join(migrationsDir, `${migrationNumber}_pecunia_invariants.sql`);
460
- }
461
- /**
462
- * Upsert a Drizzle migration file and register it in the journal.
463
- *
464
- * @param projectRoot - The project root directory
465
- * @param tag - The migration tag (filename base without .sql)
466
- * @param sql - The SQL content to write
467
- */
468
- async function upsertDrizzleMigration(projectRoot, tag, sql) {
469
- const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
470
- const migrationPath = path.join(migrationsDir, `${tag}.sql`);
471
- if (existsSync(migrationPath)) {
472
- if (await fs$1.readFile(migrationPath, "utf-8") === sql) {
473
- await upsertJournalEntry(projectRoot, tag);
474
- return;
475
- }
476
- }
477
- await fs$1.writeFile(migrationPath, sql, "utf-8");
478
- await upsertJournalEntry(projectRoot, tag);
479
- }
480
- /**
481
- * Write the invariants SQL file to the drizzle migrations directory.
482
- * Ensures journal exists, determines migration number, and registers in journal.
483
- *
484
- * @param projectRoot - The project root directory
485
- * @param sqlContent - The SQL content to write
486
- * @returns The path to the written file and status flags
487
- */
488
- async function writeInvariantsMigration(projectRoot, sqlContent) {
489
- const journalCreated = await ensureDrizzleJournal(projectRoot);
490
- const migrationNumber = await nextMigrationNumber(projectRoot);
491
- const tag = `${migrationNumber}_pecunia_invariants`;
492
- const migrationPath = getInvariantsMigrationPath(await getDrizzleMigrationsDir(projectRoot), migrationNumber);
493
- let created = false;
494
- let updated = false;
495
- if (existsSync(migrationPath)) {
496
- if (await fs$1.readFile(migrationPath, "utf-8") === sqlContent) {
497
- await upsertJournalEntry(projectRoot, tag);
498
- return {
499
- path: migrationPath,
500
- created: false,
501
- updated: false,
502
- journalCreated
503
- };
504
- }
505
- updated = true;
506
- } else created = true;
507
- await upsertDrizzleMigration(projectRoot, tag, sqlContent);
508
- return {
509
- path: migrationPath,
510
- created,
511
- updated,
512
- journalCreated
513
- };
514
- }
515
-
516
- //#endregion
517
- //#region src/generators/drizzle.ts
518
- function convertToSnakeCase(str, camelCase) {
519
- if (camelCase) return str;
520
- return str.replace(/([A-Z]+)([A-Z][a-z])/g, "$1_$2").replace(/([a-z\d])([A-Z])/g, "$1_$2").toLowerCase();
521
- }
522
- const generateDrizzleSchema = async ({ options, file, adapter }) => {
523
- const tables = getPaymentTables$1(options);
524
- const filePath = file || "./src/db/schema.ts";
525
- const databaseType = adapter.options?.provider;
526
- const projectRoot = process.cwd();
527
- if (!databaseType) throw new Error("Database provider type is undefined during Drizzle schema generation. Please define a `provider` in the Drizzle adapter config.");
528
- const resolvedSchemaPath = path.isAbsolute(filePath) ? filePath : path.resolve(projectRoot, filePath);
529
- const schemaDir = path.dirname(resolvedSchemaPath);
530
- if (!existsSync(schemaDir)) await fs$1.mkdir(schemaDir, { recursive: true });
531
- const fileExist = existsSync(resolvedSchemaPath);
532
- let code = generateImport({
533
- databaseType,
534
- tables,
535
- options
536
- });
537
- const getModelName = initGetModelName({
538
- schema: tables,
539
- usePlural: adapter.options?.adapterConfig?.usePlural
540
- });
541
- const getFieldName = initGetFieldName({
542
- schema: tables,
543
- usePlural: adapter.options?.adapterConfig?.usePlural
544
- });
545
- const tableNameMap = /* @__PURE__ */ new Map();
546
- for (const tableKey in tables) tableNameMap.set(tableKey, getModelName(tableKey));
547
- function getType(name, field, databaseType$1) {
548
- name = convertToSnakeCase(name, adapter.options?.camelCase);
549
- if (field.references?.field === "id") {
550
- if (databaseType$1 === "mysql") return `varchar('${name}', { length: 36 })`;
551
- return `text('${name}')`;
552
- }
553
- const type = field.type;
554
- if (typeof type !== "string") {
555
- if (Array.isArray(type) && type.every((x) => typeof x === "string")) return {
556
- sqlite: `text({ enum: [${type.map((x) => `'${x}'`).join(", ")}] })`,
557
- pg: `text('${name}', { enum: [${type.map((x) => `'${x}'`).join(", ")}] })`,
558
- mysql: `mysqlEnum([${type.map((x) => `'${x}'`).join(", ")}])`
559
- }[databaseType$1];
560
- throw new TypeError(`Invalid field type for field ${name}`);
561
- }
562
- const dbTypeMap = {
563
- string: {
564
- sqlite: `text('${name}')`,
565
- pg: `text('${name}')`,
566
- mysql: field.unique ? `varchar('${name}', { length: 255 })` : field.references ? `varchar('${name}', { length: 36 })` : field.sortable ? `varchar('${name}', { length: 255 })` : field.index ? `varchar('${name}', { length: 255 })` : `text('${name}')`
567
- },
568
- boolean: {
569
- sqlite: `integer('${name}', { mode: 'boolean' })`,
570
- pg: `boolean('${name}')`,
571
- mysql: `boolean('${name}')`
572
- },
573
- number: {
574
- sqlite: `integer('${name}')`,
575
- pg: field.bigint ? `bigint('${name}', { mode: 'number' })` : `integer('${name}')`,
576
- mysql: field.bigint ? `bigint('${name}', { mode: 'number' })` : `int('${name}')`
577
- },
578
- date: {
579
- sqlite: `integer('${name}', { mode: 'timestamp_ms' })`,
580
- pg: `timestamp('${name}')`,
581
- mysql: `timestamp('${name}', { fsp: 3 })`
582
- },
583
- "number[]": {
584
- sqlite: `text('${name}', { mode: "json" })`,
585
- pg: field.bigint ? `bigint('${name}', { mode: 'number' }).array()` : `integer('${name}').array()`,
586
- mysql: `text('${name}', { mode: 'json' })`
587
- },
588
- "string[]": {
589
- sqlite: `text('${name}', { mode: "json" })`,
590
- pg: `text('${name}').array()`,
591
- mysql: `text('${name}', { mode: "json" })`
592
- },
593
- json: {
594
- sqlite: `text('${name}', { mode: "json" })`,
595
- pg: `jsonb('${name}')`,
596
- mysql: `json('${name}', { mode: "json" })`
597
- },
598
- uuid: {
599
- sqlite: `text('${name}')`,
600
- pg: `uuid('${name}')`,
601
- mysql: `varchar('${name}', { length: 36 })`
602
- }
603
- }[type];
604
- if (!dbTypeMap) throw new Error(`Unsupported field type '${field.type}' for field '${name}'.`);
605
- return dbTypeMap[databaseType$1];
606
- }
607
- const tableDefinitions = [];
608
- for (const tableKey in tables) {
609
- const table = tables[tableKey];
610
- const modelName = getModelName(tableKey);
611
- const fields = table.fields;
612
- const idFieldType = table.fields.id?.type;
613
- let id;
614
- if (databaseType === "pg" && idFieldType === "uuid") id = `uuid('id').primaryKey()`;
615
- else if (databaseType === "mysql") id = `varchar('id', { length: 36 }).primaryKey()`;
616
- else id = `text('id').primaryKey()`;
617
- const indexes = [];
618
- const references = [];
619
- for (const field of Object.keys(fields)) {
620
- if (field === "id") continue;
621
- const attr = fields[field];
622
- const fieldName = attr.fieldName || field;
623
- if (attr.index && !attr.unique) indexes.push({
624
- type: "index",
625
- name: `${modelName}_${fieldName}_idx`,
626
- on: fieldName
627
- });
628
- else if (attr.index && attr.unique) indexes.push({
629
- type: "uniqueIndex",
630
- name: `${modelName}_${fieldName}_uidx`,
631
- on: fieldName
632
- });
633
- if (attr.references) {
634
- const referencedModelName = tableNameMap.get(attr.references.model) || getModelName(attr.references.model);
635
- const onDelete = attr.references.onDelete || "no action";
636
- references.push({
637
- fieldName,
638
- referencedTable: referencedModelName,
639
- referencedField: getFieldName({
640
- model: attr.references.model,
641
- field: attr.references.field
642
- }),
643
- onDelete,
644
- required: attr.required ?? false,
645
- originalModel: attr.references.model
646
- });
647
- }
648
- }
649
- tableDefinitions.push({
650
- modelName,
651
- tableKey,
652
- fields,
653
- id,
654
- indexes,
655
- references
656
- });
657
- }
658
- const modelKeyToTableKey = /* @__PURE__ */ new Map();
659
- for (const tableKey in tables) {
660
- const table = tables[tableKey];
661
- const modelName = getModelName(tableKey);
662
- modelKeyToTableKey.set(tableKey, tableKey);
663
- modelKeyToTableKey.set(modelName, tableKey);
664
- modelKeyToTableKey.set(table.modelName, tableKey);
665
- }
666
- const referenceGraph = /* @__PURE__ */ new Map();
667
- for (const tableDef of tableDefinitions) for (const ref of tableDef.references) {
668
- const referencedTableKey = modelKeyToTableKey.get(ref.originalModel);
669
- if (!referencedTableKey) continue;
670
- const key = `${tableDef.tableKey}->${referencedTableKey}`;
671
- referenceGraph.set(key, {
672
- ...ref,
673
- sourceTable: tableDef.tableKey,
674
- sourceModelName: tableDef.modelName
675
- });
676
- }
677
- const skipReferences = /* @__PURE__ */ new Set();
678
- for (const tableDef of tableDefinitions) for (const ref of tableDef.references) {
679
- const referencedTableKey = modelKeyToTableKey.get(ref.originalModel);
680
- if (!referencedTableKey) continue;
681
- const reverseKey = `${referencedTableKey}->${tableDef.tableKey}`;
682
- const reverseRef = referenceGraph.get(reverseKey);
683
- if (reverseRef) {
684
- const thisIsNullableWithSetNull = !ref.required && ref.onDelete === "set null";
685
- const reverseIsNullableWithSetNull = !reverseRef.required && reverseRef.onDelete === "set null";
686
- if (thisIsNullableWithSetNull) continue;
687
- if (reverseIsNullableWithSetNull) continue;
688
- if (!ref.required && (reverseRef.required || ref.onDelete !== "cascade" && reverseRef.onDelete === "cascade")) skipReferences.add(`${tableDef.tableKey}.${ref.fieldName}`);
689
- }
690
- }
691
- for (const tableDef of tableDefinitions) {
692
- const { modelName, fields, id, indexes, references } = tableDef;
693
- const assignIndexes = (indexesToAssign) => {
694
- if (!indexesToAssign.length) return "";
695
- const parts = [`, (table) => [`];
696
- for (const index of indexesToAssign) parts.push(` ${index.type}("${index.name}").on(table.${index.on}),`);
697
- parts.push(`]`);
698
- return parts.join("\n");
699
- };
700
- const referenceMap = /* @__PURE__ */ new Map();
701
- for (const ref of references) referenceMap.set(ref.fieldName, ref);
702
- const hasCircularRef = references.some((ref) => {
703
- const referencedTableKey = modelKeyToTableKey.get(ref.originalModel);
704
- if (!referencedTableKey) return false;
705
- const reverseKey = `${referencedTableKey}->${tableDef.tableKey}`;
706
- return referenceGraph.has(reverseKey);
707
- });
708
- const fieldDefinitions = Object.keys(fields).filter((field) => field !== "id").map((field) => {
709
- const attr = fields[field];
710
- const fieldName = attr.fieldName || field;
711
- let type = getType(fieldName, attr, databaseType);
712
- let comment = "";
713
- if (attr.defaultValue !== null && typeof attr.defaultValue !== "undefined") if (typeof attr.defaultValue === "function") {
714
- if (attr.type === "date" && attr.defaultValue.toString().includes("new Date()")) if (databaseType === "sqlite") type += `.default(sql\`(cast(unixepoch('subsecond') * 1000 as integer))\`)`;
715
- else type += `.defaultNow()`;
716
- } else if (typeof attr.defaultValue === "string") type += `.default("${attr.defaultValue}")`;
717
- else type += `.default(${attr.defaultValue})`;
718
- if (attr.onUpdate && attr.type === "date") {
719
- if (typeof attr.onUpdate === "function") type += `.$onUpdate(${attr.onUpdate})`;
720
- }
721
- const ref = referenceMap.get(fieldName);
722
- const shouldSkipReference = skipReferences.has(`${tableDef.tableKey}.${fieldName}`);
723
- let referenceChain = "";
724
- if (ref && !shouldSkipReference) referenceChain = `.references(() => ${ref.referencedTable}.${ref.referencedField}, { onDelete: '${ref.onDelete}' })`;
725
- else if (ref && shouldSkipReference) {
726
- const reverseKey = `${ref.originalModel}->${tableDef.tableKey}`;
727
- const reverseRef = referenceGraph.get(reverseKey);
728
- if (reverseRef) comment = `\n // FK constraint removed to break circular dependency with ${ref.referencedTable}\n // Primary FK: ${reverseRef.sourceModelName}.${reverseRef.fieldName} -> ${modelName}.${fieldName}\n // This field still maintains referential integrity via application logic and Drizzle relations`;
729
- else comment = `\n // FK constraint removed to break circular dependency with ${ref.referencedTable}\n // This field still maintains referential integrity via application logic and Drizzle relations`;
730
- }
731
- const isRequired = attr.required === true;
732
- const fieldDef = `${fieldName}: ${type}${isRequired ? ".notNull()" : ""}${attr.unique ? ".unique()" : ""}${referenceChain}`;
733
- return comment ? `${comment}\n ${fieldDef}` : fieldDef;
734
- });
735
- const typeAssertion = hasCircularRef ? " as any" : "";
736
- const schema = `export const ${modelName} = ${databaseType}Table("${convertToSnakeCase(modelName, adapter.options?.camelCase)}", {
737
- id: ${id},
738
- ${fieldDefinitions.join(",\n ")}
739
- }${assignIndexes(indexes)})${typeAssertion};`;
740
- code += `\n${schema}\n`;
741
- }
742
- let relationsString = "";
743
- for (const tableKey in tables) {
744
- const table = tables[tableKey];
745
- const modelName = getModelName(tableKey);
746
- const oneRelations = [];
747
- const manyRelations = [];
748
- if (table.relations) {
749
- for (const [relationName, relationDef] of Object.entries(table.relations)) {
750
- const referencedModelName = getModelName(relationDef.model);
751
- const foreignKeyField = table.fields[relationDef.foreignKey];
752
- const isSelfReferential = relationDef.model === tableKey || referencedModelName === modelName;
753
- const generateRelationName = (fkName) => {
754
- let cleaned = convertToSnakeCase(fkName, adapter.options?.camelCase).replace(/_by_id$/, "").replace(/_id$/, "");
755
- const participleToNoun = {
756
- "reversed": "reversal",
757
- "created": "creation",
758
- "updated": "update"
759
- };
760
- if (participleToNoun[cleaned]) cleaned = participleToNoun[cleaned];
761
- return `${convertToSnakeCase(modelName, adapter.options?.camelCase)}_${cleaned}`;
762
- };
763
- if (relationDef.kind === "one") {
764
- if (foreignKeyField?.references) {
765
- const fieldRef = `${modelName}.${getFieldName({
766
- model: tableKey,
767
- field: relationDef.foreignKey
768
- })}`;
769
- const referenceRef = `${referencedModelName}.${getFieldName({
770
- model: relationDef.model,
771
- field: foreignKeyField.references.field || "id"
772
- })}`;
773
- oneRelations.push({
774
- key: relationName,
775
- model: referencedModelName,
776
- type: "one",
777
- reference: {
778
- field: fieldRef,
779
- references: referenceRef,
780
- fieldName: relationDef.foreignKey
781
- },
782
- relationName: isSelfReferential ? generateRelationName(relationDef.foreignKey) : void 0
783
- });
784
- }
785
- } else if (relationDef.kind === "many") {
786
- const referencedTable = tables[relationDef.model];
787
- if (referencedTable) {
788
- const fkField = Object.entries(referencedTable.fields).find(([_, field]) => field.references && (field.references.model === tableKey || field.references.model === getModelName(tableKey)));
789
- if (fkField) {
790
- const [fkFieldName] = fkField;
791
- const fieldRef = `${referencedModelName}.${getFieldName({
792
- model: relationDef.model,
793
- field: fkFieldName
794
- })}`;
795
- const referenceRef = `${modelName}.${getFieldName({
796
- model: tableKey,
797
- field: "id"
798
- })}`;
799
- let relationNameForMany;
800
- if (isSelfReferential) {
801
- const matchingOne = oneRelations.find((rel) => rel.reference?.fieldName === fkFieldName && rel.model === referencedModelName);
802
- if (matchingOne?.relationName) relationNameForMany = matchingOne.relationName;
803
- else {
804
- let cleaned = convertToSnakeCase(fkFieldName, adapter.options?.camelCase).replace(/_by_id$/, "").replace(/_id$/, "");
805
- const participleToNoun = {
806
- "reversed": "reversal",
807
- "created": "creation",
808
- "updated": "update"
809
- };
810
- if (participleToNoun[cleaned]) cleaned = participleToNoun[cleaned];
811
- relationNameForMany = `${convertToSnakeCase(modelName, adapter.options?.camelCase)}_${cleaned}`;
812
- }
813
- }
814
- manyRelations.push({
815
- key: relationName,
816
- model: referencedModelName,
817
- type: "many",
818
- reference: {
819
- field: fieldRef,
820
- references: referenceRef,
821
- fieldName: fkFieldName
822
- },
823
- relationName: relationNameForMany
824
- });
825
- } else manyRelations.push({
826
- key: relationName,
827
- model: referencedModelName,
828
- type: "many"
829
- });
830
- }
831
- }
832
- }
833
- for (const oneRel of oneRelations) {
834
- if (!oneRel.relationName || !oneRel.reference) continue;
835
- const oneRelFieldName = oneRel.reference.fieldName;
836
- const matchingMany = manyRelations.find((manyRel) => manyRel.model === oneRel.model && manyRel.reference?.fieldName === oneRelFieldName);
837
- if (matchingMany) matchingMany.relationName = oneRel.relationName;
838
- }
839
- }
840
- const relationsByModel = /* @__PURE__ */ new Map();
841
- for (const relation of oneRelations) {
842
- if (!relation.reference) continue;
843
- const modelKey = relation.key;
844
- if (!relationsByModel.has(modelKey)) relationsByModel.set(modelKey, []);
845
- relationsByModel.get(modelKey).push(relation);
846
- }
847
- const duplicateRelations = [];
848
- const singleRelations = [];
849
- for (const [_modelKey, rels] of relationsByModel.entries()) if (rels.length > 1) duplicateRelations.push(...rels);
850
- else singleRelations.push(rels[0]);
851
- for (const relation of duplicateRelations) {
852
- if (!relation.reference) continue;
853
- const fieldName = relation.reference.fieldName;
854
- const relationExportName = `${modelName}${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)}Relations`;
855
- const relationNameParam = relation.relationName ? `,\n relationName: "${relation.relationName}"` : "";
856
- const tableRelation = `export const ${relationExportName} = relations(${modelName}, ({ one }) => ({
857
- ${relation.key}: one(${relation.model}, {
858
- fields: [${relation.reference.field}],
859
- references: [${relation.reference.references}]${relationNameParam}
860
- })
861
- }))`;
862
- relationsString += `\n${tableRelation}\n`;
863
- }
864
- const hasOne = singleRelations.length > 0;
865
- const hasMany = manyRelations.length > 0;
866
- if (hasOne && hasMany) {
867
- const tableRelation = `export const ${modelName}Relations = relations(${modelName}, ({ one, many }) => ({
868
- ${singleRelations.map((relation) => {
869
- if (!relation.reference) return "";
870
- const relationNameParam = relation.relationName ? `,\n relationName: "${relation.relationName}"` : "";
871
- return ` ${relation.key}: one(${relation.model}, {
872
- fields: [${relation.reference.field}],
873
- references: [${relation.reference.references}]${relationNameParam}
874
- })`;
875
- }).filter((x) => x !== "").join(",\n ")}${singleRelations.length > 0 && manyRelations.length > 0 ? "," : ""}
876
- ${manyRelations.map(({ key, model, relationName }) => {
877
- return ` ${key}: many(${model}${relationName ? `, { relationName: "${relationName}" }` : ""})`;
878
- }).join(",\n ")}
879
- }))`;
880
- relationsString += `\n${tableRelation}\n`;
881
- } else if (hasOne) {
882
- const tableRelation = `export const ${modelName}Relations = relations(${modelName}, ({ one }) => ({
883
- ${singleRelations.map((relation) => {
884
- if (!relation.reference) return "";
885
- const relationNameParam = relation.relationName ? `,\n relationName: "${relation.relationName}"` : "";
886
- return ` ${relation.key}: one(${relation.model}, {
887
- fields: [${relation.reference.field}],
888
- references: [${relation.reference.references}]${relationNameParam}
889
- })`;
890
- }).filter((x) => x !== "").join(",\n ")}
891
- }))`;
892
- relationsString += `\n${tableRelation}\n`;
893
- } else if (hasMany) {
894
- const tableRelation = `export const ${modelName}Relations = relations(${modelName}, ({ many }) => ({
895
- ${manyRelations.map(({ key, model, relationName }) => {
896
- return ` ${key}: many(${model}${relationName ? `, { relationName: "${relationName}" }` : ""})`;
897
- }).join(",\n ")}
898
- }))`;
899
- relationsString += `\n${tableRelation}\n`;
900
- }
901
- }
902
- code += `\n${relationsString}`;
903
- const typeHints = generateInvariantTypeHints(tables, getModelName, getFieldName);
904
- if (typeHints) code += `\n\n${typeHints}`;
905
- const formattedCode = await prettier.format(code, { parser: "typescript" });
906
- if (databaseType === "pg") {
907
- const result = await writeInvariantsMigration(projectRoot, emitPostgresInvariantSql(normalizeInvariants(tables, {
908
- getModelName,
909
- getFieldName
910
- }), "public"));
911
- if (result.journalCreated) console.log(`Created Drizzle migration journal and invariants migration; now run \`npx drizzle-kit migrate\`.`);
912
- else if (result.created) console.log(`Generated invariants migration: ${path.relative(projectRoot, result.path)}`);
913
- else if (result.updated) console.log(`Updated invariants migration: ${path.relative(projectRoot, result.path)}`);
914
- else console.log(`Invariants migration up to date: ${path.relative(projectRoot, result.path)}`);
915
- }
916
- return {
917
- code: formattedCode,
918
- fileName: path.relative(projectRoot, resolvedSchemaPath),
919
- overwrite: fileExist
920
- };
921
- };
922
/**
 * Build a block of TypeScript `export type` aliases for enum-style fields
 * referenced by "determines ... presence" invariants.
 *
 * @param tables - Schema tables keyed by table key; each may carry `invariants`.
 * @param getModelName - Resolves a table key to its generated model name.
 * @param getFieldName - Unused here; kept for signature parity with callers.
 * @returns Hint lines preceded by a header comment, or "" when no invariant
 *   yields a hint.
 */
function generateInvariantTypeHints(tables, getModelName, getFieldName) {
	const hintLines = [];
	for (const [tableKey, table] of Object.entries(tables)) {
		if (!table.invariants) continue;
		for (const invariant of table.invariants) {
			const lowered = invariant.description.toLowerCase();
			// Only invariants phrased as "<model>.<field> determines ... presence"
			// carry an enum union worth surfacing as a type alias.
			if (!lowered.includes("determines") || !lowered.includes("presence")) continue;
			const match = invariant.description.match(/(\w+)\.(\w+)\s+determines/);
			if (!match) continue;
			const fieldName = match[2];
			// Field lookup is case-insensitive against the table's declared keys.
			const fieldKey = Object.keys(table.fields).find(
				(candidate) => candidate.toLowerCase() === fieldName.toLowerCase()
			);
			if (!fieldKey) continue;
			const attr = table.fields[fieldKey];
			const isStringEnum =
				typeof attr.type !== "string" &&
				Array.isArray(attr.type) &&
				attr.type.every((member) => typeof member === "string");
			if (!isStringEnum) continue;
			const capitalized = fieldName.charAt(0).toUpperCase() + fieldName.slice(1);
			const enumName = `${getModelName(tableKey)}${capitalized}Mode`;
			const union = attr.type.map((value) => `"${value}"`).join(" | ");
			hintLines.push(`// Type hint for invariant: ${invariant.id}`);
			hintLines.push(`export type ${enumName} = ${union};`);
		}
	}
	return hintLines.length > 0 ? `\n// Invariant type hints\n${hintLines.join("\n")}` : "";
}
951
/**
 * Compose the drizzle-orm import statements required by the generated schema
 * for the given dialect, based on which field kinds appear in `tables`.
 *
 * @param {{ databaseType: string, tables: object }} args
 * @returns {string} One or two import lines terminated by "\n".
 */
function generateImport({ databaseType, tables }) {
	// Predicate helper: does any field of any table satisfy `check`?
	const someField = (check) =>
		Object.values(tables).some((table) => Object.values(table.fields).some(check));
	const hasBigint = someField((field) => Boolean(field.bigint));
	const hasJson = someField((field) => field.type === "json");
	const hasUuid = someField((field) => field.type === "uuid");
	const rootImports = ["relations"];
	const coreImports = [`${databaseType}Table`];
	// MySQL needs varchar for bounded columns; other dialects only text.
	coreImports.push(databaseType === "mysql" ? "varchar, text" : "text");
	if (hasBigint && databaseType !== "sqlite") coreImports.push("bigint");
	if (databaseType !== "sqlite") coreImports.push("timestamp, boolean");
	const hasPlainNumber = someField(
		(field) => (field.type === "number" || field.type === "number[]") && !field.bigint
	);
	const hasStringEnum = someField(
		(field) =>
			typeof field.type !== "string" &&
			Array.isArray(field.type) &&
			field.type.every((x) => typeof x === "string")
	);
	if (databaseType === "mysql") {
		if (hasPlainNumber) coreImports.push("int");
		if (hasStringEnum) coreImports.push("mysqlEnum");
	} else if (databaseType === "pg") {
		if (hasPlainNumber) coreImports.push("integer");
		if (hasUuid) coreImports.push("uuid");
	} else {
		coreImports.push("integer");
	}
	if (hasJson && databaseType === "pg") coreImports.push("jsonb");
	if (hasJson && databaseType === "mysql") coreImports.push("json");
	// SQLite date columns defaulting to `new Date()` are emitted via a sql`...`
	// expression, so the `sql` tag must be imported from the drizzle-orm root.
	const needsSqlTag =
		databaseType === "sqlite" &&
		someField(
			(field) =>
				field.type === "date" &&
				field.defaultValue &&
				typeof field.defaultValue === "function" &&
				field.defaultValue.toString().includes("new Date()")
		);
	if (needsSqlTag) rootImports.push("sql");
	if (someField((field) => field.index && !field.unique)) coreImports.push("index");
	if (someField((field) => field.unique && field.index)) coreImports.push("uniqueIndex");
	const rootLine =
		rootImports.length > 0 ? `import { ${rootImports.join(", ")} } from "drizzle-orm";\n` : "";
	const coreNames = coreImports.map((x) => x.trim()).filter((x) => x !== "").join(", ");
	return `${rootLine}import { ${coreNames} } from "drizzle-orm/${databaseType}-core";\n`;
}
987
-
988
- //#endregion
989
- //#region src/generators/kysely.ts
990
/**
 * Generate the Kysely SQL migration file and, for the Postgres dialect,
 * write an `invariants.sql` next to it.
 *
 * @param {{ options: object, file?: string, adapter?: object }} args
 * @returns {Promise<{ code: string, fileName: string }>} `code` is "" when the
 *   compiled migrations are effectively empty (a lone ";").
 */
const generateKyselySchema = async ({ options, file, adapter }) => {
	const { compileMigrations } = await getMigrations(options);
	const migrations = await compileMigrations();
	// Default file name is timestamped; ":" is replaced since it is not a
	// portable path character.
	const timestamp = new Date().toISOString().replace(/:/g, "-");
	const migrationFile = file || `./better-auth_migrations/${timestamp}.sql`;
	const dialect = adapter?.options?.type || "postgres";
	if (dialect === "postgres") {
		const tables = getPaymentTables(options);
		const invariants = normalizeInvariants(tables, {
			getModelName: initGetModelName({
				schema: tables,
				usePlural: adapter?.options?.adapterConfig?.usePlural
			}),
			getFieldName: initGetFieldName({ schema: tables, usePlural: false })
		});
		const sql = emitPostgresInvariantSql(invariants, "public");
		const sqlFilePath = path.join(path.dirname(migrationFile), "invariants.sql");
		const absoluteSqlPath = path.resolve(process.cwd(), sqlFilePath);
		await fs$1.mkdir(path.dirname(absoluteSqlPath), { recursive: true });
		await fs$1.writeFile(absoluteSqlPath, sql);
		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
	}
	const isEmptyMigration = migrations.trim() === ";";
	return {
		code: isEmptyMigration ? "" : migrations,
		fileName: migrationFile
	};
};
1017
-
1018
- //#endregion
1019
- //#region src/utils/get-package-info.ts
1020
/**
 * Read and parse the package.json under `cwd` (or relative to the current
 * working directory when `cwd` is omitted).
 *
 * @param {string} [cwd] - Directory containing package.json.
 * @returns {object} Parsed package.json contents.
 * @throws If the file is missing or contains invalid JSON.
 */
function getPackageInfo(cwd) {
	const segments = cwd ? [cwd, "package.json"] : ["package.json"];
	const manifestPath = path.join(...segments);
	const raw = fs.readFileSync(manifestPath, "utf-8");
	return JSON.parse(raw);
}
1024
/**
 * Best-effort detection of the Prisma major version by scanning package.json
 * dependency ranges (`prisma` or `@prisma/client`, in deps or devDeps).
 *
 * @param {string} [cwd] - Directory whose package.json is inspected.
 * @returns {number|null} Major version number, or null when undeterminable.
 */
function getPrismaVersion(cwd) {
	try {
		const pkg = getPackageInfo(cwd);
		const candidates = [
			pkg.dependencies?.prisma,
			pkg.devDependencies?.prisma,
			pkg.dependencies?.["@prisma/client"],
			pkg.devDependencies?.["@prisma/client"]
		];
		const range = candidates.find((value) => Boolean(value));
		if (!range) return null;
		// First run of digits in the range (e.g. "^5.10.2" -> 5).
		const digits = range.match(/(\d+)/);
		return digits ? parseInt(digits[1], 10) : null;
	} catch {
		// Missing or malformed package.json: treat as unknown.
		return null;
	}
}
1035
-
1036
- //#endregion
1037
- //#region src/generators/prisma.ts
1038
/**
 * Generate (or incrementally update) a Prisma schema for the payment tables.
 *
 * Reads the existing ./prisma/schema.prisma when present (otherwise a starter
 * schema from getNewPrisma), adds any missing models/fields/attributes via
 * prisma-ast's produceSchema builder, and for Postgres also writes an
 * invariants.sql beside the schema.
 *
 * @param {{ adapter: object, options: object, file?: string }} args
 * @returns {Promise<{ code: string, fileName: string, overwrite: boolean }>}
 *   `code` is "" when the schema is already up to date.
 */
const generatePrismaSchema = async ({ adapter, options, file }) => {
	const provider = adapter.options?.provider || "postgresql";
	const tables = getPaymentTables$1(options);
	const filePath = file || "./prisma/schema.prisma";
	const schemaPrismaExist = existsSync(path.join(process.cwd(), filePath));
	const getModelName = initGetModelName({
		schema: getPaymentTables$1(options),
		usePlural: adapter.options?.adapterConfig?.usePlural
	});
	const getFieldName = initGetFieldName({
		schema: getPaymentTables$1(options),
		usePlural: false
	});
	// Start from the existing schema file, or a fresh starter schema.
	let schemaPrisma = "";
	if (schemaPrismaExist) schemaPrisma = await fs$1.readFile(path.join(process.cwd(), filePath), "utf-8");
	else schemaPrisma = getNewPrisma(provider, process.cwd());
	const prismaVersion = getPrismaVersion(process.cwd());
	// Prisma >= 7 renamed the client generator from "prisma-client-js" to
	// "prisma-client"; migrate an existing schema's generator block in place.
	if (prismaVersion && prismaVersion >= 7 && schemaPrismaExist) schemaPrisma = produceSchema(schemaPrisma, (builder) => {
		const generator = builder.findByType("generator", { name: "client" });
		if (generator && generator.properties) {
			const providerProp = generator.properties.find((prop) => prop.type === "assignment" && prop.key === "provider");
			if (providerProp && providerProp.value === "\"prisma-client-js\"") providerProp.value = "\"prisma-client\"";
		}
	});
	// Pass 1: map each referenced model -> set of models referencing it, so the
	// reverse side of every relation can be added to the referenced model later.
	const manyToManyRelations = /* @__PURE__ */ new Map();
	for (const table in tables) {
		const fields = tables[table]?.fields;
		for (const field in fields) {
			const attr = fields[field];
			if (attr.references) {
				const referencedOriginalModel = attr.references.model;
				const referencedModelNameCap = capitalizeFirstLetter(getModelName(tables[referencedOriginalModel]?.modelName || referencedOriginalModel));
				if (!manyToManyRelations.has(referencedModelNameCap)) manyToManyRelations.set(referencedModelNameCap, /* @__PURE__ */ new Set());
				const currentModelNameCap = capitalizeFirstLetter(getModelName(tables[table]?.modelName || table));
				manyToManyRelations.get(referencedModelNameCap).add(currentModelNameCap);
			}
		}
	}
	// Pass 2: collect non-unique indexed fields per model for @@index blocks.
	const indexedFields = /* @__PURE__ */ new Map();
	for (const table in tables) {
		const fields = tables[table]?.fields;
		const modelName = capitalizeFirstLetter(getModelName(tables[table]?.modelName || table));
		indexedFields.set(modelName, []);
		for (const field in fields) {
			const attr = fields[field];
			if (attr.index && !attr.unique) {
				const fieldName = attr.fieldName || field;
				indexedFields.get(modelName).push(fieldName);
			}
		}
	}
	// Pass 3: mutate the schema AST, adding models/fields that are missing.
	const schema = produceSchema(schemaPrisma, (builder) => {
		for (const table in tables) {
			const originalTableName = table;
			const customModelName = tables[table]?.modelName || table;
			const modelName = capitalizeFirstLetter(getModelName(customModelName));
			const fields = tables[table]?.fields;
			// Maps a schema field descriptor to a Prisma scalar type. sqlite/mysql
			// lack native Json and scalar-list support, so those fall back to String.
			function getType({ isBigint, isOptional, type }) {
				if (type === "string") return isOptional ? "String?" : "String";
				if (type === "number" && isBigint) return isOptional ? "BigInt?" : "BigInt";
				if (type === "number") return isOptional ? "Int?" : "Int";
				if (type === "boolean") return isOptional ? "Boolean?" : "Boolean";
				if (type === "date") return isOptional ? "DateTime?" : "DateTime";
				if (type === "json") {
					if (provider === "sqlite" || provider === "mysql") return isOptional ? "String?" : "String";
					return isOptional ? "Json?" : "Json";
				}
				if (type === "string[]") {
					if (provider === "sqlite" || provider === "mysql") return isOptional ? "String?" : "String";
					return "String[]";
				}
				if (type === "number[]") {
					if (provider === "sqlite" || provider === "mysql") return "String";
					return "Int[]";
				}
			}
			const prismaModel = builder.findByType("model", { name: modelName });
			// New models always get a String @id primary key.
			if (!prismaModel) builder.model(modelName).field("id", "String").attribute("id");
			for (const field in fields) {
				const attr = fields[field];
				const fieldName = attr.fieldName || field;
				// Never overwrite a field that already exists in the user's schema.
				if (prismaModel) {
					if (builder.findByType("field", {
						name: fieldName,
						within: prismaModel.properties
					})) continue;
				}
				// NOTE(review): `type` is hard-coded to "string" here, so every
				// generated column becomes String regardless of attr.type — the
				// getType branches above are effectively dead. Looks unintentional;
				// confirm whether this should be `type: attr.type`.
				const fieldBuilder = builder.model(modelName).field(fieldName, getType({
					isBigint: attr?.bigint || false,
					isOptional: !attr?.required,
					type: "string"
				}));
				if (field === "id") fieldBuilder.attribute("id");
				if (attr.unique) builder.model(modelName).blockAttribute(`unique([${fieldName}])`);
				if (attr.defaultValue !== void 0) {
					// Array defaults: json payloads are serialized into a quoted
					// string; scalar arrays are emitted as Prisma list literals.
					if (Array.isArray(attr.defaultValue)) {
						if (attr.type === "json") {
							if (Object.prototype.toString.call(attr.defaultValue[0]) === "[object Object]") {
								fieldBuilder.attribute(`default("${JSON.stringify(attr.defaultValue).replace(/\\/g, "\\\\").replace(/"/g, "\\\"")}")`);
								continue;
							}
							let jsonArray = [];
							for (const value of attr.defaultValue) jsonArray.push(value);
							fieldBuilder.attribute(`default("${JSON.stringify(jsonArray).replace(/"/g, "\\\"")}")`);
							continue;
						}
						if (attr.defaultValue.length === 0) {
							fieldBuilder.attribute(`default([])`);
							continue;
						} else if (typeof attr.defaultValue[0] === "string" && attr.type === "string[]") {
							let valueArray = [];
							for (const value of attr.defaultValue) valueArray.push(JSON.stringify(value));
							fieldBuilder.attribute(`default([${valueArray}])`);
						} else if (typeof attr.defaultValue[0] === "number") {
							let valueArray = [];
							for (const value of attr.defaultValue) valueArray.push(`${value}`);
							fieldBuilder.attribute(`default([${valueArray}])`);
						}
					} else if (typeof attr.defaultValue === "object" && !Array.isArray(attr.defaultValue) && attr.defaultValue !== null) {
						// Plain-object defaults are stored as an escaped JSON string.
						if (Object.entries(attr.defaultValue).length === 0) {
							fieldBuilder.attribute(`default("{}")`);
							continue;
						}
						fieldBuilder.attribute(`default("${JSON.stringify(attr.defaultValue).replace(/\\/g, "\\\\").replace(/"/g, "\\\"")}")`);
					}
					if (field === "createdAt") fieldBuilder.attribute("default(now())");
					else if (typeof attr.defaultValue === "string" && provider !== "mysql") fieldBuilder.attribute(`default("${attr.defaultValue}")`);
					else if (typeof attr.defaultValue === "boolean" || typeof attr.defaultValue === "number") fieldBuilder.attribute(`default(${attr.defaultValue})`);
					// Function defaults are evaluated at runtime, not representable in
					// the schema: deliberately emit nothing.
					else if (typeof attr.defaultValue === "function") {}
				}
				if (field === "updatedAt" && attr.onUpdate) fieldBuilder.attribute("updatedAt");
				// onUpdate on other fields has no Prisma equivalent: deliberate no-op.
				else if (attr.onUpdate) {}
				if (attr.references) {
					// NOTE(review): this indexes `tables` by the resolved *model name*
					// rather than the table key; verify those coincide for all schemas.
					const referencedOriginalModelName = getModelName(attr.references.model);
					const referencedCustomModelName = tables[referencedOriginalModelName]?.modelName || referencedOriginalModelName;
					// Translate onDelete behavior into Prisma referential actions;
					// unknown values fall back to NoAction.
					let action = "NoAction";
					if (attr.references.onDelete === "cascade") action = "Cascade";
					else if (attr.references.onDelete === "no action") action = "NoAction";
					else if (attr.references.onDelete === "set null") action = "SetNull";
					else if (attr.references.onDelete === "set default") action = "SetDefault";
					else if (attr.references.onDelete === "restrict") action = "Restrict";
					const relationField = `relation(fields: [${getFieldName({
						model: originalTableName,
						field: fieldName
					})}], references: [${getFieldName({
						model: attr.references.model,
						field: attr.references.field
					})}], onDelete: ${action})`;
					builder.model(modelName).field(referencedCustomModelName.toLowerCase(), `${capitalizeFirstLetter(referencedCustomModelName)}${!attr.required ? "?" : ""}`).attribute(relationField);
				}
				// MySQL: unindexed string columns get TEXT storage explicitly.
				if (!attr.unique && !attr.references && provider === "mysql" && attr.type === "string") builder.model(modelName).field(fieldName).attribute("db.Text");
			}
			// Add the reverse side of relations pointing at this model
			// (singular + optional when the FK is unique, plural list otherwise).
			if (manyToManyRelations.has(modelName)) for (const relatedModel of manyToManyRelations.get(modelName)) {
				const relatedTableName = Object.keys(tables).find((key) => capitalizeFirstLetter(tables[key]?.modelName || key) === relatedModel);
				const relatedFields = relatedTableName ? tables[relatedTableName]?.fields : {};
				const [_fieldKey, fkFieldAttr] = Object.entries(relatedFields || {}).find(([_fieldName, fieldAttr]) => fieldAttr.references && getModelName(fieldAttr.references.model) === getModelName(originalTableName)) || [];
				const isUnique = fkFieldAttr?.unique === true;
				const fieldName = isUnique || adapter.options?.usePlural === true ? `${relatedModel.toLowerCase()}` : `${relatedModel.toLowerCase()}s`;
				if (!builder.findByType("field", {
					name: fieldName,
					within: prismaModel?.properties
				})) builder.model(modelName).field(fieldName, `${relatedModel}${isUnique ? "?" : "[]"}`);
			}
			// Emit @@index blocks for collected non-unique indexed fields,
			// skipping any index already present in the user's schema.
			const indexedFieldsForModel = indexedFields.get(modelName);
			if (indexedFieldsForModel && indexedFieldsForModel.length > 0) for (const fieldName of indexedFieldsForModel) {
				if (prismaModel) {
					if (prismaModel.properties.some((v) => v.type === "attribute" && v.name === "index" && JSON.stringify(v.args[0]?.value).includes(fieldName))) continue;
				}
				const field = Object.entries(fields).find(([key, attr]) => (attr.fieldName || key) === fieldName)?.[1];
				let indexField = fieldName;
				// MySQL requires a prefix length when indexing TEXT columns.
				if (provider === "mysql" && field && field.type === "string") indexField = `${fieldName}(length: 191)`;
				builder.model(modelName).blockAttribute(`index([${indexField}])`);
			}
			// Ensure each model maps back to its physical table name via @@map.
			const hasAttribute = builder.findByType("attribute", {
				name: "map",
				within: prismaModel?.properties
			});
			const hasChanged = customModelName !== originalTableName;
			if (!hasAttribute) builder.model(modelName).blockAttribute("map", `${getModelName(hasChanged ? customModelName : originalTableName)}`);
		}
	});
	const schemaChanged = schema.trim() !== schemaPrisma.trim();
	// Postgres only: also emit the invariants SQL next to the schema file.
	if (provider === "postgresql") {
		const tables$1 = getPaymentTables$1(options);
		const sql = emitPostgresInvariantSql(normalizeInvariants(tables$1, {
			getModelName: initGetModelName({
				schema: tables$1,
				usePlural: adapter.options?.adapterConfig?.usePlural
			}),
			getFieldName: initGetFieldName({
				schema: tables$1,
				usePlural: false
			})
		}), "public");
		const sqlFilePath = path.join(path.dirname(filePath), "invariants.sql");
		const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
		await fs$1.mkdir(sqlDir, { recursive: true });
		await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
	}
	return {
		code: schemaChanged ? schema : "",
		fileName: filePath,
		overwrite: schemaPrismaExist && schemaChanged
	};
};
1244
/**
 * Produce a minimal starter Prisma schema (generator + datasource blocks)
 * for projects that do not yet have one.
 *
 * @param {string} provider - Prisma datasource provider (e.g. "postgresql", "sqlite").
 * @param {string} cwd - Project root used to detect the installed Prisma version.
 * @returns {string} Prisma schema source text.
 */
const getNewPrisma = (provider, cwd) => {
	const prismaVersion = getPrismaVersion(cwd);
	// Prisma >= 7 renamed the client generator to "prisma-client".
	// SQLite gets a local file URL; all other providers read DATABASE_URL.
	return `generator client {
provider = "${prismaVersion && prismaVersion >= 7 ? "prisma-client" : "prisma-client-js"}"
}

datasource db {
provider = "${provider}"
url = ${provider === "sqlite" ? `"file:./dev.db"` : `env("DATABASE_URL")`}
}`;
};
1255
-
1256
- //#endregion
1257
- //#region src/generators/invariants-mongo.ts
1258
/**
 * Aggregate MongoDB collection validators ($jsonSchema documents) from
 * invariants whose mongo enforcement mode is "validator".
 *
 * @param invariants - Normalized invariant IR entries.
 * @returns Object mapping collection name -> { $jsonSchema } validator.
 */
function emitMongoValidators(invariants) {
	const validators = {};
	// Only invariants explicitly targeting mongo validator enforcement apply.
	const applicable = invariants.filter(
		(inv) => inv.enforcement.mongo === "validator" && inv.appliesTo.includes("mongo")
	);
	for (const inv of applicable) {
		const collection = inv.tableName;
		// Lazily create the skeleton schema for this collection.
		if (!validators[collection]) {
			validators[collection] = {
				$jsonSchema: {
					bsonType: "object",
					required: [],
					properties: {},
					additionalProperties: true
				}
			};
		}
		const fragment = generateMongoValidator(inv);
		if (!fragment) continue;
		const target = validators[collection].$jsonSchema;
		// Merge the fragment's pieces into the accumulated collection schema.
		Object.assign(target.properties, fragment.properties || {});
		if (fragment.required) {
			target.required = [...(target.required || []), ...fragment.required];
		}
		if (fragment.anyOf) {
			if (!target.anyOf) target.anyOf = [];
			target.anyOf.push(...fragment.anyOf);
		}
	}
	return validators;
}
1284
/**
 * Translate a single invariant's parsed logic into a $jsonSchema fragment.
 * Only "field_enum_constraint" logic is supported; any other logic type
 * yields null.
 *
 * @param inv - Normalized invariant IR entry.
 * @returns {{ properties: object, anyOf?: object[] } | null}
 */
function generateMongoValidator(inv) {
	const logic = inv.logic;
	if (logic.type !== "field_enum_constraint") return null;
	const { fieldName, allowedValues, conditionalField } = logic;
	const enumProperty = { [fieldName]: { enum: allowedValues } };
	if (!conditionalField) return { properties: enumProperty };
	// Exactly one anyOf branch is produced, chosen by whenPresent:
	// present -> SUBSCRIPTION requires the conditional field as a string;
	// absent  -> PAYMENT requires the conditional field to be null.
	const branch = conditionalField.whenPresent
		? {
			properties: {
				[fieldName]: { enum: ["SUBSCRIPTION"] },
				[conditionalField.fieldName]: { bsonType: "string" }
			},
			required: [conditionalField.fieldName]
		}
		: {
			properties: {
				[fieldName]: { enum: ["PAYMENT"] },
				[conditionalField.fieldName]: { bsonType: "null" }
			}
		};
	return {
		properties: enumProperty,
		anyOf: [branch]
	};
}
1315
/**
 * Generate the TypeScript guard module for MongoDB "app"-enforced invariants.
 *
 * @param invariants - Normalized invariant IR entries.
 * @returns Source text of the generated guard module. When no invariant
 *   requires an app-level guard, a stub module (`export {};`) is returned.
 */
function emitMongoGuards(invariants) {
	const guardInvariants = invariants.filter((inv) => inv.enforcement.mongo === "app" && inv.appliesTo.includes("mongo"));
	if (guardInvariants.length === 0) return `// No MongoDB app-level invariant guards generated
// This file is auto-generated - DO NOT EDIT MANUALLY

export {};
`;
	const guards = [];
	const imports = [];
	for (const inv of guardInvariants) {
		const guard = generateMongoGuard(inv);
		if (guard) {
			// BUG FIX: generateMongoGuard returns { code, imports? }. Join the
			// code text, not the wrapper object — joining objects would emit
			// "[object Object]" into the generated module.
			guards.push(guard.code);
			if (guard.imports) imports.push(...guard.imports);
		}
	}
	// De-duplicate import lines contributed by individual guards.
	const uniqueImports = Array.from(new Set(imports));
	return `// MongoDB invariant guards
// Generated from schema invariants
// DO NOT EDIT MANUALLY - This file is auto-generated
//
// These guards should be called before write operations to enforce invariants
// at the application level.

${uniqueImports.length > 0 ? uniqueImports.join("\n") + "\n" : ""}

${guards.join("\n\n")}
`;
}
1347
/**
 * Generate a TypeScript guard function for a single invariant.
 *
 * The returned `code` string is an async guard meant to be called before a
 * MongoDB write; the caller concatenates all guards into one file and runs
 * prettier over it.
 *
 * @param inv - Normalized invariant: { id, description, logic, ... }.
 * @returns {{code: string} | null} Generated guard source, or null when the
 *   invariant's `logic.type` has no code generator.
 */
function generateMongoGuard(inv) {
  // NOTE: `tableName` used to be destructured here as well, but none of the
  // emitted templates reference it (collection names come from `logic`), so
  // the dead binding has been removed.
  const { logic, id, description } = inv;
  switch (logic.type) {
    case "cross_table_ownership": {
      // Referenced record must share the same owner field value as the record
      // being written (e.g. a payment method must belong to the same customer).
      const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
      return { code: `/**
 * Guard: ${description}
 *
 * @param data - The record being created/updated (use camelCase field names)
 * @param db - MongoDB database instance
 * @returns true if invariant is satisfied, throws error otherwise
 */
export async function ${id}Guard(
data: { ${fieldName}?: string | null; ${ownershipFieldName}: string },
db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
): Promise<boolean> {
if (!data.${fieldName}) {
return true; // Field is optional/nullable
}

const referenced = await db.collection("${referencedTable}").findOne({
${referencedField}: data.${fieldName}
});

if (!referenced) {
throw new Error(\`Invariant violation: \${data.${fieldName}} not found in ${referencedTable}\`);
}

// Compare ownership field (use database field name for referenced record)
const dataOwnerValue = data.${ownershipFieldName};
const refOwnerValue = referenced.${ownershipFieldName};

if (refOwnerValue !== dataOwnerValue) {
throw new Error(
\`Invariant violation: ${fieldName} must belong to the same ${ownershipFieldName} as the record. \` +
\`Expected \${dataOwnerValue}, got \${refOwnerValue}\`
);
}

return true;
}` };
    }
    case "cross_table_equality": {
      // A field on this record must equal a field on the referenced record.
      const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
      return { code: `/**
 * Guard: ${description}
 *
 * @param data - The record being created/updated
 * @param db - MongoDB database instance
 * @returns true if invariant is satisfied, throws error otherwise
 */
export async function ${id}Guard(
data: { ${fieldName}: string; ${equalityFieldName}: string },
db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
): Promise<boolean> {
if (!data.${fieldName}) {
throw new Error(\`Invariant violation: ${fieldName} is required\`);
}

const referenced = await db.collection("${referencedTable}").findOne({
${referencedField}: data.${fieldName}
});

if (!referenced) {
throw new Error(\`Invariant violation: Referenced record not found in ${referencedTable}\`);
}

if (referenced.${referencedEqualityFieldName} !== data.${equalityFieldName}) {
throw new Error(
\`Invariant violation: ${equalityFieldName} must equal ${referencedTable}.${referencedEqualityFieldName}. \` +
\`Expected \${referenced.${referencedEqualityFieldName}}, got \${data.${equalityFieldName}}\`
);
}

return true;
}` };
    }
    case "raw":
      // Invariant with no structured logic: emit a stub so the developer gets
      // a visible TODO plus a runtime warning instead of a silent omission.
      return { code: `/**
 * Guard: ${description}
 *
 * TODO: Implement this guard based on the invariant description.
 * This is a placeholder - you must implement the actual validation logic.
 *
 * @param data - The record being created/updated
 * @param db - MongoDB database instance
 * @returns true if invariant is satisfied, throws error otherwise
 */
export async function ${id}Guard(
data: any,
db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
): Promise<boolean> {
// TODO: Implement invariant: ${description}
console.warn("Guard ${id}Guard is not yet implemented");
return true;
}` };
    default:
      return null;
  }
}
1448
-
1449
- //#endregion
1450
- //#region src/generators/mongodb.ts
1451
/**
 * Generate the MongoDB "schema" artifacts from the configured payment tables:
 *   1. writes `<file>-validators.json` ($jsonSchema validators per collection),
 *   2. writes `<file>-guards.ts` (prettier-formatted invariant guard functions),
 * then returns the source of an entry module (re-exporting both) for the CLI
 * to write to `file` itself.
 *
 * @param options - Pecunia options; resolved into payment tables.
 * @param file - Output path for the entry module (defaults to "./mongodb-schema.ts").
 * @param adapter - Optional adapter; `adapterConfig.usePlural` affects model naming.
 * @returns {{code: string, fileName: string, overwrite: boolean}} consumed by the CLI writer.
 */
const generateMongoDBSchema = async ({ options, file, adapter }) => {
  const tables = getPaymentTables$1(options);
  // NOTE(review): if `file` does not end in ".ts", both `.replace(/\.ts$/, ...)`
  // calls below are no-ops, so the validators/guards paths collide with
  // `filePath` — presumably callers always pass a .ts path; confirm.
  const filePath = file || "./mongodb-schema.ts";
  // Normalize schema invariants into the IR the emitters consume; naming
  // helpers come from pecunia-core so model/field names match other adapters.
  const invariants = normalizeInvariants(tables, {
    getModelName: initGetModelName({
      schema: tables,
      usePlural: adapter?.options?.adapterConfig?.usePlural
    }),
    getFieldName: initGetFieldName({
      schema: tables,
      usePlural: false
    })
  });
  const validators = emitMongoValidators(invariants);
  const validatorsJson = JSON.stringify(validators, null, 2);
  const guards = emitMongoGuards(invariants);
  // Write validators JSON next to the entry module (directories created as needed).
  const validatorsPath = filePath.replace(/\.ts$/, "-validators.json");
  const validatorsDir = path.dirname(path.resolve(process.cwd(), validatorsPath));
  await fs$1.mkdir(validatorsDir, { recursive: true });
  await fs$1.writeFile(path.resolve(process.cwd(), validatorsPath), validatorsJson);
  console.log(`📝 Generated MongoDB validators: ${validatorsPath}`);
  // Write the guards module, formatted with prettier before hitting disk.
  const guardsPath = filePath.replace(/\.ts$/, "-guards.ts");
  const guardsDir = path.dirname(path.resolve(process.cwd(), guardsPath));
  await fs$1.mkdir(guardsDir, { recursive: true });
  const formattedGuards = await prettier.format(guards, { parser: "typescript" });
  await fs$1.writeFile(path.resolve(process.cwd(), guardsPath), formattedGuards);
  console.log(`📝 Generated MongoDB guards: ${guardsPath}`);
  // Relative import specifiers for the entry module (siblings of `filePath`).
  const validatorsBaseName = path.basename(validatorsPath, ".json");
  const guardsBaseName = path.basename(guardsPath, ".ts");
  const schemaCode = `// MongoDB schema setup
// Generated from schema definitions
// DO NOT EDIT MANUALLY - This file is auto-generated

import validators from "./${validatorsBaseName}.json";
import * as guards from "./${guardsBaseName}";

/**
 * MongoDB collection validators.
 * Apply these using db.createCollection() or db.command({ collMod: ... })
 *
 * Example:
 * \`\`\`
 * await db.createCollection("checkout_session", {
 * validator: validators.checkout_session
 * });
 * \`\`\`
 */
export { validators };

/**
 * MongoDB invariant guards.
 * Call these before write operations to enforce invariants at the application level.
 *
 * Example:
 * \`\`\`
 * import { customer_payment_method_ownershipGuard } from "./${guardsBaseName}";
 *
 * await customer_payment_method_ownershipGuard(data, db);
 * await db.collection("customer").insertOne(data);
 * \`\`\`
 */
export { guards };
`;
  // The entry module itself is returned (not written here); the CLI decides
  // where/whether to write it based on fileName/overwrite.
  return {
    code: await prettier.format(schemaCode, { parser: "typescript" }),
    fileName: filePath,
    overwrite: true
  };
};
1520
-
1521
- //#endregion
1522
- //#region src/generators/index.ts
1523
// Built-in schema generators keyed by adapter id. `generateSchema` consults
// this registry first and falls back to a custom adapter's own `createSchema`
// hook when the id is not listed here.
const adapters = {
  prisma: generatePrismaSchema,
  drizzle: generateDrizzleSchema,
  kysely: generateKyselySchema,
  mongodb: generateMongoDBSchema
};
1529
/**
 * Generate the schema file for the configured database adapter.
 *
 * Resolution order:
 *   1. a built-in generator registered in `adapters` under `adapter.id`,
 *   2. the adapter's own `createSchema(options, file)` hook,
 *   3. otherwise throw, asking the adapter maintainer to implement the hook.
 *
 * @param opts - { adapter, options, file } forwarded to the generator.
 * @returns {Promise<{code: string, fileName: string, overwrite?: boolean}>}
 * @throws {Error} when the adapter is unsupported and has no createSchema.
 */
const generateSchema = async (opts) => {
  const adapter = opts.adapter;
  // Object.hasOwn instead of `in`: the `in` operator walks the prototype
  // chain, so an adapter id like "toString" would wrongly resolve
  // Object.prototype.toString as the generator.
  const generator = Object.hasOwn(adapters, adapter.id) ? adapters[adapter.id] : null;
  if (generator) return generator(opts);
  if (adapter.createSchema) {
    // Normalize the custom-adapter result shape ({ path } -> { fileName }).
    const { code, path: fileName, overwrite } = await adapter.createSchema(opts.options, opts.file);
    return {
      code,
      fileName,
      overwrite
    };
  }
  throw new Error(`${adapter.id} is not supported. If it is a custom adapter, please request the maintainer to implement createSchema`);
};
1540
-
1541
- //#endregion
1542
// Bundler-minified re-exports; single-letter aliases map to the public API
// (a = generateKyselySchema, i = getPackageInfo, n = generateSchema,
// o = generateDrizzleSchema, r = generatePrismaSchema, t = adapters).
export { generateKyselySchema as a, getPackageInfo as i, generateSchema as n, generateDrizzleSchema as o, generatePrismaSchema as r, adapters as t };