pecunia-cli 0.2.8 → 0.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/api.mjs CHANGED
@@ -1,3 +1,3 @@
- import { a as generateKyselySchema, n as generateSchema, o as generateDrizzleSchema, r as generatePrismaSchema, t as adapters } from "./generators-BXA4y5O8.mjs";
+ import { a as generateKyselySchema, n as generateSchema, o as generateDrizzleSchema, r as generatePrismaSchema, t as adapters } from "./generators-CZDWuCP8.mjs";
 
  export { adapters, generateDrizzleSchema, generateKyselySchema, generatePrismaSchema, generateSchema };
package/dist/generators-BXA4y5O8.mjs → package/dist/generators-CZDWuCP8.mjs RENAMED
@@ -1,526 +1,18 @@
1
- import fs, { existsSync, statSync } from "node:fs";
1
+ import fs, { existsSync } from "node:fs";
2
2
  import fs$1 from "node:fs/promises";
3
3
  import path from "node:path";
4
- import { getMigrations, getPaymentTables } from "pecunia-root";
5
- import { capitalizeFirstLetter, getPaymentTables as getPaymentTables$1, initGetFieldName, initGetModelName } from "pecunia-core";
4
+ import { getMigrations } from "pecunia-root";
5
+ import { capitalizeFirstLetter, getPaymentTables, initGetFieldName, initGetModelName } from "pecunia-core";
6
6
  import prettier from "prettier";
7
7
  import { produceSchema } from "@mrleebo/prisma-ast";
8
8
 
9
- //#region src/generators/invariants.ts
10
- /**
11
- * Normalize invariants from schema into IR format.
12
- * This parses invariant descriptions and extracts structured logic.
13
- */
14
- function normalizeInvariants(schema, options) {
15
- const invariants = [];
16
- for (const [tableKey, table] of Object.entries(schema)) {
17
- if (!table.invariants) continue;
18
- const modelName = options.getModelName(tableKey);
19
- const tableName = table.modelName;
20
- for (const invariant of table.invariants) {
21
- const logic = parseInvariantLogic(invariant.description, tableKey, table, schema, options);
22
- invariants.push({
23
- id: invariant.id,
24
- description: invariant.description,
25
- modelName,
26
- tableName,
27
- appliesTo: invariant.appliesTo,
28
- enforcement: invariant.enforcement || {},
29
- logic
30
- });
31
- }
32
- }
33
- return invariants;
34
- }
35
- /**
36
- * Parse invariant description into structured logic.
37
- * This is a heuristic parser that extracts common patterns.
38
- */
39
- function parseInvariantLogic(description, tableKey, table, schema, options) {
40
- const desc = description.toLowerCase();
41
- if (desc.includes("determines") && desc.includes("presence")) {
42
- const modeMatch = description.match(/(\w+)\.(\w+)\s+determines/);
43
- const subMatch = description.match(/(\w+)\s+must be (present|null\/absent)/);
44
- const enumMatches = description.matchAll(/(\w+)\s*=>\s*(\w+)\s+must be (present|null\/absent)/g);
45
- if (modeMatch && subMatch) {
46
- const [, modelName, fieldName] = modeMatch;
47
- const [, conditionalField] = subMatch;
48
- const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
49
- const conditional = Object.keys(table.fields).find((k) => k.toLowerCase() === conditionalField.toLowerCase());
50
- if (field && conditional) {
51
- const fieldAttr = table.fields[field];
52
- let allowedValues = [];
53
- if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) allowedValues = fieldAttr.type;
54
- else {
55
- const enumValues = /* @__PURE__ */ new Set();
56
- for (const match of enumMatches) {
57
- const val = match[1]?.trim();
58
- if (val) enumValues.add(val);
59
- }
60
- if (enumValues.size > 0) allowedValues = Array.from(enumValues);
61
- else allowedValues = ["PAYMENT", "SUBSCRIPTION"].filter((e) => desc.includes(e.toLowerCase()));
62
- }
63
- let whenPresent = false;
64
- for (const match of enumMatches) {
65
- const enumVal = match[1];
66
- const requirement = match[3];
67
- if (enumVal && requirement === "present") {
68
- whenPresent = true;
69
- break;
70
- }
71
- }
72
- if (!whenPresent && desc.includes("subscription") && desc.includes("must be present")) whenPresent = true;
73
- return {
74
- type: "field_enum_constraint",
75
- field,
76
- fieldName: options.getFieldName({
77
- model: tableKey,
78
- field
79
- }),
80
- allowedValues: allowedValues.length > 0 ? allowedValues : ["PAYMENT", "SUBSCRIPTION"],
81
- conditionalField: {
82
- field: conditional,
83
- fieldName: options.getFieldName({
84
- model: tableKey,
85
- field: conditional
86
- }),
87
- whenPresent
88
- }
89
- };
90
- }
91
- }
92
- }
93
- if (desc.includes("must belong to same") || desc.includes("must equal") && desc.includes("when")) {
94
- const fieldMatch = description.match(/(\w+)\.(\w+)\s+must/);
95
- const refMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
96
- description.match(/(\w+)\.(\w+)\s+must equal/);
97
- if (fieldMatch && refMatch) {
98
- const [, modelName, fieldName] = fieldMatch;
99
- const [, refTable, refField, ownerTable, ownerField] = refMatch;
100
- const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
101
- if (field && table.fields[field]?.references) {
102
- table.fields[field].references;
103
- const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
104
- const ownerTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === ownerTable || k === ownerTable);
105
- if (refTableKey && ownerTableKey) {
106
- const ownershipField = Object.keys(table.fields).find((k) => k.toLowerCase() === ownerField.toLowerCase());
107
- if (ownershipField) return {
108
- type: "cross_table_ownership",
109
- field,
110
- fieldName: options.getFieldName({
111
- model: tableKey,
112
- field
113
- }),
114
- referencedTable: options.getModelName(refTableKey),
115
- referencedField: options.getFieldName({
116
- model: refTableKey,
117
- field: refField
118
- }),
119
- ownershipField,
120
- ownershipFieldName: options.getFieldName({
121
- model: tableKey,
122
- field: ownershipField
123
- })
124
- };
125
- }
126
- }
127
- }
128
- }
129
- if (desc.includes("must match") && desc.includes("of its")) {
130
- const fieldMatch = description.match(/(\w+)\.(\w+)\s+must match/);
131
- const refMatch = description.match(/of its\s+(\w+)/);
132
- const equalityMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
133
- if (fieldMatch && refMatch && equalityMatch) {
134
- const [, , equalityFieldName] = fieldMatch;
135
- const [, refFieldName] = refMatch;
136
- const [, refTable, refEqualityField, , sourceEqualityField] = equalityMatch;
137
- const equalityField = Object.keys(table.fields).find((k) => k.toLowerCase() === equalityFieldName.toLowerCase());
138
- const refField = Object.keys(table.fields).find((k) => k.toLowerCase() === refFieldName.toLowerCase());
139
- if (equalityField && refField && table.fields[refField]?.references) {
140
- const ref = table.fields[refField].references;
141
- const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
142
- if (refTableKey) {
143
- const refEquality = Object.keys(schema[refTableKey].fields).find((k) => k.toLowerCase() === refEqualityField.toLowerCase());
144
- if (refEquality) return {
145
- type: "cross_table_equality",
146
- field: refField,
147
- fieldName: options.getFieldName({
148
- model: tableKey,
149
- field: refField
150
- }),
151
- referencedTable: options.getModelName(refTableKey),
152
- referencedField: options.getFieldName({
153
- model: refTableKey,
154
- field: ref.field
155
- }),
156
- equalityField,
157
- equalityFieldName: options.getFieldName({
158
- model: tableKey,
159
- field: equalityField
160
- }),
161
- referencedEqualityField: refEquality,
162
- referencedEqualityFieldName: options.getFieldName({
163
- model: refTableKey,
164
- field: refEquality
165
- })
166
- };
167
- }
168
- }
169
- }
170
- }
171
- return {
172
- type: "raw",
173
- description
174
- };
175
- }
176
-
177
- //#endregion
178
- //#region src/generators/invariants-sql.ts
179
- /**
180
- * Generate SQL for enforcing invariants in Postgres (and compatible databases).
181
- * Returns SQL statements for CHECK constraints and triggers.
182
- */
183
- function emitPostgresInvariantSql(invariants, schemaName = "public") {
184
- const statements = [];
185
- const checkInvariants = invariants.filter((inv) => inv.enforcement.postgres === "check" && inv.appliesTo.includes("postgres"));
186
- const triggerInvariants = invariants.filter((inv) => inv.enforcement.postgres === "trigger" && inv.appliesTo.includes("postgres"));
187
- for (const inv of checkInvariants) {
188
- const sql = generateCheckConstraint(inv, schemaName);
189
- if (sql) statements.push(sql);
190
- }
191
- for (const inv of triggerInvariants) {
192
- const sql = generateTrigger(inv, schemaName);
193
- if (sql) statements.push(...sql);
194
- }
195
- if (statements.length === 0) return "-- No invariant enforcement SQL generated\n";
196
- return `-- Invariant enforcement SQL
197
- -- Generated from schema invariants
198
- -- DO NOT EDIT MANUALLY - This file is auto-generated
199
-
200
- ${statements.join("\n\n")}
201
- `;
202
- }
203
- /**
204
- * Generate CHECK constraint SQL for an invariant.
205
- */
206
- function generateCheckConstraint(inv, schemaName) {
207
- const { logic, tableName, id } = inv;
208
- switch (logic.type) {
209
- case "field_enum_constraint": {
210
- const { fieldName, allowedValues, conditionalField } = logic;
211
- if (conditionalField) {
212
- const constraintName$1 = `${tableName}_${id}_check`;
213
- const enumCheck$1 = allowedValues.map((val) => `'${val}'`).join(", ");
214
- const subscriptionValue = allowedValues.find((v) => v === "SUBSCRIPTION") || allowedValues.find((v) => v !== "PAYMENT") || allowedValues[0];
215
- const paymentValue = allowedValues.find((v) => v === "PAYMENT") || allowedValues[0];
216
- const conditionalCheck = `(
217
- (${fieldName} = '${subscriptionValue}') = (${conditionalField.fieldName} IS NOT NULL) AND
218
- (${fieldName} = '${paymentValue}') = (${conditionalField.fieldName} IS NULL)
219
- )`;
220
- return `-- ${inv.description}
221
- ALTER TABLE ${schemaName}.${tableName}
222
- ADD CONSTRAINT ${constraintName$1}
223
- CHECK (
224
- ${fieldName} IN (${enumCheck$1}) AND
225
- ${conditionalCheck}
226
- );`;
227
- }
228
- const constraintName = `${tableName}_${id}_check`;
229
- const enumCheck = allowedValues.map((val) => `'${val}'`).join(", ");
230
- return `-- ${inv.description}
231
- ALTER TABLE ${schemaName}.${tableName}
232
- ADD CONSTRAINT ${constraintName}
233
- CHECK (${fieldName} IN (${enumCheck}));`;
234
- }
235
- default: return null;
236
- }
237
- }
238
- /**
239
- * Generate trigger SQL for an invariant.
240
- * Returns array of SQL statements (function + trigger).
241
- */
242
- function generateTrigger(inv, schemaName) {
243
- const { logic, tableName, id } = inv;
244
- switch (logic.type) {
245
- case "cross_table_ownership": {
246
- const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
247
- const functionName = `${tableName}_${id}_fn`;
248
- const triggerName = `${tableName}_${id}_trigger`;
249
- return [`-- Function to enforce: ${inv.description}
250
- CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
251
- RETURNS TRIGGER AS $$
252
- BEGIN
253
- IF NEW.${fieldName} IS NOT NULL THEN
254
- IF NOT EXISTS (
255
- SELECT 1
256
- FROM ${schemaName}.${referencedTable}
257
- WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
258
- AND ${referencedTable}.${ownershipFieldName} = NEW.${ownershipFieldName}
259
- ) THEN
260
- RAISE EXCEPTION 'Invariant violation: % must belong to the same % as the record', '${fieldName}', '${ownershipFieldName}';
261
- END IF;
262
- END IF;
263
- RETURN NEW;
264
- END;
265
- $$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
266
- DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
267
- CREATE TRIGGER ${triggerName}
268
- BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
269
- FOR EACH ROW
270
- EXECUTE FUNCTION ${schemaName}.${functionName}();`];
271
- }
272
- case "cross_table_equality": {
273
- const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
274
- const functionName = `${tableName}_${id}_fn`;
275
- const triggerName = `${tableName}_${id}_trigger`;
276
- return [`-- Function to enforce: ${inv.description}
277
- CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
278
- RETURNS TRIGGER AS $$
279
- DECLARE
280
- ref_${referencedEqualityFieldName} TEXT;
281
- BEGIN
282
- IF NEW.${fieldName} IS NOT NULL THEN
283
- SELECT ${referencedEqualityFieldName} INTO ref_${referencedEqualityFieldName}
284
- FROM ${schemaName}.${referencedTable}
285
- WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
286
- LIMIT 1;
287
-
288
- IF ref_${referencedEqualityFieldName} IS NULL THEN
289
- RAISE EXCEPTION 'Invariant violation: Referenced record not found in ${referencedTable}';
290
- END IF;
291
-
292
- IF NEW.${equalityFieldName} != ref_${referencedEqualityFieldName} THEN
293
- RAISE EXCEPTION 'Invariant violation: % must equal %.%', '${equalityFieldName}', '${referencedTable}', '${referencedEqualityFieldName}';
294
- END IF;
295
- END IF;
296
- RETURN NEW;
297
- END;
298
- $$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
299
- DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
300
- CREATE TRIGGER ${triggerName}
301
- BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
302
- FOR EACH ROW
303
- EXECUTE FUNCTION ${schemaName}.${functionName}();`];
304
- }
305
- default: return null;
306
- }
307
- }
308
-
309
- //#endregion
310
- //#region src/utils/drizzle-migrations.ts
311
- /**
312
- * Find or create the Drizzle migrations directory.
313
- * Never places migrations under src/ to avoid drizzle-kit crashes.
314
- *
315
- * @param projectRoot - The project root directory (where package.json typically lives)
316
- * @returns The path to the drizzle migrations directory
317
- */
318
- async function getDrizzleMigrationsDir(projectRoot) {
319
- const drizzleDir = path.resolve(projectRoot, "drizzle");
320
- if (existsSync(drizzleDir)) {
321
- if (!statSync(drizzleDir).isDirectory()) throw new Error(`"drizzle" exists but is not a directory. Please remove it or rename it.`);
322
- return drizzleDir;
323
- }
324
- await fs$1.mkdir(drizzleDir, { recursive: true });
325
- return drizzleDir;
326
- }
327
- /**
328
- * Get the path to the Drizzle meta directory.
329
- */
330
- function getDrizzleMetaDir(projectRoot) {
331
- return path.resolve(projectRoot, "drizzle", "meta");
332
- }
333
- /**
334
- * Get the path to the Drizzle journal file.
335
- */
336
- function getJournalPath(projectRoot) {
337
- return path.resolve(projectRoot, "drizzle", "meta", "_journal.json");
338
- }
339
- /**
340
- * Read the Drizzle journal file, or return null if it doesn't exist.
341
- */
342
- async function readJournal(projectRoot) {
343
- const journalPath = getJournalPath(projectRoot);
344
- if (!existsSync(journalPath)) return null;
345
- const content = await fs$1.readFile(journalPath, "utf-8");
346
- return JSON.parse(content);
347
- }
348
- /**
349
- * Write the Drizzle journal file with stable JSON formatting (2-space indent).
350
- */
351
- async function writeJournal(projectRoot, journal) {
352
- const journalPath = getJournalPath(projectRoot);
353
- await fs$1.writeFile(journalPath, JSON.stringify(journal, null, 2) + "\n", "utf-8");
354
- }
355
- /**
356
- * Ensure the Drizzle journal exists. If it doesn't exist, create it with a baseline init migration.
357
- *
358
- * @param projectRoot - The project root directory
359
- * @returns true if journal was created, false if it already existed
360
- */
361
- async function ensureDrizzleJournal(projectRoot) {
362
- if (existsSync(getJournalPath(projectRoot))) return false;
363
- const metaDir = getDrizzleMetaDir(projectRoot);
364
- await fs$1.mkdir(metaDir, { recursive: true });
365
- await writeJournal(projectRoot, {
366
- version: "7",
367
- dialect: "postgresql",
368
- entries: []
369
- });
370
- const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
371
- const initMigrationPath = path.join(migrationsDir, "0000_pecunia_init.sql");
372
- await fs$1.writeFile(initMigrationPath, "-- Initial no-op migration created by pecunia-cli\n", "utf-8");
373
- await upsertJournalEntry(projectRoot, "0000_pecunia_init");
374
- return true;
375
- }
376
- /**
377
- * Upsert a journal entry. If an entry with the same tag exists, do nothing.
378
- * Otherwise, append a new entry with the next available idx.
379
- *
380
- * @param projectRoot - The project root directory
381
- * @param tag - The migration tag (filename base without .sql)
382
- */
383
- async function upsertJournalEntry(projectRoot, tag) {
384
- const journal = await readJournal(projectRoot);
385
- if (!journal) throw new Error("Journal does not exist. Call ensureDrizzleJournal() first.");
386
- if (journal.entries.find((entry) => entry.tag === tag)) return;
387
- const nextIdx = (journal.entries.length > 0 ? Math.max(...journal.entries.map((e) => e.idx)) : -1) + 1;
388
- const hasBreakpoints = journal.entries.some((e) => e.breakpoints === true);
389
- const newEntry = {
390
- tag,
391
- idx: nextIdx,
392
- version: journal.version,
393
- when: Date.now(),
394
- ...hasBreakpoints && { breakpoints: true }
395
- };
396
- journal.entries.push(newEntry);
397
- await writeJournal(projectRoot, journal);
398
- }
399
- /**
400
- * Find existing invariants migration tag in the journal.
401
- *
402
- * @param projectRoot - The project root directory
403
- * @returns The tag if found, or null
404
- */
405
- async function findExistingInvariantsTag(projectRoot) {
406
- const journal = await readJournal(projectRoot);
407
- if (!journal) return null;
408
- const invariantsEntry = journal.entries.find((entry) => entry.tag.endsWith("_pecunia_invariants"));
409
- return invariantsEntry ? invariantsEntry.tag : null;
410
- }
411
- /**
412
- * Determine the next migration number by checking journal entries and existing files.
413
- * Prefers checking journal entries first, then falls back to scanning files.
414
- *
415
- * @param projectRoot - The project root directory
416
- * @returns The next migration number (4-digit string, e.g., "0001")
417
- */
418
- async function nextMigrationNumber(projectRoot) {
419
- const existingInvariantsTag = await findExistingInvariantsTag(projectRoot);
420
- if (existingInvariantsTag) {
421
- const match = existingInvariantsTag.match(/^(\d{4})_/);
422
- if (match) return match[1];
423
- }
424
- const journal = await readJournal(projectRoot);
425
- if (journal && journal.entries.length > 0) {
426
- const numbers$1 = journal.entries.map((entry) => {
427
- const match = entry.tag.match(/^(\d{4})_/);
428
- return match ? parseInt(match[1], 10) : 0;
429
- }).filter((n) => n >= 0);
430
- if (numbers$1.length > 0) {
431
- const nextNumber$1 = Math.max(...numbers$1) + 1;
432
- if (nextNumber$1 >= 9999) return "9999";
433
- return String(nextNumber$1).padStart(4, "0");
434
- }
435
- }
436
- const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
437
- if (!existsSync(migrationsDir)) return "0001";
438
- const migrationFiles = (await fs$1.readdir(migrationsDir)).filter((file) => file.endsWith(".sql") && /^\d{4}_/.test(file));
439
- if (migrationFiles.length === 0) return "0001";
440
- const numbers = migrationFiles.map((file) => {
441
- const match = file.match(/^(\d{4})_/);
442
- return match ? parseInt(match[1], 10) : 0;
443
- }).filter((n) => n > 0);
444
- if (numbers.length === 0) return "0001";
445
- const nextNumber = Math.max(...numbers) + 1;
446
- if (nextNumber >= 9999) return "9999";
447
- return String(nextNumber).padStart(4, "0");
448
- }
449
- /**
450
- * Get the path for the invariants migration file.
451
- * Uses a deterministic name based on migration number to ensure it runs after
452
- * base table creation migrations.
453
- *
454
- * @param migrationsDir - Path to the drizzle migrations directory
455
- * @param migrationNumber - The migration number to use (4-digit string)
456
- * @returns The full path to the invariants migration file
457
- */
458
- function getInvariantsMigrationPath(migrationsDir, migrationNumber) {
459
- return path.join(migrationsDir, `${migrationNumber}_pecunia_invariants.sql`);
460
- }
461
- /**
462
- * Upsert a Drizzle migration file and register it in the journal.
463
- *
464
- * @param projectRoot - The project root directory
465
- * @param tag - The migration tag (filename base without .sql)
466
- * @param sql - The SQL content to write
467
- */
468
- async function upsertDrizzleMigration(projectRoot, tag, sql) {
469
- const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
470
- const migrationPath = path.join(migrationsDir, `${tag}.sql`);
471
- if (existsSync(migrationPath)) {
472
- if (await fs$1.readFile(migrationPath, "utf-8") === sql) {
473
- await upsertJournalEntry(projectRoot, tag);
474
- return;
475
- }
476
- }
477
- await fs$1.writeFile(migrationPath, sql, "utf-8");
478
- await upsertJournalEntry(projectRoot, tag);
479
- }
480
- /**
481
- * Write the invariants SQL file to the drizzle migrations directory.
482
- * Ensures journal exists, determines migration number, and registers in journal.
483
- *
484
- * @param projectRoot - The project root directory
485
- * @param sqlContent - The SQL content to write
486
- * @returns The path to the written file and status flags
487
- */
488
- async function writeInvariantsMigration(projectRoot, sqlContent) {
489
- const journalCreated = await ensureDrizzleJournal(projectRoot);
490
- const migrationNumber = await nextMigrationNumber(projectRoot);
491
- const tag = `${migrationNumber}_pecunia_invariants`;
492
- const migrationPath = getInvariantsMigrationPath(await getDrizzleMigrationsDir(projectRoot), migrationNumber);
493
- let created = false;
494
- let updated = false;
495
- if (existsSync(migrationPath)) {
496
- if (await fs$1.readFile(migrationPath, "utf-8") === sqlContent) {
497
- await upsertJournalEntry(projectRoot, tag);
498
- return {
499
- path: migrationPath,
500
- created: false,
501
- updated: false,
502
- journalCreated
503
- };
504
- }
505
- updated = true;
506
- } else created = true;
507
- await upsertDrizzleMigration(projectRoot, tag, sqlContent);
508
- return {
509
- path: migrationPath,
510
- created,
511
- updated,
512
- journalCreated
513
- };
514
- }
515
-
516
- //#endregion
517
9
  //#region src/generators/drizzle.ts
518
10
  function convertToSnakeCase(str, camelCase) {
519
11
  if (camelCase) return str;
520
12
  return str.replace(/([A-Z]+)([A-Z][a-z])/g, "$1_$2").replace(/([a-z\d])([A-Z])/g, "$1_$2").toLowerCase();
521
13
  }
522
14
  const generateDrizzleSchema = async ({ options, file, adapter }) => {
523
- const tables = getPaymentTables$1(options);
15
+ const tables = getPaymentTables(options);
524
16
  const filePath = file || "./src/db/schema.ts";
525
17
  const databaseType = adapter.options?.provider;
526
18
  const projectRoot = process.cwd();
@@ -900,54 +392,12 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
  }
  }
  code += `\n${relationsString}`;
- const typeHints = generateInvariantTypeHints(tables, getModelName, getFieldName);
- if (typeHints) code += `\n\n${typeHints}`;
- const formattedCode = await prettier.format(code, { parser: "typescript" });
- if (databaseType === "pg") {
- const result = await writeInvariantsMigration(projectRoot, emitPostgresInvariantSql(normalizeInvariants(tables, {
- getModelName,
- getFieldName
- }), "public"));
- if (result.journalCreated) console.log(`Created Drizzle migration journal and invariants migration; now run \`npx drizzle-kit migrate\`.`);
- else if (result.created) console.log(`Generated invariants migration: ${path.relative(projectRoot, result.path)}`);
- else if (result.updated) console.log(`Updated invariants migration: ${path.relative(projectRoot, result.path)}`);
- else console.log(`Invariants migration up to date: ${path.relative(projectRoot, result.path)}`);
- }
  return {
- code: formattedCode,
+ code: await prettier.format(code, { parser: "typescript" }),
  fileName: path.relative(projectRoot, resolvedSchemaPath),
  overwrite: fileExist
  };
  };
- /**
- * Generate TypeScript type hints for invariants (e.g., enum unions).
- */
- function generateInvariantTypeHints(tables, getModelName, getFieldName) {
- const hints = [];
- for (const [tableKey, table] of Object.entries(tables)) {
- if (!table.invariants) continue;
- for (const invariant of table.invariants) {
- const desc = invariant.description.toLowerCase();
- if (desc.includes("determines") && desc.includes("presence")) {
- const modeMatch = invariant.description.match(/(\w+)\.(\w+)\s+determines/);
- if (modeMatch) {
- const [, , fieldName] = modeMatch;
- const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
- if (field) {
- const fieldAttr = table.fields[field];
- if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) {
- const enumName = `${getModelName(tableKey)}${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)}Mode`;
- const enumValues = fieldAttr.type.map((v) => `"${v}"`).join(" | ");
- hints.push(`// Type hint for invariant: ${invariant.id}`);
- hints.push(`export type ${enumName} = ${enumValues};`);
- }
- }
- }
- }
- }
- }
- return hints.length > 0 ? `\n// Invariant type hints\n${hints.join("\n")}` : "";
- }
  function generateImport({ databaseType, tables }) {
  const rootImports = ["relations"];
  const coreImports = [];
@@ -991,24 +441,6 @@ const generateKyselySchema = async ({ options, file, adapter }) => {
  const { compileMigrations } = await getMigrations(options);
  const migrations = await compileMigrations();
  const migrationFile = file || `./better-auth_migrations/${(/* @__PURE__ */ new Date()).toISOString().replace(/:/g, "-")}.sql`;
- if ((adapter?.options?.type || "postgres") === "postgres") {
- const tables = getPaymentTables(options);
- const sql = emitPostgresInvariantSql(normalizeInvariants(tables, {
- getModelName: initGetModelName({
- schema: tables,
- usePlural: adapter?.options?.adapterConfig?.usePlural
- }),
- getFieldName: initGetFieldName({
- schema: tables,
- usePlural: false
- })
- }), "public");
- const sqlFilePath = path.join(path.dirname(migrationFile), "invariants.sql");
- const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
- await fs$1.mkdir(sqlDir, { recursive: true });
- await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
- console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
- }
  return {
  code: migrations.trim() === ";" ? "" : migrations,
  fileName: migrationFile
@@ -1037,15 +469,15 @@ function getPrismaVersion(cwd) {
  //#region src/generators/prisma.ts
  const generatePrismaSchema = async ({ adapter, options, file }) => {
  const provider = adapter.options?.provider || "postgresql";
- const tables = getPaymentTables$1(options);
+ const tables = getPaymentTables(options);
  const filePath = file || "./prisma/schema.prisma";
  const schemaPrismaExist = existsSync(path.join(process.cwd(), filePath));
  const getModelName = initGetModelName({
- schema: getPaymentTables$1(options),
+ schema: getPaymentTables(options),
  usePlural: adapter.options?.adapterConfig?.usePlural
  });
  const getFieldName = initGetFieldName({
- schema: getPaymentTables$1(options),
+ schema: getPaymentTables(options),
  usePlural: false
  });
  let schemaPrisma = "";
@@ -1217,24 +649,6 @@ const generatePrismaSchema = async ({ adapter, options, file }) => {
  }
  });
  const schemaChanged = schema.trim() !== schemaPrisma.trim();
- if (provider === "postgresql") {
- const tables$1 = getPaymentTables$1(options);
- const sql = emitPostgresInvariantSql(normalizeInvariants(tables$1, {
- getModelName: initGetModelName({
- schema: tables$1,
- usePlural: adapter.options?.adapterConfig?.usePlural
- }),
- getFieldName: initGetFieldName({
- schema: tables$1,
- usePlural: false
- })
- }), "public");
- const sqlFilePath = path.join(path.dirname(filePath), "invariants.sql");
- const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
- await fs$1.mkdir(sqlDir, { recursive: true });
- await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
- console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
- }
  return {
  code: schemaChanged ? schema : "",
  fileName: filePath,
@@ -1253,266 +667,27 @@ const getNewPrisma = (provider, cwd) => {
1253
667
  }`;
1254
668
  };
1255
669
 
1256
- //#endregion
1257
- //#region src/generators/invariants-mongo.ts
1258
- /**
1259
- * Generate MongoDB collection validator JSON schema for invariants.
1260
- */
1261
- function emitMongoValidators(invariants) {
1262
- const validators = {};
1263
- for (const inv of invariants) {
1264
- if (inv.enforcement.mongo !== "validator" || !inv.appliesTo.includes("mongo")) continue;
1265
- const collectionName = inv.tableName;
1266
- if (!validators[collectionName]) validators[collectionName] = { $jsonSchema: {
1267
- bsonType: "object",
1268
- required: [],
1269
- properties: {},
1270
- additionalProperties: true
1271
- } };
1272
- const validator = generateMongoValidator(inv);
1273
- if (validator) {
1274
- Object.assign(validators[collectionName].$jsonSchema.properties, validator.properties || {});
1275
- if (validator.required) validators[collectionName].$jsonSchema.required = [...validators[collectionName].$jsonSchema.required || [], ...validator.required];
1276
- if (validator.anyOf) {
1277
- if (!validators[collectionName].$jsonSchema.anyOf) validators[collectionName].$jsonSchema.anyOf = [];
1278
- validators[collectionName].$jsonSchema.anyOf.push(...validator.anyOf);
1279
- }
1280
- }
1281
- }
1282
- return validators;
1283
- }
1284
- /**
1285
- * Generate MongoDB validator JSON schema for a single invariant.
1286
- */
1287
- function generateMongoValidator(inv) {
1288
- const { logic } = inv;
1289
- switch (logic.type) {
1290
- case "field_enum_constraint": {
1291
- const { fieldName, allowedValues, conditionalField } = logic;
1292
- if (conditionalField) {
1293
- const anyOf = [];
1294
- if (!conditionalField.whenPresent) anyOf.push({ properties: {
1295
- [fieldName]: { enum: ["PAYMENT"] },
1296
- [conditionalField.fieldName]: { bsonType: "null" }
1297
- } });
1298
- if (conditionalField.whenPresent) anyOf.push({
1299
- properties: {
1300
- [fieldName]: { enum: ["SUBSCRIPTION"] },
1301
- [conditionalField.fieldName]: { bsonType: "string" }
1302
- },
1303
- required: [conditionalField.fieldName]
1304
- });
1305
- return {
1306
- properties: { [fieldName]: { enum: allowedValues } },
1307
- anyOf
1308
- };
1309
- }
1310
- return { properties: { [fieldName]: { enum: allowedValues } } };
1311
- }
1312
- default: return null;
1313
- }
1314
- }
1315
- /**
1316
- * Generate TypeScript guard module for MongoDB "app" enforcement invariants.
1317
- */
1318
- function emitMongoGuards(invariants) {
1319
- const guardInvariants = invariants.filter((inv) => inv.enforcement.mongo === "app" && inv.appliesTo.includes("mongo"));
1320
- if (guardInvariants.length === 0) return `// No MongoDB app-level invariant guards generated
1321
- // This file is auto-generated - DO NOT EDIT MANUALLY
1322
-
1323
- export {};
1324
- `;
1325
- const guards = [];
1326
- const imports = [];
1327
- for (const inv of guardInvariants) {
1328
- const guard = generateMongoGuard(inv);
1329
- if (guard) {
1330
- guards.push(guard);
1331
- if (guard.imports) imports.push(...guard.imports);
1332
- }
1333
- }
1334
- const uniqueImports = Array.from(new Set(imports));
1335
- return `// MongoDB invariant guards
1336
- // Generated from schema invariants
1337
- // DO NOT EDIT MANUALLY - This file is auto-generated
1338
- //
1339
- // These guards should be called before write operations to enforce invariants
1340
- // at the application level.
1341
-
1342
- ${uniqueImports.length > 0 ? uniqueImports.join("\n") + "\n" : ""}
1343
-
1344
- ${guards.join("\n\n")}
1345
- `;
1346
- }
1347
- /**
1348
- * Generate TypeScript guard function for a single invariant.
1349
- */
1350
- function generateMongoGuard(inv) {
1351
- const { logic, tableName, id, description } = inv;
1352
- switch (logic.type) {
1353
- case "cross_table_ownership": {
1354
- const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
1355
- return { code: `/**
1356
- * Guard: ${description}
1357
- *
1358
- * @param data - The record being created/updated (use camelCase field names)
1359
- * @param db - MongoDB database instance
1360
- * @returns true if invariant is satisfied, throws error otherwise
1361
- */
1362
- export async function ${id}Guard(
1363
- data: { ${fieldName}?: string | null; ${ownershipFieldName}: string },
1364
- db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
1365
- ): Promise<boolean> {
1366
- if (!data.${fieldName}) {
1367
- return true; // Field is optional/nullable
1368
- }
1369
-
1370
- const referenced = await db.collection("${referencedTable}").findOne({
1371
- ${referencedField}: data.${fieldName}
1372
- });
1373
-
1374
- if (!referenced) {
1375
- throw new Error(\`Invariant violation: \${data.${fieldName}} not found in ${referencedTable}\`);
1376
- }
1377
-
1378
- // Compare ownership field (use database field name for referenced record)
1379
- const dataOwnerValue = data.${ownershipFieldName};
1380
- const refOwnerValue = referenced.${ownershipFieldName};
1381
-
1382
- if (refOwnerValue !== dataOwnerValue) {
1383
- throw new Error(
1384
- \`Invariant violation: ${fieldName} must belong to the same ${ownershipFieldName} as the record. \` +
1385
- \`Expected \${dataOwnerValue}, got \${refOwnerValue}\`
1386
- );
1387
- }
1388
-
1389
- return true;
1390
- }` };
1391
- }
1392
- case "cross_table_equality": {
1393
- const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
1394
- return { code: `/**
1395
- * Guard: ${description}
1396
- *
1397
- * @param data - The record being created/updated
1398
- * @param db - MongoDB database instance
1399
- * @returns true if invariant is satisfied, throws error otherwise
1400
- */
1401
- export async function ${id}Guard(
1402
- data: { ${fieldName}: string; ${equalityFieldName}: string },
1403
- db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
1404
- ): Promise<boolean> {
1405
- if (!data.${fieldName}) {
1406
- throw new Error(\`Invariant violation: ${fieldName} is required\`);
1407
- }
1408
-
1409
- const referenced = await db.collection("${referencedTable}").findOne({
1410
- ${referencedField}: data.${fieldName}
1411
- });
1412
-
1413
- if (!referenced) {
1414
- throw new Error(\`Invariant violation: Referenced record not found in ${referencedTable}\`);
1415
- }
1416
-
1417
- if (referenced.${referencedEqualityFieldName} !== data.${equalityFieldName}) {
1418
- throw new Error(
1419
- \`Invariant violation: ${equalityFieldName} must equal ${referencedTable}.${referencedEqualityFieldName}. \` +
1420
- \`Expected \${referenced.${referencedEqualityFieldName}}, got \${data.${equalityFieldName}}\`
1421
- );
1422
- }
1423
-
1424
- return true;
1425
- }` };
1426
- }
1427
- case "raw": return { code: `/**
1428
- * Guard: ${description}
1429
- *
1430
- * TODO: Implement this guard based on the invariant description.
1431
- * This is a placeholder - you must implement the actual validation logic.
1432
- *
1433
- * @param data - The record being created/updated
1434
- * @param db - MongoDB database instance
1435
- * @returns true if invariant is satisfied, throws error otherwise
1436
- */
1437
- export async function ${id}Guard(
1438
- data: any,
1439
- db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
1440
- ): Promise<boolean> {
1441
- // TODO: Implement invariant: ${description}
1442
- console.warn("Guard ${id}Guard is not yet implemented");
1443
- return true;
1444
- }` };
1445
- default: return null;
1446
- }
1447
- }
1448
-
1449
670
  //#endregion
1450
671
  //#region src/generators/mongodb.ts
1451
672
  const generateMongoDBSchema = async ({ options, file, adapter }) => {
1452
- const tables = getPaymentTables$1(options);
673
+ const tables = getPaymentTables(options);
1453
674
  const filePath = file || "./mongodb-schema.ts";
1454
- const invariants = normalizeInvariants(tables, {
1455
- getModelName: initGetModelName({
1456
- schema: tables,
1457
- usePlural: adapter?.options?.adapterConfig?.usePlural
1458
- }),
1459
- getFieldName: initGetFieldName({
1460
- schema: tables,
1461
- usePlural: false
1462
- })
675
+ initGetModelName({
676
+ schema: tables,
677
+ usePlural: adapter?.options?.adapterConfig?.usePlural
678
+ });
679
+ initGetFieldName({
680
+ schema: tables,
681
+ usePlural: false
1463
682
  });
1464
- const validators = emitMongoValidators(invariants);
1465
- const validatorsJson = JSON.stringify(validators, null, 2);
1466
- const guards = emitMongoGuards(invariants);
1467
- const validatorsPath = filePath.replace(/\.ts$/, "-validators.json");
1468
- const validatorsDir = path.dirname(path.resolve(process.cwd(), validatorsPath));
1469
- await fs$1.mkdir(validatorsDir, { recursive: true });
1470
- await fs$1.writeFile(path.resolve(process.cwd(), validatorsPath), validatorsJson);
1471
- console.log(`📝 Generated MongoDB validators: ${validatorsPath}`);
1472
- const guardsPath = filePath.replace(/\.ts$/, "-guards.ts");
1473
- const guardsDir = path.dirname(path.resolve(process.cwd(), guardsPath));
1474
- await fs$1.mkdir(guardsDir, { recursive: true });
1475
- const formattedGuards = await prettier.format(guards, { parser: "typescript" });
1476
- await fs$1.writeFile(path.resolve(process.cwd(), guardsPath), formattedGuards);
1477
- console.log(`📝 Generated MongoDB guards: ${guardsPath}`);
1478
- const validatorsBaseName = path.basename(validatorsPath, ".json");
1479
- const guardsBaseName = path.basename(guardsPath, ".ts");
1480
- const schemaCode = `// MongoDB schema setup
683
+ return {
684
+ code: await prettier.format(`// MongoDB schema setup
1481
685
  // Generated from schema definitions
1482
686
  // DO NOT EDIT MANUALLY - This file is auto-generated
1483
687
 
1484
- import validators from "./${validatorsBaseName}.json";
1485
- import * as guards from "./${guardsBaseName}";
1486
-
1487
- /**
1488
- * MongoDB collection validators.
1489
- * Apply these using db.createCollection() or db.command({ collMod: ... })
1490
- *
1491
- * Example:
1492
- * \`\`\`
1493
- * await db.createCollection("checkout_session", {
1494
- * validator: validators.checkout_session
1495
- * });
1496
- * \`\`\`
1497
- */
1498
- export { validators };
1499
-
1500
- /**
1501
- * MongoDB invariant guards.
1502
- * Call these before write operations to enforce invariants at the application level.
1503
- *
1504
- * Example:
1505
- * \`\`\`
1506
- * import { customer_payment_method_ownershipGuard } from "./${guardsBaseName}";
1507
- *
1508
- * await customer_payment_method_ownershipGuard(data, db);
1509
- * await db.collection("customer").insertOne(data);
1510
- * \`\`\`
1511
- */
1512
- export { guards };
1513
- `;
1514
- return {
1515
- code: await prettier.format(schemaCode, { parser: "typescript" }),
688
+ // Schema definitions for MongoDB collections
689
+ // Use this file to set up your MongoDB collections and indexes
690
+ `, { parser: "typescript" }),
1516
691
  fileName: filePath,
1517
692
  overwrite: true
1518
693
  };
package/dist/index.mjs CHANGED
@@ -1,5 +1,5 @@
  #!/usr/bin/env node
- import { i as getPackageInfo, n as generateSchema } from "./generators-BXA4y5O8.mjs";
+ import { i as getPackageInfo, n as generateSchema } from "./generators-CZDWuCP8.mjs";
  import { Command } from "commander";
  import fs, { existsSync, readFileSync } from "node:fs";
  import fs$1 from "node:fs/promises";
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "pecunia-cli",
- "version": "0.2.8",
+ "version": "0.2.9",
  "type": "module",
  "module": "dist/index.mjs",
  "main": "./dist/index.mjs",