pecunia-cli 0.2.7 → 0.2.9
package/dist/api.mjs
CHANGED
@@ -1,3 +1,3 @@
-import { a as generateKyselySchema, n as generateSchema, o as generateDrizzleSchema, r as generatePrismaSchema, t as adapters } from "./generators-
+import { a as generateKyselySchema, n as generateSchema, o as generateDrizzleSchema, r as generatePrismaSchema, t as adapters } from "./generators-CZDWuCP8.mjs";
 
 export { adapters, generateDrizzleSchema, generateKyselySchema, generatePrismaSchema, generateSchema };
@@ -1,414 +1,18 @@
-import fs, { existsSync
+import fs, { existsSync } from "node:fs";
 import fs$1 from "node:fs/promises";
 import path from "node:path";
-import { getMigrations
-import { capitalizeFirstLetter, getPaymentTables
+import { getMigrations } from "pecunia-root";
+import { capitalizeFirstLetter, getPaymentTables, initGetFieldName, initGetModelName } from "pecunia-core";
 import prettier from "prettier";
 import { produceSchema } from "@mrleebo/prisma-ast";
 
-//#region src/generators/invariants.ts
-/**
-* Normalize invariants from schema into IR format.
-* This parses invariant descriptions and extracts structured logic.
-*/
-function normalizeInvariants(schema, options) {
-const invariants = [];
-for (const [tableKey, table] of Object.entries(schema)) {
-if (!table.invariants) continue;
-const modelName = options.getModelName(tableKey);
-const tableName = table.modelName;
-for (const invariant of table.invariants) {
-const logic = parseInvariantLogic(invariant.description, tableKey, table, schema, options);
-invariants.push({
-id: invariant.id,
-description: invariant.description,
-modelName,
-tableName,
-appliesTo: invariant.appliesTo,
-enforcement: invariant.enforcement || {},
-logic
-});
-}
-}
-return invariants;
-}
-/**
-* Parse invariant description into structured logic.
-* This is a heuristic parser that extracts common patterns.
-*/
-function parseInvariantLogic(description, tableKey, table, schema, options) {
-const desc = description.toLowerCase();
-if (desc.includes("determines") && desc.includes("presence")) {
-const modeMatch = description.match(/(\w+)\.(\w+)\s+determines/);
-const subMatch = description.match(/(\w+)\s+must be (present|null\/absent)/);
-const enumMatches = description.matchAll(/(\w+)\s*=>\s*(\w+)\s+must be (present|null\/absent)/g);
-if (modeMatch && subMatch) {
-const [, modelName, fieldName] = modeMatch;
-const [, conditionalField] = subMatch;
-const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
-const conditional = Object.keys(table.fields).find((k) => k.toLowerCase() === conditionalField.toLowerCase());
-if (field && conditional) {
-const fieldAttr = table.fields[field];
-let allowedValues = [];
-if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) allowedValues = fieldAttr.type;
-else {
-const enumValues = /* @__PURE__ */ new Set();
-for (const match of enumMatches) {
-const val = match[1]?.trim();
-if (val) enumValues.add(val);
-}
-if (enumValues.size > 0) allowedValues = Array.from(enumValues);
-else allowedValues = ["PAYMENT", "SUBSCRIPTION"].filter((e) => desc.includes(e.toLowerCase()));
-}
-let whenPresent = false;
-for (const match of enumMatches) {
-const enumVal = match[1];
-const requirement = match[3];
-if (enumVal && requirement === "present") {
-whenPresent = true;
-break;
-}
-}
-if (!whenPresent && desc.includes("subscription") && desc.includes("must be present")) whenPresent = true;
-return {
-type: "field_enum_constraint",
-field,
-fieldName: options.getFieldName({
-model: tableKey,
-field
-}),
-allowedValues: allowedValues.length > 0 ? allowedValues : ["PAYMENT", "SUBSCRIPTION"],
-conditionalField: {
-field: conditional,
-fieldName: options.getFieldName({
-model: tableKey,
-field: conditional
-}),
-whenPresent
-}
-};
-}
-}
-}
-if (desc.includes("must belong to same") || desc.includes("must equal") && desc.includes("when")) {
-const fieldMatch = description.match(/(\w+)\.(\w+)\s+must/);
-const refMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
-description.match(/(\w+)\.(\w+)\s+must equal/);
-if (fieldMatch && refMatch) {
-const [, modelName, fieldName] = fieldMatch;
-const [, refTable, refField, ownerTable, ownerField] = refMatch;
-const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
-if (field && table.fields[field]?.references) {
-table.fields[field].references;
-const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
-const ownerTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === ownerTable || k === ownerTable);
-if (refTableKey && ownerTableKey) {
-const ownershipField = Object.keys(table.fields).find((k) => k.toLowerCase() === ownerField.toLowerCase());
-if (ownershipField) return {
-type: "cross_table_ownership",
-field,
-fieldName: options.getFieldName({
-model: tableKey,
-field
-}),
-referencedTable: options.getModelName(refTableKey),
-referencedField: options.getFieldName({
-model: refTableKey,
-field: refField
-}),
-ownershipField,
-ownershipFieldName: options.getFieldName({
-model: tableKey,
-field: ownershipField
-})
-};
-}
-}
-}
-}
-if (desc.includes("must match") && desc.includes("of its")) {
-const fieldMatch = description.match(/(\w+)\.(\w+)\s+must match/);
-const refMatch = description.match(/of its\s+(\w+)/);
-const equalityMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
-if (fieldMatch && refMatch && equalityMatch) {
-const [, , equalityFieldName] = fieldMatch;
-const [, refFieldName] = refMatch;
-const [, refTable, refEqualityField, , sourceEqualityField] = equalityMatch;
-const equalityField = Object.keys(table.fields).find((k) => k.toLowerCase() === equalityFieldName.toLowerCase());
-const refField = Object.keys(table.fields).find((k) => k.toLowerCase() === refFieldName.toLowerCase());
-if (equalityField && refField && table.fields[refField]?.references) {
-const ref = table.fields[refField].references;
-const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
-if (refTableKey) {
-const refEquality = Object.keys(schema[refTableKey].fields).find((k) => k.toLowerCase() === refEqualityField.toLowerCase());
-if (refEquality) return {
-type: "cross_table_equality",
-field: refField,
-fieldName: options.getFieldName({
-model: tableKey,
-field: refField
-}),
-referencedTable: options.getModelName(refTableKey),
-referencedField: options.getFieldName({
-model: refTableKey,
-field: ref.field
-}),
-equalityField,
-equalityFieldName: options.getFieldName({
-model: tableKey,
-field: equalityField
-}),
-referencedEqualityField: refEquality,
-referencedEqualityFieldName: options.getFieldName({
-model: refTableKey,
-field: refEquality
-})
-};
-}
-}
-}
-}
-return {
-type: "raw",
-description
-};
-}
-
-//#endregion
-//#region src/generators/invariants-sql.ts
-/**
-* Generate SQL for enforcing invariants in Postgres (and compatible databases).
-* Returns SQL statements for CHECK constraints and triggers.
-*/
-function emitPostgresInvariantSql(invariants, schemaName = "public") {
-const statements = [];
-const checkInvariants = invariants.filter((inv) => inv.enforcement.postgres === "check" && inv.appliesTo.includes("postgres"));
-const triggerInvariants = invariants.filter((inv) => inv.enforcement.postgres === "trigger" && inv.appliesTo.includes("postgres"));
-for (const inv of checkInvariants) {
-const sql = generateCheckConstraint(inv, schemaName);
-if (sql) statements.push(sql);
-}
-for (const inv of triggerInvariants) {
-const sql = generateTrigger(inv, schemaName);
-if (sql) statements.push(...sql);
-}
-if (statements.length === 0) return "-- No invariant enforcement SQL generated\n";
-return `-- Invariant enforcement SQL
--- Generated from schema invariants
--- DO NOT EDIT MANUALLY - This file is auto-generated
-
-${statements.join("\n\n")}
-`;
-}
-/**
-* Generate CHECK constraint SQL for an invariant.
-*/
-function generateCheckConstraint(inv, schemaName) {
-const { logic, tableName, id } = inv;
-switch (logic.type) {
-case "field_enum_constraint": {
-const { fieldName, allowedValues, conditionalField } = logic;
-if (conditionalField) {
-const constraintName$1 = `${tableName}_${id}_check`;
-const enumCheck$1 = allowedValues.map((val) => `'${val}'`).join(", ");
-const subscriptionValue = allowedValues.find((v) => v === "SUBSCRIPTION") || allowedValues.find((v) => v !== "PAYMENT") || allowedValues[0];
-const paymentValue = allowedValues.find((v) => v === "PAYMENT") || allowedValues[0];
-const conditionalCheck = `(
-(${fieldName} = '${subscriptionValue}') = (${conditionalField.fieldName} IS NOT NULL) AND
-(${fieldName} = '${paymentValue}') = (${conditionalField.fieldName} IS NULL)
-)`;
-return `-- ${inv.description}
-ALTER TABLE ${schemaName}.${tableName}
-ADD CONSTRAINT ${constraintName$1}
-CHECK (
-${fieldName} IN (${enumCheck$1}) AND
-${conditionalCheck}
-);`;
-}
-const constraintName = `${tableName}_${id}_check`;
-const enumCheck = allowedValues.map((val) => `'${val}'`).join(", ");
-return `-- ${inv.description}
-ALTER TABLE ${schemaName}.${tableName}
-ADD CONSTRAINT ${constraintName}
-CHECK (${fieldName} IN (${enumCheck}));`;
-}
-default: return null;
-}
-}
-/**
-* Generate trigger SQL for an invariant.
-* Returns array of SQL statements (function + trigger).
-*/
-function generateTrigger(inv, schemaName) {
-const { logic, tableName, id } = inv;
-switch (logic.type) {
-case "cross_table_ownership": {
-const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
-const functionName = `${tableName}_${id}_fn`;
-const triggerName = `${tableName}_${id}_trigger`;
-return [`-- Function to enforce: ${inv.description}
-CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
-RETURNS TRIGGER AS $$
-BEGIN
-IF NEW.${fieldName} IS NOT NULL THEN
-IF NOT EXISTS (
-SELECT 1
-FROM ${schemaName}.${referencedTable}
-WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
-AND ${referencedTable}.${ownershipFieldName} = NEW.${ownershipFieldName}
-) THEN
-RAISE EXCEPTION 'Invariant violation: % must belong to the same % as the record', '${fieldName}', '${ownershipFieldName}';
-END IF;
-END IF;
-RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
-DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
-CREATE TRIGGER ${triggerName}
-BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
-FOR EACH ROW
-EXECUTE FUNCTION ${schemaName}.${functionName}();`];
-}
-case "cross_table_equality": {
-const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
-const functionName = `${tableName}_${id}_fn`;
-const triggerName = `${tableName}_${id}_trigger`;
-return [`-- Function to enforce: ${inv.description}
-CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
-RETURNS TRIGGER AS $$
-DECLARE
-ref_${referencedEqualityFieldName} TEXT;
-BEGIN
-IF NEW.${fieldName} IS NOT NULL THEN
-SELECT ${referencedEqualityFieldName} INTO ref_${referencedEqualityFieldName}
-FROM ${schemaName}.${referencedTable}
-WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
-LIMIT 1;
-
-IF ref_${referencedEqualityFieldName} IS NULL THEN
-RAISE EXCEPTION 'Invariant violation: Referenced record not found in ${referencedTable}';
-END IF;
-
-IF NEW.${equalityFieldName} != ref_${referencedEqualityFieldName} THEN
-RAISE EXCEPTION 'Invariant violation: % must equal %.%', '${equalityFieldName}', '${referencedTable}', '${referencedEqualityFieldName}';
-END IF;
-END IF;
-RETURN NEW;
-END;
-$$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
-DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
-CREATE TRIGGER ${triggerName}
-BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
-FOR EACH ROW
-EXECUTE FUNCTION ${schemaName}.${functionName}();`];
-}
-default: return null;
-}
-}
-
-//#endregion
-//#region src/utils/drizzle-migrations.ts
-/**
-* Find or create the Drizzle migrations directory.
-* Never places migrations under src/ to avoid drizzle-kit crashes.
-*
-* @param projectRoot - The project root directory (where package.json typically lives)
-* @returns The path to the drizzle migrations directory
-*/
-async function getDrizzleMigrationsDir(projectRoot) {
-const drizzleDir = path.resolve(projectRoot, "drizzle");
-if (existsSync(drizzleDir)) {
-if (!statSync(drizzleDir).isDirectory()) throw new Error(`"drizzle" exists but is not a directory. Please remove it or rename it.`);
-return drizzleDir;
-}
-await fs$1.mkdir(drizzleDir, { recursive: true });
-return drizzleDir;
-}
-/**
-* Find existing invariants migration file if it exists.
-*
-* @param migrationsDir - Path to the drizzle migrations directory
-* @returns The migration number if found, or null
-*/
-async function findExistingInvariantsMigration(migrationsDir) {
-if (!existsSync(migrationsDir)) return null;
-const invariantsFile = (await fs$1.readdir(migrationsDir)).find((file) => file.endsWith("_pecunia_invariants.sql") && /^\d{4}_/.test(file));
-if (!invariantsFile) return null;
-const match = invariantsFile.match(/^(\d{4})_/);
-return match ? match[1] : null;
-}
-/**
-* Determine the next migration number by scanning existing migration files.
-* Migration files are expected to follow the pattern: `NNNN_description.sql`
-* where NNNN is a 4-digit number.
-*
-* @param migrationsDir - Path to the drizzle migrations directory
-* @returns The next migration number (4-digit string, e.g., "0001")
-*/
-async function getNextMigrationNumber(migrationsDir) {
-const existingInvariantsNumber = await findExistingInvariantsMigration(migrationsDir);
-if (existingInvariantsNumber) return existingInvariantsNumber;
-if (!existsSync(migrationsDir)) return "0001";
-const migrationFiles = (await fs$1.readdir(migrationsDir)).filter((file) => file.endsWith(".sql") && /^\d{4}_/.test(file));
-if (migrationFiles.length === 0) return "0001";
-const numbers = migrationFiles.map((file) => {
-const match = file.match(/^(\d{4})_/);
-return match ? parseInt(match[1], 10) : 0;
-}).filter((n) => n > 0);
-if (numbers.length === 0) return "0001";
-const nextNumber = Math.max(...numbers) + 1;
-if (nextNumber >= 9999) return "9999";
-return String(nextNumber).padStart(4, "0");
-}
-/**
-* Get the path for the invariants migration file.
-* Uses a deterministic name based on migration number to ensure it runs after
-* base table creation migrations.
-*
-* @param migrationsDir - Path to the drizzle migrations directory
-* @param migrationNumber - The migration number to use (4-digit string)
-* @returns The full path to the invariants migration file
-*/
-function getInvariantsMigrationPath(migrationsDir, migrationNumber) {
-return path.join(migrationsDir, `${migrationNumber}_pecunia_invariants.sql`);
-}
-/**
-* Write the invariants SQL file to the drizzle migrations directory.
-* Handles idempotency by checking if the file exists and has the same content.
-*
-* @param projectRoot - The project root directory
-* @param sqlContent - The SQL content to write
-* @returns The path to the written file, or null if no changes were needed
-*/
-async function writeInvariantsMigration(projectRoot, sqlContent) {
-const migrationsDir = await getDrizzleMigrationsDir(projectRoot);
-const migrationPath = getInvariantsMigrationPath(migrationsDir, await getNextMigrationNumber(migrationsDir));
-let created = false;
-let updated = false;
-if (existsSync(migrationPath)) {
-if (await fs$1.readFile(migrationPath, "utf-8") === sqlContent) return {
-path: migrationPath,
-created: false,
-updated: false
-};
-updated = true;
-} else created = true;
-await fs$1.writeFile(migrationPath, sqlContent, "utf-8");
-return {
-path: migrationPath,
-created,
-updated
-};
-}
-
-//#endregion
 //#region src/generators/drizzle.ts
 function convertToSnakeCase(str, camelCase) {
 if (camelCase) return str;
 return str.replace(/([A-Z]+)([A-Z][a-z])/g, "$1_$2").replace(/([a-z\d])([A-Z])/g, "$1_$2").toLowerCase();
 }
 const generateDrizzleSchema = async ({ options, file, adapter }) => {
-const tables = getPaymentTables
+const tables = getPaymentTables(options);
 const filePath = file || "./src/db/schema.ts";
 const databaseType = adapter.options?.provider;
 const projectRoot = process.cwd();
@@ -788,53 +392,12 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
 }
 }
 code += `\n${relationsString}`;
-const typeHints = generateInvariantTypeHints(tables, getModelName, getFieldName);
-if (typeHints) code += `\n\n${typeHints}`;
-const formattedCode = await prettier.format(code, { parser: "typescript" });
-if (databaseType === "pg") {
-const result = await writeInvariantsMigration(projectRoot, emitPostgresInvariantSql(normalizeInvariants(tables, {
-getModelName,
-getFieldName
-}), "public"));
-if (result.created) console.log(`Generated invariants migration: ${path.relative(projectRoot, result.path)}\nNote: Created ./drizzle directory. Invariants SQL is placed in Drizzle migrations.`);
-else if (result.updated) console.log(`Updated invariants migration: ${path.relative(projectRoot, result.path)}`);
-else console.log(`Invariants migration up to date: ${path.relative(projectRoot, result.path)}`);
-}
 return {
-code:
+code: await prettier.format(code, { parser: "typescript" }),
 fileName: path.relative(projectRoot, resolvedSchemaPath),
 overwrite: fileExist
 };
 };
-/**
-* Generate TypeScript type hints for invariants (e.g., enum unions).
-*/
-function generateInvariantTypeHints(tables, getModelName, getFieldName) {
-const hints = [];
-for (const [tableKey, table] of Object.entries(tables)) {
-if (!table.invariants) continue;
-for (const invariant of table.invariants) {
-const desc = invariant.description.toLowerCase();
-if (desc.includes("determines") && desc.includes("presence")) {
-const modeMatch = invariant.description.match(/(\w+)\.(\w+)\s+determines/);
-if (modeMatch) {
-const [, , fieldName] = modeMatch;
-const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
-if (field) {
-const fieldAttr = table.fields[field];
-if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) {
-const enumName = `${getModelName(tableKey)}${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)}Mode`;
-const enumValues = fieldAttr.type.map((v) => `"${v}"`).join(" | ");
-hints.push(`// Type hint for invariant: ${invariant.id}`);
-hints.push(`export type ${enumName} = ${enumValues};`);
-}
-}
-}
-}
-}
-}
-return hints.length > 0 ? `\n// Invariant type hints\n${hints.join("\n")}` : "";
-}
 function generateImport({ databaseType, tables }) {
 const rootImports = ["relations"];
 const coreImports = [];
@@ -878,24 +441,6 @@ const generateKyselySchema = async ({ options, file, adapter }) => {
 const { compileMigrations } = await getMigrations(options);
 const migrations = await compileMigrations();
 const migrationFile = file || `./better-auth_migrations/${(/* @__PURE__ */ new Date()).toISOString().replace(/:/g, "-")}.sql`;
-if ((adapter?.options?.type || "postgres") === "postgres") {
-const tables = getPaymentTables(options);
-const sql = emitPostgresInvariantSql(normalizeInvariants(tables, {
-getModelName: initGetModelName({
-schema: tables,
-usePlural: adapter?.options?.adapterConfig?.usePlural
-}),
-getFieldName: initGetFieldName({
-schema: tables,
-usePlural: false
-})
-}), "public");
-const sqlFilePath = path.join(path.dirname(migrationFile), "invariants.sql");
-const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
-await fs$1.mkdir(sqlDir, { recursive: true });
-await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
-console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
-}
 return {
 code: migrations.trim() === ";" ? "" : migrations,
 fileName: migrationFile
@@ -924,15 +469,15 @@ function getPrismaVersion(cwd) {
 //#region src/generators/prisma.ts
 const generatePrismaSchema = async ({ adapter, options, file }) => {
 const provider = adapter.options?.provider || "postgresql";
-const tables = getPaymentTables
+const tables = getPaymentTables(options);
 const filePath = file || "./prisma/schema.prisma";
 const schemaPrismaExist = existsSync(path.join(process.cwd(), filePath));
 const getModelName = initGetModelName({
-schema: getPaymentTables
+schema: getPaymentTables(options),
 usePlural: adapter.options?.adapterConfig?.usePlural
 });
 const getFieldName = initGetFieldName({
-schema: getPaymentTables
+schema: getPaymentTables(options),
 usePlural: false
 });
 let schemaPrisma = "";
@@ -1104,24 +649,6 @@ const generatePrismaSchema = async ({ adapter, options, file }) => {
 }
 });
 const schemaChanged = schema.trim() !== schemaPrisma.trim();
-if (provider === "postgresql") {
-const tables$1 = getPaymentTables$1(options);
-const sql = emitPostgresInvariantSql(normalizeInvariants(tables$1, {
-getModelName: initGetModelName({
-schema: tables$1,
-usePlural: adapter.options?.adapterConfig?.usePlural
-}),
-getFieldName: initGetFieldName({
-schema: tables$1,
-usePlural: false
-})
-}), "public");
-const sqlFilePath = path.join(path.dirname(filePath), "invariants.sql");
-const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
-await fs$1.mkdir(sqlDir, { recursive: true });
-await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
-console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
-}
 return {
 code: schemaChanged ? schema : "",
 fileName: filePath,
@@ -1140,266 +667,27 @@ const getNewPrisma = (provider, cwd) => {
 }`;
 };
 
-//#endregion
-//#region src/generators/invariants-mongo.ts
-/**
-* Generate MongoDB collection validator JSON schema for invariants.
-*/
-function emitMongoValidators(invariants) {
-const validators = {};
-for (const inv of invariants) {
-if (inv.enforcement.mongo !== "validator" || !inv.appliesTo.includes("mongo")) continue;
-const collectionName = inv.tableName;
-if (!validators[collectionName]) validators[collectionName] = { $jsonSchema: {
-bsonType: "object",
-required: [],
-properties: {},
-additionalProperties: true
-} };
-const validator = generateMongoValidator(inv);
-if (validator) {
-Object.assign(validators[collectionName].$jsonSchema.properties, validator.properties || {});
-if (validator.required) validators[collectionName].$jsonSchema.required = [...validators[collectionName].$jsonSchema.required || [], ...validator.required];
-if (validator.anyOf) {
-if (!validators[collectionName].$jsonSchema.anyOf) validators[collectionName].$jsonSchema.anyOf = [];
-validators[collectionName].$jsonSchema.anyOf.push(...validator.anyOf);
-}
-}
-}
-return validators;
-}
-/**
-* Generate MongoDB validator JSON schema for a single invariant.
-*/
-function generateMongoValidator(inv) {
-const { logic } = inv;
-switch (logic.type) {
-case "field_enum_constraint": {
-const { fieldName, allowedValues, conditionalField } = logic;
-if (conditionalField) {
-const anyOf = [];
-if (!conditionalField.whenPresent) anyOf.push({ properties: {
-[fieldName]: { enum: ["PAYMENT"] },
-[conditionalField.fieldName]: { bsonType: "null" }
-} });
-if (conditionalField.whenPresent) anyOf.push({
-properties: {
-[fieldName]: { enum: ["SUBSCRIPTION"] },
-[conditionalField.fieldName]: { bsonType: "string" }
-},
-required: [conditionalField.fieldName]
-});
-return {
-properties: { [fieldName]: { enum: allowedValues } },
-anyOf
-};
-}
-return { properties: { [fieldName]: { enum: allowedValues } } };
-}
-default: return null;
-}
-}
-/**
-* Generate TypeScript guard module for MongoDB "app" enforcement invariants.
-*/
-function emitMongoGuards(invariants) {
-const guardInvariants = invariants.filter((inv) => inv.enforcement.mongo === "app" && inv.appliesTo.includes("mongo"));
-if (guardInvariants.length === 0) return `// No MongoDB app-level invariant guards generated
-// This file is auto-generated - DO NOT EDIT MANUALLY
-
-export {};
-`;
-const guards = [];
-const imports = [];
-for (const inv of guardInvariants) {
-const guard = generateMongoGuard(inv);
-if (guard) {
-guards.push(guard);
-if (guard.imports) imports.push(...guard.imports);
-}
-}
-const uniqueImports = Array.from(new Set(imports));
-return `// MongoDB invariant guards
-// Generated from schema invariants
-// DO NOT EDIT MANUALLY - This file is auto-generated
-//
-// These guards should be called before write operations to enforce invariants
-// at the application level.
-
-${uniqueImports.length > 0 ? uniqueImports.join("\n") + "\n" : ""}
-
-${guards.join("\n\n")}
-`;
-}
-/**
-* Generate TypeScript guard function for a single invariant.
-*/
-function generateMongoGuard(inv) {
-const { logic, tableName, id, description } = inv;
-switch (logic.type) {
-case "cross_table_ownership": {
-const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
-return { code: `/**
-* Guard: ${description}
-*
-* @param data - The record being created/updated (use camelCase field names)
-* @param db - MongoDB database instance
-* @returns true if invariant is satisfied, throws error otherwise
-*/
-export async function ${id}Guard(
-data: { ${fieldName}?: string | null; ${ownershipFieldName}: string },
-db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
-): Promise<boolean> {
-if (!data.${fieldName}) {
-return true; // Field is optional/nullable
-}
-
-const referenced = await db.collection("${referencedTable}").findOne({
-${referencedField}: data.${fieldName}
-});
-
-if (!referenced) {
-throw new Error(\`Invariant violation: \${data.${fieldName}} not found in ${referencedTable}\`);
-}
-
-// Compare ownership field (use database field name for referenced record)
-const dataOwnerValue = data.${ownershipFieldName};
-const refOwnerValue = referenced.${ownershipFieldName};
-
-if (refOwnerValue !== dataOwnerValue) {
-throw new Error(
-\`Invariant violation: ${fieldName} must belong to the same ${ownershipFieldName} as the record. \` +
-\`Expected \${dataOwnerValue}, got \${refOwnerValue}\`
-);
-}
-
-return true;
-}` };
-}
-case "cross_table_equality": {
-const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
-return { code: `/**
-* Guard: ${description}
-*
-* @param data - The record being created/updated
-* @param db - MongoDB database instance
-* @returns true if invariant is satisfied, throws error otherwise
-*/
-export async function ${id}Guard(
-data: { ${fieldName}: string; ${equalityFieldName}: string },
-db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
-): Promise<boolean> {
-if (!data.${fieldName}) {
-throw new Error(\`Invariant violation: ${fieldName} is required\`);
-}
-
-const referenced = await db.collection("${referencedTable}").findOne({
-${referencedField}: data.${fieldName}
-});
-
-if (!referenced) {
-throw new Error(\`Invariant violation: Referenced record not found in ${referencedTable}\`);
-}
-
-if (referenced.${referencedEqualityFieldName} !== data.${equalityFieldName}) {
-throw new Error(
-\`Invariant violation: ${equalityFieldName} must equal ${referencedTable}.${referencedEqualityFieldName}. \` +
-\`Expected \${referenced.${referencedEqualityFieldName}}, got \${data.${equalityFieldName}}\`
-);
-}
-
-return true;
-}` };
-}
-case "raw": return { code: `/**
-* Guard: ${description}
-*
-* TODO: Implement this guard based on the invariant description.
-* This is a placeholder - you must implement the actual validation logic.
-*
-* @param data - The record being created/updated
-* @param db - MongoDB database instance
-* @returns true if invariant is satisfied, throws error otherwise
-*/
-export async function ${id}Guard(
-data: any,
-db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
-): Promise<boolean> {
-// TODO: Implement invariant: ${description}
-console.warn("Guard ${id}Guard is not yet implemented");
-return true;
-}` };
-default: return null;
-}
-}
-
 //#endregion
 //#region src/generators/mongodb.ts
 const generateMongoDBSchema = async ({ options, file, adapter }) => {
-const tables = getPaymentTables
+const tables = getPaymentTables(options);
 const filePath = file || "./mongodb-schema.ts";
-
-
-
-usePlural: adapter?.options?.adapterConfig?.usePlural
-}),
-getFieldName: initGetFieldName({
-schema: tables,
-usePlural: false
-})
+initGetModelName({
+schema: tables,
+usePlural: adapter?.options?.adapterConfig?.usePlural
 });
-
-
-
-
-
-
-await fs$1.writeFile(path.resolve(process.cwd(), validatorsPath), validatorsJson);
-console.log(`📝 Generated MongoDB validators: ${validatorsPath}`);
-const guardsPath = filePath.replace(/\.ts$/, "-guards.ts");
-const guardsDir = path.dirname(path.resolve(process.cwd(), guardsPath));
-await fs$1.mkdir(guardsDir, { recursive: true });
-const formattedGuards = await prettier.format(guards, { parser: "typescript" });
-await fs$1.writeFile(path.resolve(process.cwd(), guardsPath), formattedGuards);
-console.log(`📝 Generated MongoDB guards: ${guardsPath}`);
-const validatorsBaseName = path.basename(validatorsPath, ".json");
-const guardsBaseName = path.basename(guardsPath, ".ts");
-const schemaCode = `// MongoDB schema setup
+initGetFieldName({
+schema: tables,
+usePlural: false
+});
+return {
+code: await prettier.format(`// MongoDB schema setup
 // Generated from schema definitions
 // DO NOT EDIT MANUALLY - This file is auto-generated
 
-
-
-
-/**
-* MongoDB collection validators.
-* Apply these using db.createCollection() or db.command({ collMod: ... })
-*
-* Example:
-* \`\`\`
-* await db.createCollection("checkout_session", {
-* validator: validators.checkout_session
-* });
-* \`\`\`
-*/
-export { validators };
-
-/**
-* MongoDB invariant guards.
-* Call these before write operations to enforce invariants at the application level.
-*
-* Example:
-* \`\`\`
-* import { customer_payment_method_ownershipGuard } from "./${guardsBaseName}";
-*
-* await customer_payment_method_ownershipGuard(data, db);
-* await db.collection("customer").insertOne(data);
-* \`\`\`
-*/
-export { guards };
-`;
-return {
-code: await prettier.format(schemaCode, { parser: "typescript" }),
+// Schema definitions for MongoDB collections
+// Use this file to set up your MongoDB collections and indexes
+`, { parser: "typescript" }),
 fileName: filePath,
 overwrite: true
 };
package/dist/index.mjs
CHANGED
@@ -1,5 +1,5 @@
 #!/usr/bin/env node
-import { i as getPackageInfo, n as generateSchema } from "./generators-
+import { i as getPackageInfo, n as generateSchema } from "./generators-CZDWuCP8.mjs";
 import { Command } from "commander";
 import fs, { existsSync, readFileSync } from "node:fs";
 import fs$1 from "node:fs/promises";