pecunia-cli 0.1.9 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/api.d.mts +1 -0
- package/dist/api.mjs +1 -1
- package/dist/{generators-D5zLfxo1.mjs → generators-DXOqBlgH.mjs} +673 -16
- package/dist/index.mjs +1 -1
- package/package.json +3 -3
package/dist/api.d.mts
CHANGED
package/dist/api.mjs
CHANGED
@@ -1,3 +1,3 @@
-import { a as generateKyselySchema, n as generateSchema, o as generateDrizzleSchema, r as generatePrismaSchema, t as adapters } from "./generators-D5zLfxo1.mjs";
+import { a as generateKyselySchema, n as generateSchema, o as generateDrizzleSchema, r as generatePrismaSchema, t as adapters } from "./generators-DXOqBlgH.mjs";
 
 export { adapters, generateDrizzleSchema, generateKyselySchema, generatePrismaSchema, generateSchema };
package/dist/{generators-D5zLfxo1.mjs → generators-DXOqBlgH.mjs}
@@ -1,18 +1,319 @@
 import fs, { existsSync } from "node:fs";
 import fs$1 from "node:fs/promises";
 import path from "node:path";
-import { getMigrations } from "pecunia-root";
-import { capitalizeFirstLetter, getPaymentTables, initGetFieldName, initGetModelName } from "pecunia-core";
+import { getMigrations, getPaymentTables } from "pecunia-root";
+import { capitalizeFirstLetter, getPaymentTables as getPaymentTables$1, initGetFieldName, initGetModelName } from "pecunia-core";
 import prettier from "prettier";
 import { produceSchema } from "@mrleebo/prisma-ast";
 
+//#region src/generators/invariants.ts
+/**
+ * Normalize invariants from schema into IR format.
+ * This parses invariant descriptions and extracts structured logic.
+ */
+function normalizeInvariants(schema, options) {
+	const invariants = [];
+	for (const [tableKey, table] of Object.entries(schema)) {
+		if (!table.invariants) continue;
+		const modelName = options.getModelName(tableKey);
+		const tableName = table.modelName;
+		for (const invariant of table.invariants) {
+			const logic = parseInvariantLogic(invariant.description, tableKey, table, schema, options);
+			invariants.push({
+				id: invariant.id,
+				description: invariant.description,
+				modelName,
+				tableName,
+				appliesTo: invariant.appliesTo,
+				enforcement: invariant.enforcement || {},
+				logic
+			});
+		}
+	}
+	return invariants;
+}
+/**
+ * Parse invariant description into structured logic.
+ * This is a heuristic parser that extracts common patterns.
+ */
+function parseInvariantLogic(description, tableKey, table, schema, options) {
+	const desc = description.toLowerCase();
+	if (desc.includes("determines") && desc.includes("presence")) {
+		const modeMatch = description.match(/(\w+)\.(\w+)\s+determines/);
+		const subMatch = description.match(/(\w+)\s+must be (present|null\/absent)/);
+		const enumMatches = description.matchAll(/(\w+)\s*=>\s*(\w+)\s+must be (present|null\/absent)/g);
+		if (modeMatch && subMatch) {
+			const [, modelName, fieldName] = modeMatch;
+			const [, conditionalField] = subMatch;
+			const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
+			const conditional = Object.keys(table.fields).find((k) => k.toLowerCase() === conditionalField.toLowerCase());
+			if (field && conditional) {
+				const fieldAttr = table.fields[field];
+				let allowedValues = [];
+				if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) allowedValues = fieldAttr.type;
+				else {
+					const enumValues = /* @__PURE__ */ new Set();
+					for (const match of enumMatches) {
+						const val = match[1]?.trim();
+						if (val) enumValues.add(val);
+					}
+					if (enumValues.size > 0) allowedValues = Array.from(enumValues);
+					else allowedValues = ["PAYMENT", "SUBSCRIPTION"].filter((e) => desc.includes(e.toLowerCase()));
+				}
+				let whenPresent = false;
+				for (const match of enumMatches) {
+					const enumVal = match[1];
+					const requirement = match[3];
+					if (enumVal && requirement === "present") {
+						whenPresent = true;
+						break;
+					}
+				}
+				if (!whenPresent && desc.includes("subscription") && desc.includes("must be present")) whenPresent = true;
+				return {
+					type: "field_enum_constraint",
+					field,
+					fieldName: options.getFieldName({
+						model: tableKey,
+						field
+					}),
+					allowedValues: allowedValues.length > 0 ? allowedValues : ["PAYMENT", "SUBSCRIPTION"],
+					conditionalField: {
+						field: conditional,
+						fieldName: options.getFieldName({
+							model: tableKey,
+							field: conditional
+						}),
+						whenPresent
+					}
+				};
+			}
+		}
+	}
+	if (desc.includes("must belong to same") || desc.includes("must equal") && desc.includes("when")) {
+		const fieldMatch = description.match(/(\w+)\.(\w+)\s+must/);
+		const refMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
+		description.match(/(\w+)\.(\w+)\s+must equal/);
+		if (fieldMatch && refMatch) {
+			const [, modelName, fieldName] = fieldMatch;
+			const [, refTable, refField, ownerTable, ownerField] = refMatch;
+			const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
+			if (field && table.fields[field]?.references) {
+				table.fields[field].references;
+				const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
+				const ownerTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === ownerTable || k === ownerTable);
+				if (refTableKey && ownerTableKey) {
+					const ownershipField = Object.keys(table.fields).find((k) => k.toLowerCase() === ownerField.toLowerCase());
+					if (ownershipField) return {
+						type: "cross_table_ownership",
+						field,
+						fieldName: options.getFieldName({
+							model: tableKey,
+							field
+						}),
+						referencedTable: options.getModelName(refTableKey),
+						referencedField: options.getFieldName({
+							model: refTableKey,
+							field: refField
+						}),
+						ownershipField,
+						ownershipFieldName: options.getFieldName({
+							model: tableKey,
+							field: ownershipField
+						})
+					};
+				}
+			}
+		}
+	}
+	if (desc.includes("must match") && desc.includes("of its")) {
+		const fieldMatch = description.match(/(\w+)\.(\w+)\s+must match/);
+		const refMatch = description.match(/of its\s+(\w+)/);
+		const equalityMatch = description.match(/(\w+)\.(\w+)\s+must equal\s+(\w+)\.(\w+)/);
+		if (fieldMatch && refMatch && equalityMatch) {
+			const [, , equalityFieldName] = fieldMatch;
+			const [, refFieldName] = refMatch;
+			const [, refTable, refEqualityField, , sourceEqualityField] = equalityMatch;
+			const equalityField = Object.keys(table.fields).find((k) => k.toLowerCase() === equalityFieldName.toLowerCase());
+			const refField = Object.keys(table.fields).find((k) => k.toLowerCase() === refFieldName.toLowerCase());
+			if (equalityField && refField && table.fields[refField]?.references) {
+				const ref = table.fields[refField].references;
+				const refTableKey = Object.keys(schema).find((k) => schema[k]?.modelName === refTable || k === refTable);
+				if (refTableKey) {
+					const refEquality = Object.keys(schema[refTableKey].fields).find((k) => k.toLowerCase() === refEqualityField.toLowerCase());
+					if (refEquality) return {
+						type: "cross_table_equality",
+						field: refField,
+						fieldName: options.getFieldName({
+							model: tableKey,
+							field: refField
+						}),
+						referencedTable: options.getModelName(refTableKey),
+						referencedField: options.getFieldName({
+							model: refTableKey,
+							field: ref.field
+						}),
+						equalityField,
+						equalityFieldName: options.getFieldName({
+							model: tableKey,
+							field: equalityField
+						}),
+						referencedEqualityField: refEquality,
+						referencedEqualityFieldName: options.getFieldName({
+							model: refTableKey,
+							field: refEquality
+						})
+					};
+				}
+			}
+		}
+	}
+	return {
+		type: "raw",
+		description
+	};
+}
+
+//#endregion
+//#region src/generators/invariants-sql.ts
+/**
+ * Generate SQL for enforcing invariants in Postgres (and compatible databases).
+ * Returns SQL statements for CHECK constraints and triggers.
+ */
+function emitPostgresInvariantSql(invariants, schemaName = "public") {
+	const statements = [];
+	const checkInvariants = invariants.filter((inv) => inv.enforcement.postgres === "check" && inv.appliesTo.includes("postgres"));
+	const triggerInvariants = invariants.filter((inv) => inv.enforcement.postgres === "trigger" && inv.appliesTo.includes("postgres"));
+	for (const inv of checkInvariants) {
+		const sql = generateCheckConstraint(inv, schemaName);
+		if (sql) statements.push(sql);
+	}
+	for (const inv of triggerInvariants) {
+		const sql = generateTrigger(inv, schemaName);
+		if (sql) statements.push(...sql);
+	}
+	if (statements.length === 0) return "-- No invariant enforcement SQL generated\n";
+	return `-- Invariant enforcement SQL
+-- Generated from schema invariants
+-- DO NOT EDIT MANUALLY - This file is auto-generated
+
+${statements.join("\n\n")}
+`;
+}
+/**
+ * Generate CHECK constraint SQL for an invariant.
+ */
+function generateCheckConstraint(inv, schemaName) {
+	const { logic, tableName, id } = inv;
+	switch (logic.type) {
+		case "field_enum_constraint": {
+			const { fieldName, allowedValues, conditionalField } = logic;
+			if (conditionalField) {
+				const constraintName$1 = `${tableName}_${id}_check`;
+				const enumCheck$1 = allowedValues.map((val) => `'${val}'`).join(", ");
+				const subscriptionValue = allowedValues.find((v) => v === "SUBSCRIPTION") || allowedValues.find((v) => v !== "PAYMENT") || allowedValues[0];
+				const paymentValue = allowedValues.find((v) => v === "PAYMENT") || allowedValues[0];
+				const conditionalCheck = `(
+(${fieldName} = '${subscriptionValue}') = (${conditionalField.fieldName} IS NOT NULL) AND
+(${fieldName} = '${paymentValue}') = (${conditionalField.fieldName} IS NULL)
+)`;
+				return `-- ${inv.description}
+ALTER TABLE ${schemaName}.${tableName}
+ADD CONSTRAINT ${constraintName$1}
+CHECK (
+${fieldName} IN (${enumCheck$1}) AND
+${conditionalCheck}
+);`;
+			}
+			const constraintName = `${tableName}_${id}_check`;
+			const enumCheck = allowedValues.map((val) => `'${val}'`).join(", ");
+			return `-- ${inv.description}
+ALTER TABLE ${schemaName}.${tableName}
+ADD CONSTRAINT ${constraintName}
+CHECK (${fieldName} IN (${enumCheck}));`;
+		}
+		default: return null;
+	}
+}
+/**
+ * Generate trigger SQL for an invariant.
+ * Returns array of SQL statements (function + trigger).
+ */
+function generateTrigger(inv, schemaName) {
+	const { logic, tableName, id } = inv;
+	switch (logic.type) {
+		case "cross_table_ownership": {
+			const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
+			const functionName = `${tableName}_${id}_fn`;
+			const triggerName = `${tableName}_${id}_trigger`;
+			return [`-- Function to enforce: ${inv.description}
+CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
+RETURNS TRIGGER AS $$
+BEGIN
+IF NEW.${fieldName} IS NOT NULL THEN
+IF NOT EXISTS (
+SELECT 1
+FROM ${schemaName}.${referencedTable}
+WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
+AND ${referencedTable}.${ownershipFieldName} = NEW.${ownershipFieldName}
+) THEN
+RAISE EXCEPTION 'Invariant violation: % must belong to the same % as the record', '${fieldName}', '${ownershipFieldName}';
+END IF;
+END IF;
+RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
+DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
+CREATE TRIGGER ${triggerName}
+BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
+FOR EACH ROW
+EXECUTE FUNCTION ${schemaName}.${functionName}();`];
+		}
+		case "cross_table_equality": {
+			const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
+			const functionName = `${tableName}_${id}_fn`;
+			const triggerName = `${tableName}_${id}_trigger`;
+			return [`-- Function to enforce: ${inv.description}
+CREATE OR REPLACE FUNCTION ${schemaName}.${functionName}()
+RETURNS TRIGGER AS $$
+DECLARE
+ref_${referencedEqualityFieldName} TEXT;
+BEGIN
+IF NEW.${fieldName} IS NOT NULL THEN
+SELECT ${referencedEqualityFieldName} INTO ref_${referencedEqualityFieldName}
+FROM ${schemaName}.${referencedTable}
+WHERE ${referencedTable}.${referencedField} = NEW.${fieldName}
+LIMIT 1;
+
+IF ref_${referencedEqualityFieldName} IS NULL THEN
+RAISE EXCEPTION 'Invariant violation: Referenced record not found in ${referencedTable}';
+END IF;
+
+IF NEW.${equalityFieldName} != ref_${referencedEqualityFieldName} THEN
+RAISE EXCEPTION 'Invariant violation: % must equal %.%', '${equalityFieldName}', '${referencedTable}', '${referencedEqualityFieldName}';
+END IF;
+END IF;
+RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;`, `-- Trigger to enforce: ${inv.description}
+DROP TRIGGER IF EXISTS ${triggerName} ON ${schemaName}.${tableName};
+CREATE TRIGGER ${triggerName}
+BEFORE INSERT OR UPDATE ON ${schemaName}.${tableName}
+FOR EACH ROW
+EXECUTE FUNCTION ${schemaName}.${functionName}();`];
+		}
+		default: return null;
+	}
+}
+
+//#endregion
 //#region src/generators/drizzle.ts
 function convertToSnakeCase(str, camelCase) {
 	if (camelCase) return str;
 	return str.replace(/([A-Z]+)([A-Z][a-z])/g, "$1_$2").replace(/([a-z\d])([A-Z])/g, "$1_$2").toLowerCase();
 }
 const generateDrizzleSchema = async ({ options, file, adapter }) => {
-	const tables = getPaymentTables(options);
+	const tables = getPaymentTables$1(options);
 	const filePath = file || "./payment-schema.ts";
 	const databaseType = adapter.options?.provider;
 	if (!databaseType) throw new Error("Database provider type is undefined during Drizzle schema generation. Please define a `provider` in the Drizzle adapter config.");
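For orientation (illustrative, not part of the published package): `parseInvariantLogic` above turns a prose invariant description into a small structured object that the SQL, Prisma, and Mongo emitters consume. A minimal sketch of the input it expects and the IR it would roughly produce, assuming a hypothetical `checkoutSession` table and default naming — the table, field, and invariant names below are made up:

```ts
// Hypothetical schema fragment shaped the way normalizeInvariants reads it
// (fields, modelName, invariants); none of these names come from pecunia-core.
const schema = {
	checkoutSession: {
		modelName: "checkout_session",
		fields: {
			mode: { type: ["PAYMENT", "SUBSCRIPTION"], required: true },
			subscriptionId: { type: "string", required: false }
		},
		invariants: [{
			id: "checkout_session_mode_presence",
			description: "checkoutSession.mode determines presence: SUBSCRIPTION => subscriptionId must be present, PAYMENT => subscriptionId must be null/absent",
			appliesTo: ["postgres", "mongo"],
			enforcement: { postgres: "check", mongo: "validator" }
		}]
	}
};

// Stand-ins for the naming helpers from pecunia-core (default, no-op naming).
const options = {
	getModelName: (_key: string) => "checkoutSession",
	getFieldName: (ref: { model: string; field: string }) => ref.field
};

// normalizeInvariants(schema, options) should then yield one entry whose logic is roughly:
// {
//   type: "field_enum_constraint",
//   field: "mode", fieldName: "mode",
//   allowedValues: ["PAYMENT", "SUBSCRIPTION"],
//   conditionalField: { field: "subscriptionId", fieldName: "subscriptionId", whenPresent: true }
// }
```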
@@ -120,6 +421,7 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
 			});
 			if (attr.references) {
 				const referencedModelName = tableNameMap.get(attr.references.model) || getModelName(attr.references.model);
+				const onDelete = attr.references.onDelete || "no action";
 				references.push({
 					fieldName,
 					referencedTable: referencedModelName,
@@ -127,8 +429,8 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
 						model: attr.references.model,
 						field: attr.references.field
 					}),
-					onDelete
-					required: attr.required
+					onDelete,
+					required: attr.required ?? false,
 					originalModel: attr.references.model
 				});
 			}
@@ -168,7 +470,14 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
 			const reverseKey = `${referencedTableKey}->${tableDef.tableKey}`;
 			const reverseRef = referenceGraph.get(reverseKey);
 			if (reverseRef) {
-
+				const thisIsNullableWithSetNull = !ref.required && ref.onDelete === "set null";
+				const reverseIsNullableWithSetNull = !reverseRef.required && reverseRef.onDelete === "set null";
+				if (thisIsNullableWithSetNull) continue;
+				if (reverseIsNullableWithSetNull && ref.required) {
+					skipReferences.add(`${tableDef.tableKey}.${ref.fieldName}`);
+					continue;
+				}
+				if (!ref.required && (reverseRef.required || ref.onDelete !== "cascade" && reverseRef.onDelete === "cascade")) skipReferences.add(`${tableDef.tableKey}.${ref.fieldName}`);
 			}
 		}
 		for (const tableDef of tableDefinitions) {
@@ -205,7 +514,8 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
 			if (reverseRef) comment = `\n // FK constraint removed to break circular dependency with ${ref.referencedTable}\n // Primary FK: ${reverseRef.sourceModelName}.${reverseRef.fieldName} -> ${modelName}.${fieldName}\n // This field still maintains referential integrity via application logic and Drizzle relations`;
 			else comment = `\n // FK constraint removed to break circular dependency with ${ref.referencedTable}\n // This field still maintains referential integrity via application logic and Drizzle relations`;
 		}
-		const
+		const isRequired = attr.required === true;
+		const fieldDef = `${fieldName}: ${type}${isRequired ? ".notNull()" : ""}${attr.unique ? ".unique()" : ""}${referenceChain}`;
 		return comment ? `${comment}\n ${fieldDef}` : fieldDef;
 	});
 	const schema = `export const ${modelName} = ${databaseType}Table("${convertToSnakeCase(modelName, adapter.options?.camelCase)}", {
@@ -325,12 +635,55 @@ const generateDrizzleSchema = async ({ options, file, adapter }) => {
 		}
 	}
 	code += `\n${relationsString}`;
+	const typeHints = generateInvariantTypeHints(tables, getModelName, getFieldName);
+	if (typeHints) code += `\n\n${typeHints}`;
+	const formattedCode = await prettier.format(code, { parser: "typescript" });
+	if (databaseType === "pg") {
+		const sql = emitPostgresInvariantSql(normalizeInvariants(tables, {
+			getModelName,
+			getFieldName
+		}), "public");
+		const sqlFilePath = filePath.replace(/\.ts$/, "-invariants.sql");
+		const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
+		await fs$1.mkdir(sqlDir, { recursive: true });
+		await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
+		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
+	}
 	return {
-		code:
+		code: formattedCode,
 		fileName: filePath,
 		overwrite: fileExist
 	};
 };
+/**
+ * Generate TypeScript type hints for invariants (e.g., enum unions).
+ */
+function generateInvariantTypeHints(tables, getModelName, getFieldName) {
+	const hints = [];
+	for (const [tableKey, table] of Object.entries(tables)) {
+		if (!table.invariants) continue;
+		for (const invariant of table.invariants) {
+			const desc = invariant.description.toLowerCase();
+			if (desc.includes("determines") && desc.includes("presence")) {
+				const modeMatch = invariant.description.match(/(\w+)\.(\w+)\s+determines/);
+				if (modeMatch) {
+					const [, , fieldName] = modeMatch;
+					const field = Object.keys(table.fields).find((k) => k.toLowerCase() === fieldName.toLowerCase());
+					if (field) {
+						const fieldAttr = table.fields[field];
+						if (typeof fieldAttr.type !== "string" && Array.isArray(fieldAttr.type) && fieldAttr.type.every((x) => typeof x === "string")) {
+							const enumName = `${getModelName(tableKey)}${fieldName.charAt(0).toUpperCase() + fieldName.slice(1)}Mode`;
+							const enumValues = fieldAttr.type.map((v) => `"${v}"`).join(" | ");
+							hints.push(`// Type hint for invariant: ${invariant.id}`);
+							hints.push(`export type ${enumName} = ${enumValues};`);
+						}
+					}
+				}
+			}
+		}
+	}
+	return hints.length > 0 ? `\n// Invariant type hints\n${hints.join("\n")}` : "";
+}
 function generateImport({ databaseType, tables }) {
 	const rootImports = ["relations"];
 	const coreImports = [];
@@ -370,12 +723,31 @@ function generateImport({ databaseType, tables }) {
 
 //#endregion
 //#region src/generators/kysely.ts
-const generateKyselySchema = async ({ options, file }) => {
+const generateKyselySchema = async ({ options, file, adapter }) => {
 	const { compileMigrations } = await getMigrations(options);
 	const migrations = await compileMigrations();
+	const migrationFile = file || `./better-auth_migrations/${(/* @__PURE__ */ new Date()).toISOString().replace(/:/g, "-")}.sql`;
+	if ((adapter?.options?.type || "postgres") === "postgres") {
+		const tables = getPaymentTables(options);
+		const sql = emitPostgresInvariantSql(normalizeInvariants(tables, {
+			getModelName: initGetModelName({
+				schema: tables,
+				usePlural: adapter?.options?.adapterConfig?.usePlural
+			}),
+			getFieldName: initGetFieldName({
+				schema: tables,
+				usePlural: false
+			})
+		}), "public");
+		const sqlFilePath = path.join(path.dirname(migrationFile), "invariants.sql");
+		const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
+		await fs$1.mkdir(sqlDir, { recursive: true });
+		await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
+		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
+	}
 	return {
 		code: migrations.trim() === ";" ? "" : migrations,
-		fileName:
+		fileName: migrationFile
 	};
 };
 
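The Drizzle, Kysely, and Prisma generators in this release write a standalone invariants SQL file when the target is Postgres (for example `payment-schema-invariants.sql`, or `invariants.sql` next to the migration or `schema.prisma`); applying it is left to the user. Below is a minimal sketch, not shipped with the package, of running that file with the `pg` client that pecunia-cli already depends on — the path and connection string are placeholders:

```ts
// Apply a generated invariants.sql file against Postgres using the `pg` client.
import { readFile } from "node:fs/promises";
import { Client } from "pg";

async function applyInvariants(sqlPath: string, connectionString: string): Promise<void> {
	const sql = await readFile(sqlPath, "utf8");
	const client = new Client({ connectionString });
	await client.connect();
	try {
		// The file contains ALTER TABLE ... ADD CONSTRAINT and CREATE FUNCTION/TRIGGER
		// statements; run them in one transaction so a failure leaves nothing half-applied.
		await client.query("BEGIN");
		await client.query(sql);
		await client.query("COMMIT");
	} catch (err) {
		await client.query("ROLLBACK");
		throw err;
	} finally {
		await client.end();
	}
}
```

Note that `CREATE OR REPLACE FUNCTION` and `DROP TRIGGER IF EXISTS ... CREATE TRIGGER` re-run cleanly, but the `ADD CONSTRAINT` statements will fail on a database where the constraints already exist, so re-runs need the constraints dropped or guarded first.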
@@ -401,15 +773,15 @@ function getPrismaVersion(cwd) {
 //#region src/generators/prisma.ts
 const generatePrismaSchema = async ({ adapter, options, file }) => {
 	const provider = adapter.options?.provider || "postgresql";
-	const tables = getPaymentTables(options);
+	const tables = getPaymentTables$1(options);
 	const filePath = file || "./prisma/schema.prisma";
 	const schemaPrismaExist = existsSync(path.join(process.cwd(), filePath));
 	const getModelName = initGetModelName({
-		schema: getPaymentTables(options),
+		schema: getPaymentTables$1(options),
 		usePlural: adapter.options?.adapterConfig?.usePlural
 	});
 	const getFieldName = initGetFieldName({
-		schema: getPaymentTables(options),
+		schema: getPaymentTables$1(options),
 		usePlural: false
 	});
 	let schemaPrisma = "";
@@ -534,8 +906,9 @@ const generatePrismaSchema = async ({ adapter, options, file }) => {
 		if (attr.references) {
 			const referencedOriginalModelName = getModelName(attr.references.model);
 			const referencedCustomModelName = tables[referencedOriginalModelName]?.modelName || referencedOriginalModelName;
-			let action = "
-			if (attr.references.onDelete === "
+			let action = "NoAction";
+			if (attr.references.onDelete === "cascade") action = "Cascade";
+			else if (attr.references.onDelete === "no action") action = "NoAction";
 			else if (attr.references.onDelete === "set null") action = "SetNull";
 			else if (attr.references.onDelete === "set default") action = "SetDefault";
 			else if (attr.references.onDelete === "restrict") action = "Restrict";
@@ -580,6 +953,24 @@ const generatePrismaSchema = async ({ adapter, options, file }) => {
 		}
 	});
 	const schemaChanged = schema.trim() !== schemaPrisma.trim();
+	if (provider === "postgresql") {
+		const tables$1 = getPaymentTables$1(options);
+		const sql = emitPostgresInvariantSql(normalizeInvariants(tables$1, {
+			getModelName: initGetModelName({
+				schema: tables$1,
+				usePlural: adapter.options?.adapterConfig?.usePlural
+			}),
+			getFieldName: initGetFieldName({
+				schema: tables$1,
+				usePlural: false
+			})
+		}), "public");
+		const sqlFilePath = path.join(path.dirname(filePath), "invariants.sql");
+		const sqlDir = path.dirname(path.resolve(process.cwd(), sqlFilePath));
+		await fs$1.mkdir(sqlDir, { recursive: true });
+		await fs$1.writeFile(path.resolve(process.cwd(), sqlFilePath), sql);
+		console.log(`📝 Generated invariant SQL: ${sqlFilePath}`);
+	}
 	return {
 		code: schemaChanged ? schema : "",
 		fileName: filePath,
@@ -598,12 +989,278 @@ const getNewPrisma = (provider, cwd) => {
 }`;
 };
 
+//#endregion
+//#region src/generators/invariants-mongo.ts
+/**
+ * Generate MongoDB collection validator JSON schema for invariants.
+ */
+function emitMongoValidators(invariants) {
+	const validators = {};
+	for (const inv of invariants) {
+		if (inv.enforcement.mongo !== "validator" || !inv.appliesTo.includes("mongo")) continue;
+		const collectionName = inv.tableName;
+		if (!validators[collectionName]) validators[collectionName] = { $jsonSchema: {
+			bsonType: "object",
+			required: [],
+			properties: {},
+			additionalProperties: true
+		} };
+		const validator = generateMongoValidator(inv);
+		if (validator) {
+			Object.assign(validators[collectionName].$jsonSchema.properties, validator.properties || {});
+			if (validator.required) validators[collectionName].$jsonSchema.required = [...validators[collectionName].$jsonSchema.required || [], ...validator.required];
+			if (validator.anyOf) {
+				if (!validators[collectionName].$jsonSchema.anyOf) validators[collectionName].$jsonSchema.anyOf = [];
+				validators[collectionName].$jsonSchema.anyOf.push(...validator.anyOf);
+			}
+		}
+	}
+	return validators;
+}
+/**
+ * Generate MongoDB validator JSON schema for a single invariant.
+ */
+function generateMongoValidator(inv) {
+	const { logic } = inv;
+	switch (logic.type) {
+		case "field_enum_constraint": {
+			const { fieldName, allowedValues, conditionalField } = logic;
+			if (conditionalField) {
+				const anyOf = [];
+				if (!conditionalField.whenPresent) anyOf.push({ properties: {
+					[fieldName]: { enum: ["PAYMENT"] },
+					[conditionalField.fieldName]: { bsonType: "null" }
+				} });
+				if (conditionalField.whenPresent) anyOf.push({
+					properties: {
+						[fieldName]: { enum: ["SUBSCRIPTION"] },
+						[conditionalField.fieldName]: { bsonType: "string" }
+					},
+					required: [conditionalField.fieldName]
+				});
+				return {
+					properties: { [fieldName]: { enum: allowedValues } },
+					anyOf
+				};
+			}
+			return { properties: { [fieldName]: { enum: allowedValues } } };
+		}
+		default: return null;
+	}
+}
+/**
+ * Generate TypeScript guard module for MongoDB "app" enforcement invariants.
+ */
+function emitMongoGuards(invariants) {
+	const guardInvariants = invariants.filter((inv) => inv.enforcement.mongo === "app" && inv.appliesTo.includes("mongo"));
+	if (guardInvariants.length === 0) return `// No MongoDB app-level invariant guards generated
+// This file is auto-generated - DO NOT EDIT MANUALLY
+
+export {};
+`;
+	const guards = [];
+	const imports = [];
+	for (const inv of guardInvariants) {
+		const guard = generateMongoGuard(inv);
+		if (guard) {
+			guards.push(guard);
+			if (guard.imports) imports.push(...guard.imports);
+		}
+	}
+	const uniqueImports = Array.from(new Set(imports));
+	return `// MongoDB invariant guards
+// Generated from schema invariants
+// DO NOT EDIT MANUALLY - This file is auto-generated
+//
+// These guards should be called before write operations to enforce invariants
+// at the application level.
+
+${uniqueImports.length > 0 ? uniqueImports.join("\n") + "\n" : ""}
+
+${guards.join("\n\n")}
+`;
+}
+/**
+ * Generate TypeScript guard function for a single invariant.
+ */
+function generateMongoGuard(inv) {
+	const { logic, tableName, id, description } = inv;
+	switch (logic.type) {
+		case "cross_table_ownership": {
+			const { fieldName, referencedTable, referencedField, ownershipFieldName } = logic;
+			return { code: `/**
+* Guard: ${description}
+*
+* @param data - The record being created/updated (use camelCase field names)
+* @param db - MongoDB database instance
+* @returns true if invariant is satisfied, throws error otherwise
+*/
+export async function ${id}Guard(
+data: { ${fieldName}?: string | null; ${ownershipFieldName}: string },
+db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
+): Promise<boolean> {
+if (!data.${fieldName}) {
+return true; // Field is optional/nullable
+}
+
+const referenced = await db.collection("${referencedTable}").findOne({
+${referencedField}: data.${fieldName}
+});
+
+if (!referenced) {
+throw new Error(\`Invariant violation: \${data.${fieldName}} not found in ${referencedTable}\`);
+}
+
+// Compare ownership field (use database field name for referenced record)
+const dataOwnerValue = data.${ownershipFieldName};
+const refOwnerValue = referenced.${ownershipFieldName};
+
+if (refOwnerValue !== dataOwnerValue) {
+throw new Error(
+\`Invariant violation: ${fieldName} must belong to the same ${ownershipFieldName} as the record. \` +
+\`Expected \${dataOwnerValue}, got \${refOwnerValue}\`
+);
+}
+
+return true;
+}` };
+		}
+		case "cross_table_equality": {
+			const { fieldName, referencedTable, referencedField, equalityFieldName, referencedEqualityFieldName } = logic;
+			return { code: `/**
+* Guard: ${description}
+*
+* @param data - The record being created/updated
+* @param db - MongoDB database instance
+* @returns true if invariant is satisfied, throws error otherwise
+*/
+export async function ${id}Guard(
+data: { ${fieldName}: string; ${equalityFieldName}: string },
+db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
+): Promise<boolean> {
+if (!data.${fieldName}) {
+throw new Error(\`Invariant violation: ${fieldName} is required\`);
+}
+
+const referenced = await db.collection("${referencedTable}").findOne({
+${referencedField}: data.${fieldName}
+});
+
+if (!referenced) {
+throw new Error(\`Invariant violation: Referenced record not found in ${referencedTable}\`);
+}
+
+if (referenced.${referencedEqualityFieldName} !== data.${equalityFieldName}) {
+throw new Error(
+\`Invariant violation: ${equalityFieldName} must equal ${referencedTable}.${referencedEqualityFieldName}. \` +
+\`Expected \${referenced.${referencedEqualityFieldName}}, got \${data.${equalityFieldName}}\`
+);
+}
+
+return true;
+}` };
+		}
+		case "raw": return { code: `/**
+* Guard: ${description}
+*
+* TODO: Implement this guard based on the invariant description.
+* This is a placeholder - you must implement the actual validation logic.
+*
+* @param data - The record being created/updated
+* @param db - MongoDB database instance
+* @returns true if invariant is satisfied, throws error otherwise
+*/
+export async function ${id}Guard(
+data: any,
+db: { collection(name: string): { findOne(filter: any): Promise<any | null> } }
+): Promise<boolean> {
+// TODO: Implement invariant: ${description}
+console.warn("Guard ${id}Guard is not yet implemented");
+return true;
+}` };
+		default: return null;
+	}
+}
+
+//#endregion
+//#region src/generators/mongodb.ts
+const generateMongoDBSchema = async ({ options, file, adapter }) => {
+	const tables = getPaymentTables$1(options);
+	const filePath = file || "./mongodb-schema.ts";
+	const invariants = normalizeInvariants(tables, {
+		getModelName: initGetModelName({
+			schema: tables,
+			usePlural: adapter?.options?.adapterConfig?.usePlural
+		}),
+		getFieldName: initGetFieldName({
+			schema: tables,
+			usePlural: false
+		})
+	});
+	const validators = emitMongoValidators(invariants);
+	const validatorsJson = JSON.stringify(validators, null, 2);
+	const guards = emitMongoGuards(invariants);
+	const validatorsPath = filePath.replace(/\.ts$/, "-validators.json");
+	const validatorsDir = path.dirname(path.resolve(process.cwd(), validatorsPath));
+	await fs$1.mkdir(validatorsDir, { recursive: true });
+	await fs$1.writeFile(path.resolve(process.cwd(), validatorsPath), validatorsJson);
+	console.log(`📝 Generated MongoDB validators: ${validatorsPath}`);
+	const guardsPath = filePath.replace(/\.ts$/, "-guards.ts");
+	const guardsDir = path.dirname(path.resolve(process.cwd(), guardsPath));
+	await fs$1.mkdir(guardsDir, { recursive: true });
+	const formattedGuards = await prettier.format(guards, { parser: "typescript" });
+	await fs$1.writeFile(path.resolve(process.cwd(), guardsPath), formattedGuards);
+	console.log(`📝 Generated MongoDB guards: ${guardsPath}`);
+	const validatorsBaseName = path.basename(validatorsPath, ".json");
+	const guardsBaseName = path.basename(guardsPath, ".ts");
+	const schemaCode = `// MongoDB schema setup
+// Generated from schema definitions
+// DO NOT EDIT MANUALLY - This file is auto-generated
+
+import validators from "./${validatorsBaseName}.json";
+import * as guards from "./${guardsBaseName}";
+
+/**
+* MongoDB collection validators.
+* Apply these using db.createCollection() or db.command({ collMod: ... })
+*
+* Example:
+* \`\`\`
+* await db.createCollection("checkout_session", {
+* validator: validators.checkout_session
+* });
+* \`\`\`
+*/
+export { validators };
+
+/**
+* MongoDB invariant guards.
+* Call these before write operations to enforce invariants at the application level.
+*
+* Example:
+* \`\`\`
+* import { customer_payment_method_ownershipGuard } from "./${guardsBaseName}";
+*
+* await customer_payment_method_ownershipGuard(data, db);
+* await db.collection("customer").insertOne(data);
+* \`\`\`
+*/
+export { guards };
+`;
+	return {
+		code: await prettier.format(schemaCode, { parser: "typescript" }),
+		fileName: filePath,
+		overwrite: true
+	};
+};
+
 //#endregion
 //#region src/generators/index.ts
 const adapters = {
 	prisma: generatePrismaSchema,
 	drizzle: generateDrizzleSchema,
-	kysely: generateKyselySchema
+	kysely: generateKyselySchema,
+	mongodb: generateMongoDBSchema
 };
 const generateSchema = async (opts) => {
 	const adapter = opts.adapter;
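The new `mongodb` generator above writes a `*-validators.json` file ($jsonSchema validators per collection), a `*-guards.ts` module (app-level guard functions), and a schema module that re-exports both; the generated module's own comments show `db.createCollection(...)` usage. A minimal sketch, not part of the package, of applying every generated validator with the official `mongodb` driver — which is not a pecunia-cli dependency, and where the URI, database name, and file path are placeholders:

```ts
// Apply the generated $jsonSchema validators to a MongoDB database.
import { readFile } from "node:fs/promises";
import { MongoClient } from "mongodb";

async function applyValidators(uri: string, dbName: string, validatorsPath: string): Promise<void> {
	// validatorsPath points at the generated file, e.g. "./mongodb-schema-validators.json".
	const validators: Record<string, object> = JSON.parse(await readFile(validatorsPath, "utf8"));
	const client = new MongoClient(uri);
	await client.connect();
	try {
		const db = client.db(dbName);
		for (const [collection, validator] of Object.entries(validators)) {
			// collMod updates an existing collection's validator; fall back to
			// createCollection for collections that do not exist yet.
			await db.command({ collMod: collection, validator }).catch(() =>
				db.createCollection(collection, { validator })
			);
		}
	} finally {
		await client.close();
	}
}
```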
package/dist/index.mjs
CHANGED
@@ -1,5 +1,5 @@
 #!/usr/bin/env node
-import { i as getPackageInfo, n as generateSchema } from "./generators-D5zLfxo1.mjs";
+import { i as getPackageInfo, n as generateSchema } from "./generators-DXOqBlgH.mjs";
 import { Command } from "commander";
 import fs, { existsSync, readFileSync } from "node:fs";
 import fs$1 from "node:fs/promises";
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pecunia-cli",
-  "version": "0.1.9",
+  "version": "0.2.1",
   "type": "module",
   "module": "dist/index.mjs",
   "main": "./dist/index.mjs",
@@ -64,8 +64,8 @@
     "dotenv": "^17.2.2",
     "drizzle-orm": "^0.33.0",
     "open": "^10.2.0",
-    "pecunia-core": "^0.1.
-    "pecunia-root": "^0.2.
+    "pecunia-core": "^0.1.3",
+    "pecunia-root": "^0.2.2",
     "pg": "^8.16.3",
    "prettier": "^3.6.2",
    "prompts": "^2.4.2",