@c15t/cli 1.2.0-beta.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.mjs +1058 -0
- package/package.json +53 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,1058 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { Command } from 'commander';
|
|
3
|
+
import fs$1, { existsSync } from 'node:fs';
|
|
4
|
+
import fs from 'node:fs/promises';
|
|
5
|
+
import path from 'node:path';
|
|
6
|
+
import { logger } from '@c15t/backend';
|
|
7
|
+
import { getConsentTables, getMigrations, getAdapter } from '@c15t/backend/db';
|
|
8
|
+
import chalk from 'chalk';
|
|
9
|
+
import prompts from 'prompts';
|
|
10
|
+
import yoctoSpinner from 'yocto-spinner';
|
|
11
|
+
import { z } from 'zod';
|
|
12
|
+
import { produceSchema } from '@mrleebo/prisma-ast';
|
|
13
|
+
import babelPresetReact from '@babel/preset-react';
|
|
14
|
+
import babelPresetTypescript from '@babel/preset-typescript';
|
|
15
|
+
import { C15TError } from '@c15t/backend/error';
|
|
16
|
+
import { loadConfig } from 'c12';
|
|
17
|
+
import 'dotenv/config';
|
|
18
|
+
import Crypto from 'node:crypto';
|
|
19
|
+
import fs$2 from 'fs-extra';
|
|
20
|
+
|
|
21
|
+
/**
 * Converts a camelCase identifier to snake_case.
 *
 * @param {string|null|undefined} str - Identifier to convert; nullish input
 *   yields an empty string.
 * @returns {string} The snake_case form (uppercase letters become `_` + lowercase).
 */
function convertToSnakeCase(str) {
	if (str === undefined || str === null) {
		return "";
	}
	return str.replace(/[A-Z]/g, (upper) => `_${upper.toLowerCase()}`);
}
|
|
27
|
+
/**
 * Generates a Drizzle ORM schema module for the configured consent tables.
 *
 * @param {object} args
 * @param {object} args.options - c15t options; resolved into tables via getConsentTables.
 * @param {string} [args.file] - Output path; defaults to "./auth-schema.ts".
 * @param {object} args.adapter - DB adapter; `adapter.options.provider` selects the
 *   dialect ("sqlite" | "pg" | "mysql") and `usePlural` pluralises model names.
 * @returns {Promise<{code: string, fileName: string, overwrite: boolean}>}
 *   `overwrite` is true when the target file already exists.
 */
const generateDrizzleSchema = async ({
	options,
	file,
	adapter
}) => {
	const tables = getConsentTables(options);
	const filePath = file || "./auth-schema.ts";
	const databaseType = adapter.options?.provider;
	const usePlural = adapter.options?.usePlural;
	// sqlite has no dedicated timestamp/boolean column helpers in drizzle-orm.
	const timestampAndBoolean = databaseType !== "sqlite" ? "timestamp, boolean" : "";
	const int = databaseType === "mysql" ? "int" : "integer";
	// Import "bigint" only when at least one field actually requires it.
	const hasBigint = Object.values(tables).some(
		(table) => Object.values(table.fields).some(
			(field) => "bigint" in field && field.bigint
		)
	);
	const bigint = databaseType !== "sqlite" ? "bigint" : "";
	const text = databaseType === "mysql" ? "varchar, text" : "text";
	const jsonType = ["mysql", "pg"].includes(databaseType || "") ? ", json" : "";
	// Header import line of the generated file.
	let code = `import { ${databaseType}Table, ${text}, ${int}${hasBigint ? `, ${bigint}` : ""}, ${timestampAndBoolean}${jsonType} } from "drizzle-orm/${databaseType}-core";`;
	const fileExist = existsSync(filePath);
	let isFirstTable = true;
	for (const table in tables) {
		if (Object.prototype.hasOwnProperty.call(tables, table)) {
			// Unique/referenced MySQL strings use fixed-length varchar so they are indexable.
			let getMySQLStringType = function(field, name) {
				if (field.unique) {
					return `varchar('${name}', { length: 255 })`;
				}
				if (field.references) {
					return `varchar('${name}', { length: 36 })`;
				}
				return `text('${name}')`;
			}, getType = function(fieldName, field) {
				// Maps a c15t field type to the per-dialect Drizzle column expression.
				const snakeCaseName = convertToSnakeCase(fieldName);
				const type = field.type;
				const typeMap = {
					string: {
						sqlite: `text('${snakeCaseName}')`,
						pg: `text('${snakeCaseName}')`,
						mysql: getMySQLStringType(field, snakeCaseName)
					},
					boolean: {
						sqlite: `integer('${snakeCaseName}', { mode: 'boolean' })`,
						pg: `boolean('${snakeCaseName}')`,
						mysql: `boolean('${snakeCaseName}')`
					},
					number: {
						sqlite: `integer('${snakeCaseName}')`,
						pg: "bigint" in field && field.bigint ? `bigint('${snakeCaseName}', { mode: 'number' })` : `integer('${snakeCaseName}')`,
						mysql: "bigint" in field && field.bigint ? `bigint('${snakeCaseName}', { mode: 'number' })` : `int('${snakeCaseName}')`
					},
					date: {
						sqlite: `integer('${snakeCaseName}', { mode: 'timestamp' })`,
						pg: `timestamp('${snakeCaseName}')`,
						mysql: `timestamp('${snakeCaseName}')`
					},
					// Add JSON type support
					json: {
						sqlite: `text('${snakeCaseName}')`,
						// SQLite uses TEXT for JSON
						pg: `json('${snakeCaseName}')`,
						// PostgreSQL native JSON
						mysql: `json('${snakeCaseName}')`
						// MySQL native JSON
					}
				};
				if (!typeMap[type]) {
					// Unknown types fall back to a plain text column.
					return `text('${snakeCaseName}')`;
				}
				const dbType = databaseType && ["sqlite", "pg", "mysql"].includes(databaseType) ? databaseType : "sqlite";
				return typeMap[type][dbType];
			};
			let modelName = usePlural ? `${tables[table].modelName}s` : tables[table].modelName;
			if (!modelName) {
				modelName = table;
			}
			const fields = tables[table].fields;
			const id = databaseType === "mysql" ? `varchar("id", { length: 36 }).primaryKey()` : `text("id").primaryKey()`;
			const tableNameForSQL = convertToSnakeCase(modelName);
			// NOTE(review): both branches append the same separator, so the
			// isFirstTable flag has no observable effect — candidate for cleanup.
			if (isFirstTable) {
				code += "\n\n";
				isFirstTable = false;
			} else {
				code += "\n\n";
			}
			const schema = `export const ${modelName} = ${databaseType}Table("${tableNameForSQL}", {
id: ${id},
${Object.keys(fields).map((field) => {
				if (Object.prototype.hasOwnProperty.call(fields, field)) {
					const attr = fields[field];
					return ` ${field}: ${getType(field, attr)}${attr.required ? ".notNull()" : ""}${attr.unique ? ".unique()" : ""}${attr.references ? `.references(()=> ${usePlural ? `${attr.references.model}s` : attr.references.model}.${attr.references.field}, { onDelete: 'cascade' })` : ""}`;
				}
				return "";
			}).filter(Boolean).join(",\n")}
});`;
			code += schema;
		}
	}
	return {
		code,
		fileName: filePath,
		// Caller should prompt before overwriting an existing schema file.
		overwrite: fileExist
	};
};
|
|
131
|
+
|
|
132
|
+
// --- SQL parsing helpers (used by formatSQL) ---
// Extracts table name and column list from a `CREATE TABLE "name" (...)` statement.
const CREATE_TABLE_REGEX = /create\s+table\s+"([^"]+)"\s+\((.*)\)/i;
// Extracts index name and target table from a `CREATE INDEX` statement.
const CREATE_INDEX_REGEX = /create\s+index\s+"?([^"\s]+)"?\s+on\s+"?([^"\s]+)"?/i;
// --- keyword-uppercasing regexes (word-bounded, case-insensitive, global) ---
const NOT_NULL_REGEX = /\bnot null\b/gi;
const PRIMARY_KEY_REGEX = /\bprimary key\b/gi;
const REFERENCES_REGEX = /\breferences\b/gi;
const UNIQUE_REGEX = /\bunique\b/gi;
const CREATE_TABLE_KEYWORD_REGEX = /\bcreate\s+table\b/gi;
const CREATE_INDEX_KEYWORD_REGEX = /\bcreate\s+index\b/gi;
const ALTER_TABLE_REGEX = /\balter\s+table\b/gi;
const INSERT_INTO_REGEX = /\binsert\s+into\b/gi;
const UPDATE_REGEX = /\bupdate\b/gi;
const DELETE_FROM_REGEX = /\bdelete\s+from\b/gi;
const SELECT_REGEX = /\bselect\b/gi;
const FROM_REGEX = /\bfrom\b/gi;
const WHERE_REGEX = /\bwhere\b/gi;
const JOIN_REGEX = /\bjoin\b/gi;
const ON_REGEX = /\bon\b/gi;
const AND_REGEX = /\band\b/gi;
const OR_REGEX = /\bor\b/gi;
// --- convention-based column-type rewrites (match on quoted column names) ---
// Columns named "isXxx" stored as integer -> treated as booleans.
const BOOLEAN_FIELD_REGEX = /("is[A-Z][a-zA-Z0-9]*")\s+integer/g;
// createdAt/updatedAt/expiresAt columns -> timestamp types.
const DATE_FIELD_REGEX = /("(?:created|updated|expires)At")\s+date/gi;
// Common short identifier columns -> bounded varchar on pg/mysql.
const TEXT_FIELD_REGEX = /("(?:name|code|description|id)")\s+text/gi;
// Columns conventionally holding JSON payloads -> native JSON types where available.
const JSON_FIELD_REGEX = /("(?:metadata|config|data|settings|options|preferences|attributes)")\s+text/gi;
|
|
155
|
+
/**
 * Pretty-prints a compiled migration: uppercases keywords, rewrites column
 * types per dialect, makes CREATE statements idempotent (IF NOT EXISTS),
 * wraps everything in a transaction, and appends a commented-out ROLLBACK
 * section with matching DROP statements.
 *
 * @param {string} sql - Semicolon-separated migration statements.
 * @param {string} [databaseType="sqlite"] - "sqlite" | "pg" | "mysql" | "d1" | ...
 * @param {{timestamp?: string}} [options] - Optional fixed timestamp (used by tests).
 * @returns {string} The formatted migration script.
 */
function formatSQL(sql, databaseType = "sqlite", options) {
	// "pg" (adapter name) is rendered as "postgresql" in the output header.
	const dbType = databaseType === "pg" ? "postgresql" : databaseType;
	const statements = sql.split(";").filter((stmt) => stmt.trim());
	// DROPs are unshifted so the rollback runs in reverse creation order.
	const rollbackStatements = [];
	const formattedStatements = statements.map((statement) => {
		const trimmedStmt = statement.trim().toLowerCase();
		if (trimmedStmt.startsWith("create table")) {
			const match = statement.match(CREATE_TABLE_REGEX);
			if (match) {
				const [_, tableName, columnsStr] = match;
				rollbackStatements.unshift(`DROP TABLE IF EXISTS "${tableName}"`);
				const columns = columnsStr.split(",").map((col) => col.trim());
				const formattedColumns = columns.map((col) => {
					// Uppercase common constraint keywords in each column definition.
					let formattedCol = col.replace(NOT_NULL_REGEX, "NOT NULL").replace(PRIMARY_KEY_REGEX, "PRIMARY KEY").replace(REFERENCES_REGEX, "REFERENCES").replace(UNIQUE_REGEX, "UNIQUE");
					// Dialect-specific type rewrites keyed off column naming
					// conventions (see the *_FIELD_REGEX constants above).
					if (dbType === "postgresql") {
						formattedCol = formattedCol.replace(BOOLEAN_FIELD_REGEX, "$1 boolean").replace(DATE_FIELD_REGEX, "$1 timestamp with time zone").replace(TEXT_FIELD_REGEX, "$1 varchar(255)").replace(JSON_FIELD_REGEX, "$1 jsonb");
					} else if (dbType === "mysql") {
						formattedCol = formattedCol.replace(BOOLEAN_FIELD_REGEX, "$1 TINYINT(1)").replace(DATE_FIELD_REGEX, "$1 DATETIME").replace(TEXT_FIELD_REGEX, "$1 VARCHAR(255)").replace(JSON_FIELD_REGEX, "$1 JSON");
					} else if (dbType === "sqlite") {
						// SQLite has no JSON column type; keep text, note the intent.
						formattedCol = formattedCol.replace(
							JSON_FIELD_REGEX,
							"$1 text -- stored as JSON"
						);
					}
					return formattedCol;
				}).map((col) => ` ${col}`).join(",\n");
				return `CREATE TABLE IF NOT EXISTS "${tableName}" (
${formattedColumns}
);`;
			}
		}
		if (trimmedStmt.startsWith("create index")) {
			const indexMatch = statement.match(CREATE_INDEX_REGEX);
			if (indexMatch) {
				const [_, indexName] = indexMatch;
				rollbackStatements.unshift(`DROP INDEX IF EXISTS "${indexName}"`);
				// Reuse everything from "on" onwards verbatim (table + column list).
				return `CREATE INDEX IF NOT EXISTS "${indexName}" ${statement.substring(statement.toLowerCase().indexOf("on")).trim()};`;
			}
		}
		// Fallback for any other statement: uppercase common SQL keywords only.
		return `${statement.trim().replace(CREATE_TABLE_KEYWORD_REGEX, "CREATE TABLE").replace(CREATE_INDEX_KEYWORD_REGEX, "CREATE INDEX").replace(ALTER_TABLE_REGEX, "ALTER TABLE").replace(INSERT_INTO_REGEX, "INSERT INTO").replace(UPDATE_REGEX, "UPDATE").replace(DELETE_FROM_REGEX, "DELETE FROM").replace(SELECT_REGEX, "SELECT").replace(FROM_REGEX, "FROM").replace(WHERE_REGEX, "WHERE").replace(JOIN_REGEX, "JOIN").replace(ON_REGEX, "ON").replace(AND_REGEX, "AND").replace(OR_REGEX, "OR")};`;
	}).join("\n\n");
	// Cloudflare D1 does not support explicit transactions.
	const useTransactions = dbType !== "d1";
	const transactionStart = useTransactions ? dbType === "mysql" ? "START TRANSACTION;" : "BEGIN;" : "";
	const transactionEnd = useTransactions ? "COMMIT;" : "";
	const timestamp = options?.timestamp || (/* @__PURE__ */ new Date()).toISOString();
	return `-- Migration generated by C15T (${timestamp})
-- Database type: ${dbType}
-- Description: Automatically generated schema migration
--
-- Wrapped in a transaction for atomicity
-- To roll back this migration, use the ROLLBACK section below

${transactionStart}
-- MIGRATION
${formattedStatements}
${transactionEnd}

-- ROLLBACK
-- Uncomment the section below to roll back this migration
/*
${transactionStart}

${rollbackStatements.join(";\n\n")};

${transactionEnd}
*/`;
}
|
|
222
|
+
/**
 * Compiles the Kysely migrations for the configured consent tables and
 * formats them into a timestamped SQL file.
 *
 * @param {object} args
 * @param {object} args.options - c15t options handed to getMigrations.
 * @param {string} [args.file] - Output path; defaults to a timestamped file
 *   under ./c15t_migrations/.
 * @param {object} [args.adapter] - Adapter whose provider selects the dialect.
 * @returns {Promise<{code: string, fileName: string}>}
 */
const generateMigrations = async ({
	options,
	file,
	adapter
}) => {
	const { compileMigrations } = await getMigrations(options);
	const migrations = await compileMigrations();

	// Resolve the dialect: adapter wins, then options.database, else sqlite.
	let databaseType = "sqlite";
	if (adapter?.options?.provider) {
		databaseType = adapter.options.provider;
	} else if (options.database && "options" in options.database && options.database.options && typeof options.database.options === "object" && "provider" in options.database.options) {
		databaseType = options.database.options.provider;
	}

	// A fixed timestamp keeps test snapshots deterministic.
	const isTest = process.env.NODE_ENV === "test" || file?.includes("test");
	const testTimestamp = options?._testTimestamp;
	const formatOptions = {
		timestamp: testTimestamp || (isTest ? "2023-01-01T00:00:00.000Z" : undefined)
	};

	const formattedMigrations = formatSQL(migrations, databaseType, formatOptions);
	const generatedFileName = file || `./c15t_migrations/${Date.now()}_create_tables.sql`;

	return {
		code: formattedMigrations,
		fileName: generatedFileName
	};
};
|
|
251
|
+
|
|
252
|
+
/**
 * Uppercases the first character of a string, leaving the rest untouched.
 *
 * @param {string} str - Input string; an empty string is returned unchanged.
 * @returns {string}
 */
function capitalizeFirstLetter(str) {
	if (str === "") {
		return str;
	}
	return str[0].toUpperCase() + str.slice(1);
}
|
|
255
|
+
|
|
256
|
+
/**
 * Generates or augments a Prisma schema with models for the consent tables.
 * An existing schema file is read and extended non-destructively (existing
 * fields/models are never overwritten); otherwise a fresh schema is created.
 *
 * @param {object} args
 * @param {object} args.adapter - Adapter; `adapter.options.provider` selects
 *   the Prisma datasource provider (defaults to "postgresql").
 * @param {object} args.options - c15t options resolved into consent tables.
 * @param {string} [args.file] - Schema path; defaults to "./prisma/schema.prisma".
 * @returns {Promise<{code: string, fileName: string}>} `code` is "" when the
 *   schema is already up to date.
 */
const generatePrismaSchema = async ({
	adapter,
	options,
	file
}) => {
	const provider = adapter.options?.provider || "postgresql";
	const tables = getConsentTables(options);
	const filePath = file || "./prisma/schema.prisma";
	const schemaPrismaExist = existsSync(path.join(process.cwd(), filePath));
	let schemaPrisma = "";
	if (schemaPrismaExist) {
		schemaPrisma = await fs.readFile(
			path.join(process.cwd(), filePath),
			"utf-8"
		);
	} else {
		schemaPrisma = getNewPrisma(provider);
	}
	// referenced model name -> set of models that reference it; used below to
	// add the reverse (has-many) relation fields Prisma requires.
	const manyToManyRelations = /* @__PURE__ */ new Map();
	for (const table in tables) {
		if (Object.hasOwn(tables, table)) {
			const fields = tables[table]?.fields;
			for (const field in fields) {
				if (Object.hasOwn(fields, field)) {
					const attr = fields[field];
					if (attr?.references) {
						const referencedModel = capitalizeFirstLetter(
							attr.references.model
						);
						if (!manyToManyRelations.has(referencedModel)) {
							manyToManyRelations.set(referencedModel, /* @__PURE__ */ new Set());
						}
						manyToManyRelations.get(referencedModel).add(capitalizeFirstLetter(table));
					}
				}
			}
		}
	}
	const schema = produceSchema(schemaPrisma, (builder) => {
		// Maps a c15t field type to the Prisma scalar type, honouring optionality.
		function getPrismaType(type, isOptional, isBigint) {
			const isJsonField = type === "json" || type === "jsonb";
			if (isJsonField) {
				if (provider === "postgresql" || provider === "mysql") {
					return isOptional ? "Json?" : "Json";
				}
				// Providers without native JSON store it as text.
				return isOptional ? "String?" : 'String @map("json_as_text")';
			}
			if (type === "string") {
				return isOptional ? "String?" : "String";
			}
			if (type === "number" && isBigint) {
				return isOptional ? "BigInt?" : "BigInt";
			}
			if (type === "number") {
				return isOptional ? "Int?" : "Int";
			}
			if (type === "boolean") {
				return isOptional ? "Boolean?" : "Boolean";
			}
			if (type === "date") {
				return isOptional ? "DateTime?" : "DateTime";
			}
			if (type === "string[]") {
				return "String[]";
			}
			if (type === "number[]") {
				return "Int[]";
			}
			// Unknown types default to String.
			return "String";
		}
		for (const table in tables) {
			if (Object.hasOwn(tables, table)) {
				const fields = tables[table]?.fields;
				const originalTable = tables[table]?.modelName;
				const modelName = capitalizeFirstLetter(originalTable || table);
				const prismaModel = builder.findByType("model", { name: modelName });
				if (!prismaModel) {
					if (provider === "mongodb") {
						// MongoDB requires the primary key to map to "_id".
						builder.model(modelName).field("id", "String").attribute("id").attribute(`map("_id")`);
					} else {
						builder.model(modelName).field("id", "String").attribute("id");
					}
				}
				for (const field in fields) {
					if (Object.hasOwn(fields, field)) {
						const attr = fields[field];
						const existingField = builder.findByType("field", {
							name: field,
							within: prismaModel?.properties
						});
						if (existingField) {
							// Never overwrite a field already present in the user's schema.
							continue;
						}
						builder.model(modelName).field(
							field,
							getPrismaType(attr.type, !attr?.required, attr?.bigint || false)
						);
						if (attr.unique) {
							builder.model(modelName).blockAttribute(`unique([${field}])`);
						}
						if (attr.references) {
							// Foreign key plus the owning-side relation field.
							builder.model(modelName).field(
								`${attr.references.model.toLowerCase()}`,
								capitalizeFirstLetter(attr.references.model)
							).attribute(
								`relation(fields: [${field}], references: [${attr.references.field}], onDelete: Cascade)`
							);
						}
						if (!attr.unique && !attr.references && provider === "mysql" && attr.type === "string") {
							// MySQL: plain strings become TEXT columns.
							builder.model(modelName).field(field).attribute("db.Text");
						}
					}
				}
				if (originalTable && originalTable !== modelName) {
					// Preserve the physical table name with @@map when it differs
					// from the (capitalised) model name.
					const hasMapAttribute = builder.findByType("attribute", {
						name: "map",
						within: prismaModel?.properties
					});
					if (!hasMapAttribute) {
						builder.model(modelName).blockAttribute("map", originalTable);
					}
				}
			}
		}
		// Add the reverse has-many relation field on every referenced model.
		for (const [
			referencedModel,
			relatedModels
		] of manyToManyRelations.entries()) {
			for (const relatedModel of relatedModels) {
				const fieldName = `${relatedModel.toLowerCase()}s`;
				const model = builder.findByType("model", { name: referencedModel });
				if (model) {
					const existingField = builder.findByType("field", {
						name: fieldName,
						within: model.properties
					});
					if (!existingField) {
						builder.model(referencedModel).field(fieldName, `${relatedModel}[]`);
					}
				}
			}
		}
	});
	return {
		// Empty code signals "no changes needed" to the caller.
		code: schema.trim() === schemaPrisma.trim() ? "" : schema,
		fileName: filePath
	};
};
|
|
404
|
+
const getNewPrisma = (provider) => `generator client {
|
|
405
|
+
provider = "prisma-client-js"
|
|
406
|
+
}
|
|
407
|
+
|
|
408
|
+
datasource db {
|
|
409
|
+
provider = "${provider}"
|
|
410
|
+
url = ${provider === "sqlite" ? `"file:./dev.db"` : `env("DATABASE_URL")`}
|
|
411
|
+
}`;
|
|
412
|
+
|
|
413
|
+
// Adapter id -> schema generator. Kysely has no schema file, so it gets raw
// SQL migrations instead.
const adapters = {
	prisma: generatePrismaSchema,
	drizzle: generateDrizzleSchema,
	kysely: generateMigrations
};

/**
 * Resolves and invokes the schema generator for the adapter in `opts`.
 * Exits the process with an error when the adapter is unsupported.
 *
 * @param {{adapter: {id: string}}} opts - Passed through to the generator.
 * @returns {Promise<object>} The generator's result.
 */
const getGenerator = (opts) => {
	const { adapter } = opts;
	const generator = adapter.id in adapters ? adapters[adapter.id] : null;
	if (!generator) {
		logger.error(`${adapter.id} is not supported.`);
		process.exit(1);
	}
	return generator(opts);
};
|
|
427
|
+
|
|
428
|
+
/**
 * Registers SvelteKit's `$env/*` virtual modules as inline data-URI modules
 * so a SvelteKit config file can be loaded outside a SvelteKit build.
 * Mutates `aliases` in place.
 *
 * @param {Record<string, string>} aliases - Alias map to extend.
 */
function addSvelteKitEnvModules(aliases) {
	// Both dynamic variants expose the same `process.env` passthrough.
	const dynamicModule = createDataUriModule(createDynamicEnvModule());
	aliases["$env/dynamic/private"] = dynamicModule;
	aliases["$env/dynamic/public"] = dynamicModule;
	// Static variants bake the current env values into the module source.
	aliases["$env/static/private"] = createDataUriModule(
		createStaticEnvModule(filterPrivateEnv("PUBLIC_", ""))
	);
	aliases["$env/static/public"] = createDataUriModule(
		createStaticEnvModule(filterPublicEnv("PUBLIC_", ""))
	);
}
|
|
442
|
+
/**
 * Wraps JavaScript source text in a data: URI so it can be imported directly.
 *
 * @param {string} module - JavaScript source code.
 * @returns {string} A `data:text/javascript` URI with the source URL-encoded.
 */
function createDataUriModule(module) {
	const encoded = encodeURIComponent(module);
	return `data:text/javascript;charset=utf-8,${encoded}`;
}
|
|
445
|
+
// Builds the source of a static `$env` module: one `export const` per env
// entry whose key is a valid JS identifier and not a reserved word.
// The trailing "jiti dirty hack" comment forces jiti to treat the data URI
// as an unknown extension.
function createStaticEnvModule(env) {
	const declarations = Object.keys(env).filter((k) => validIdentifier.test(k) && !reserved.has(k)).map((k) => `export const ${k} = ${JSON.stringify(env[k])};`);
	return `
${declarations.join("\n")}
// jiti dirty hack: .unknown
`;
}
|
|
452
|
+
// Builds the source of a dynamic `$env` module that simply re-exports the
// live `process.env`. See createStaticEnvModule for the jiti comment.
function createDynamicEnvModule() {
	return `
export const env = process.env;
// jiti dirty hack: .unknown
`;
}
|
|
458
|
+
/**
 * Selects "private" environment variables: keys that start with
 * `privatePrefix` but not with `publicPrefix`.
 *
 * @param {string} publicPrefix - Prefix marking public variables (excluded).
 * @param {string} privatePrefix - Required prefix ("" matches everything).
 * @returns {Record<string, string|undefined>} Filtered copy of process.env.
 */
function filterPrivateEnv(publicPrefix, privatePrefix) {
	const privateEntries = Object.entries(process.env).filter(([key]) => {
		return key.startsWith(privatePrefix) && !key.startsWith(publicPrefix);
	});
	return Object.fromEntries(privateEntries);
}
|
|
465
|
+
/**
 * Selects "public" environment variables: keys starting with `publicPrefix`.
 * Matches the original behavior exactly: a non-empty `privatePrefix` makes
 * the filter reject everything.
 *
 * @param {string} publicPrefix - Required key prefix.
 * @param {string} privatePrefix - Must be "" for any entry to match.
 * @returns {Record<string, string|undefined>} Filtered copy of process.env.
 */
function filterPublicEnv(publicPrefix, privatePrefix) {
	const publicEntries = Object.entries(process.env).filter(
		([key]) => key.startsWith(publicPrefix) && privatePrefix === ""
	);
	return Object.fromEntries(publicEntries);
}
|
|
472
|
+
// A valid JavaScript identifier (used to decide which env keys can become
// `export const` declarations in createStaticEnvModule).
const validIdentifier = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
// Reserved words (plus `eval`/`arguments`) that cannot be used as export names.
const reserved = /* @__PURE__ */ new Set([
	"do",
	"if",
	"in",
	"for",
	"let",
	"new",
	"try",
	"var",
	"case",
	"else",
	"enum",
	"eval",
	"null",
	"this",
	"true",
	"void",
	"with",
	"await",
	"break",
	"catch",
	"class",
	"const",
	"false",
	"super",
	"throw",
	"while",
	"yield",
	"delete",
	"export",
	"import",
	"public",
	"return",
	"static",
	"switch",
	"typeof",
	"default",
	"extends",
	"finally",
	"package",
	"private",
	"continue",
	"debugger",
	"function",
	"arguments",
	"interface",
	"protected",
	"implements",
	"instanceof"
]);
|
|
523
|
+
|
|
524
|
+
// Candidate base names for a c15t configuration file.
const configFileNames = ["c15t", "consent", "cmp"];
// Every extension a config file may carry, including SvelteKit-style
// ".server.*" variants.
const extensions = [
	".js",
	".jsx",
	".ts",
	".tsx",
	".cjs",
	".cts",
	".mjs",
	".mts",
	".server.cjs",
	".server.cts",
	".server.js",
	".server.jsx",
	".server.mjs",
	".server.mts",
	".server.ts",
	".server.tsx"
];
// Cartesian product: every name x extension.
let possiblePaths = configFileNames.flatMap(
	(name) => extensions.map((ext) => `${name}${ext}`)
);
// Common project directories searched for the config file ("" = project root).
const directories = [
	"",
	"lib/server/",
	"server/",
	"lib/",
	"utils/",
	"config/",
	"src/",
	"app/"
];
// Expand again: every directory x (name x extension).
possiblePaths = directories.flatMap(
	(dir) => possiblePaths.map((file) => `${dir}${file}`)
);
// Monorepo workspace globs also scanned by getConfig.
const monorepoSubdirs = ["packages/*", "apps/*"];
|
|
560
|
+
/**
 * Strips `//` and `/* *\/` comments plus trailing commas from a JSONC string
 * (e.g. tsconfig.json) so it can be fed to JSON.parse.
 *
 * @param {string} jsonString - JSON-with-comments text.
 * @returns {string} Plain JSON text.
 */
function stripJsonComments(jsonString) {
	// String literals are matched first so comment markers inside them survive.
	const withoutComments = jsonString.replace(
		/\\"|"(?:\\"|[^"])*"|(\/\/.*|\/\*[\s\S]*?\*\/)/g,
		(match, comment) => (comment ? "" : match)
	);
	// Remove trailing commas immediately before a closing brace/bracket.
	return withoutComments.replace(/,(?=\s*[}\]])/g, "");
}
|
|
566
|
+
/**
 * Reads path aliases from the project's tsconfig.json (preferred) or
 * jsconfig.json.
 *
 * @param {string} cwd - Project root to search in.
 * @returns {Record<string, string>|null} Alias map, or null when neither
 *   config file exists.
 */
function getPathAliases(cwd) {
	for (const fileName of ["tsconfig.json", "jsconfig.json"]) {
		const candidate = path.join(cwd, fileName);
		if (fs$1.existsSync(candidate)) {
			return extractAliasesFromConfigFile(candidate, cwd);
		}
	}
	return null;
}
|
|
577
|
+
/**
 * Parses a tsconfig/jsconfig file and resolves its `compilerOptions.paths`
 * into absolute alias -> directory mappings, stripping trailing `*` globs.
 * SvelteKit `$env/*` virtual modules are appended to the result.
 *
 * @param {string} configPath - Absolute path to the config file.
 * @param {string} cwd - Project root used to resolve baseUrl.
 * @returns {Record<string, string>|null} Alias map, or null when the file
 *   cannot be read or parsed (a warning is logged).
 */
function extractAliasesFromConfigFile(configPath, cwd) {
	try {
		const rawContent = fs$1.readFileSync(configPath, "utf8");
		const config = JSON.parse(stripJsonComments(rawContent));
		const { paths = {}, baseUrl = "." } = config.compilerOptions || {};

		const result = {};
		for (const [alias, aliasPaths] of Object.entries(paths)) {
			for (const aliasedPath of aliasPaths) {
				const resolvedBaseUrl = path.join(cwd, baseUrl);
				// "@/*" -> "@/", "src/*" -> "src/": drop the glob star.
				const finalAlias = alias.slice(-1) === "*" ? alias.slice(0, -1) : alias;
				const finalAliasedPath = aliasedPath.slice(-1) === "*" ? aliasedPath.slice(0, -1) : aliasedPath;
				result[finalAlias || ""] = path.join(resolvedBaseUrl, finalAliasedPath);
			}
		}

		addSvelteKitEnvModules(result);
		return result;
	} catch (error) {
		logger.warn(`Error parsing config file ${configPath}`, error);
		return null;
	}
}
|
|
600
|
+
/**
 * Builds the jiti loader options used to import TS/TSX config files:
 * Babel presets for TypeScript and React, all common extensions, and the
 * project's path aliases.
 *
 * @param {string} cwd - Project root (used to resolve path aliases).
 * @returns {object} Options object for c12/jiti.
 */
const jitiOptions = (cwd) => {
	const alias = getPathAliases(cwd) || {};
	const typescriptPreset = [
		babelPresetTypescript,
		{
			isTSX: true,
			allExtensions: true
		}
	];
	const reactPreset = [babelPresetReact, { runtime: "automatic" }];
	return {
		transformOptions: {
			babel: {
				presets: [typescriptPreset, reactPreset]
			}
		},
		extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs", ".mts", ".cts"],
		alias
	};
};
|
|
621
|
+
/**
 * Extracts c15t options from a loaded config module, supporting several
 * export shapes: factory functions, instances with `.options`, and directly
 * exported options objects (detected via an `appName` key).
 *
 * @param {object} config - The imported config module namespace.
 * @returns {object|null} The resolved options (or factory result), or null.
 */
function extractOptionsFromConfig(config) {
	// Factory-style exports: call them and return the result directly.
	for (const key of ["c15t", "default", "c15tInstance", "consent"]) {
		if (typeof config[key] === "function") {
			return config[key]();
		}
	}
	// Instance-style exports carrying an `.options` object.
	for (const key of ["c15t", "default", "c15tInstance", "instance", "consent", "config"]) {
		const options = config[key]?.options;
		if (options) {
			return options;
		}
	}
	// Directly exported options objects (recognised by their `appName`).
	for (const key of ["default", "c15t"]) {
		const value = config[key];
		if (value && typeof value === "object" && "appName" in value) {
			return value;
		}
	}
	return null;
}
|
|
638
|
+
/**
 * Resolves directory patterns under `cwd`. Patterns may contain a single `*`
 * (everything after it is ignored): the prefix directory is listed and each
 * subdirectory is returned as `prefix/name`. Literal patterns are returned
 * as-is when they name an existing directory.
 *
 * @param {string} cwd - Base directory.
 * @param {string[]} patterns - e.g. ["packages/*", "apps/*"].
 * @returns {string[]} Matching directory paths relative to cwd.
 */
function findDirectories(cwd, patterns) {
	const results = [];
	for (const pattern of patterns) {
		if (!pattern.includes("*")) {
			const literalPath = path.join(cwd, pattern);
			if (fs$1.existsSync(literalPath) && fs$1.statSync(literalPath).isDirectory()) {
				results.push(pattern);
			}
			continue;
		}
		const prefix = pattern.split("*")[0];
		const basePath = path.join(cwd, prefix);
		try {
			if (fs$1.existsSync(basePath)) {
				for (const entry of fs$1.readdirSync(basePath, { withFileTypes: true })) {
					if (entry.isDirectory()) {
						results.push(path.join(prefix, entry.name));
					}
				}
			}
		} catch {
			// Best-effort scan: unreadable directories are skipped silently.
		}
	}
	return results;
}
|
|
661
|
+
/**
 * Minimal sanity check on an extracted config: it must be a truthy object.
 *
 * @param {unknown} config - Candidate options value.
 * @returns {boolean}
 */
function validateConfig(config) {
	return config ? typeof config === "object" : false;
}
|
|
667
|
+
async function getConfig({
|
|
668
|
+
cwd,
|
|
669
|
+
configPath
|
|
670
|
+
}) {
|
|
671
|
+
const foundPaths = [];
|
|
672
|
+
const failedImports = [];
|
|
673
|
+
try {
|
|
674
|
+
let configFile = null;
|
|
675
|
+
if (configPath) {
|
|
676
|
+
const resolvedPath = path.join(cwd, configPath);
|
|
677
|
+
try {
|
|
678
|
+
if (!fs$1.existsSync(resolvedPath)) {
|
|
679
|
+
throw new C15TError(
|
|
680
|
+
`Configuration file not found: ${resolvedPath}
|
|
681
|
+
Make sure the path is correct and the file exists.`
|
|
682
|
+
);
|
|
683
|
+
}
|
|
684
|
+
foundPaths.push(resolvedPath);
|
|
685
|
+
const { config } = await loadConfig({
|
|
686
|
+
configFile: resolvedPath,
|
|
687
|
+
dotenv: true,
|
|
688
|
+
jitiOptions: jitiOptions(cwd)
|
|
689
|
+
});
|
|
690
|
+
configFile = extractOptionsFromConfig(config);
|
|
691
|
+
if (!configFile) {
|
|
692
|
+
throw new C15TError(
|
|
693
|
+
// biome-ignore lint/style/useTemplate: keep it split so its easier to read
|
|
694
|
+
`Found config file at ${resolvedPath} but couldn't extract c15t options.
|
|
695
|
+
Make sure you're exporting c15t with one of these patterns:
|
|
696
|
+
- export const c15t = c15tInstance({...})
|
|
697
|
+
- export const consent = c15tInstance({...})
|
|
698
|
+
- export const c15tInstance = c15tInstance({...})
|
|
699
|
+
- export default c15tInstance({...})`
|
|
700
|
+
);
|
|
701
|
+
}
|
|
702
|
+
} catch (e) {
|
|
703
|
+
if (fs$1.existsSync(resolvedPath)) {
|
|
704
|
+
failedImports.push(resolvedPath);
|
|
705
|
+
if (e instanceof C15TError) {
|
|
706
|
+
throw e;
|
|
707
|
+
}
|
|
708
|
+
throw new C15TError(
|
|
709
|
+
// biome-ignore lint/style/useTemplate: keep it split so its easier to read
|
|
710
|
+
`Config file found at ${resolvedPath} but failed to load.
|
|
711
|
+
This usually happens because of import problems:
|
|
712
|
+
- Check for invalid import paths
|
|
713
|
+
- Ensure all dependencies are installed
|
|
714
|
+
- Verify path aliases in tsconfig.json
|
|
715
|
+
|
|
716
|
+
Error details: ${e instanceof Error ? e.message : String(e)}`
|
|
717
|
+
);
|
|
718
|
+
}
|
|
719
|
+
throw e;
|
|
720
|
+
}
|
|
721
|
+
}
|
|
722
|
+
if (!configFile) {
|
|
723
|
+
const searchDirs = [""];
|
|
724
|
+
searchDirs.push(...findDirectories(cwd, monorepoSubdirs));
|
|
725
|
+
for (const dir of searchDirs) {
|
|
726
|
+
for (const possiblePath of possiblePaths) {
|
|
727
|
+
const configPath2 = path.join(dir, possiblePath);
|
|
728
|
+
const fullPath = path.join(cwd, configPath2);
|
|
729
|
+
if (!fs$1.existsSync(fullPath)) {
|
|
730
|
+
continue;
|
|
731
|
+
}
|
|
732
|
+
foundPaths.push(fullPath);
|
|
733
|
+
try {
|
|
734
|
+
const { config } = await loadConfig({
|
|
735
|
+
configFile: configPath2,
|
|
736
|
+
jitiOptions: jitiOptions(cwd)
|
|
737
|
+
});
|
|
738
|
+
if (Object.keys(config).length > 0) {
|
|
739
|
+
configFile = extractOptionsFromConfig(config);
|
|
740
|
+
if (configFile && validateConfig(configFile)) {
|
|
741
|
+
logger.info(`\u2705 Using c15t config from ${fullPath}`);
|
|
742
|
+
break;
|
|
743
|
+
}
|
|
744
|
+
}
|
|
745
|
+
} catch (e) {
|
|
746
|
+
if (typeof e === "object" && e && "message" in e && typeof e.message === "string" && e.message.includes(
|
|
747
|
+
"This module cannot be imported from a Client Component module"
|
|
748
|
+
)) {
|
|
749
|
+
throw new C15TError(
|
|
750
|
+
// biome-ignore lint/style/useTemplate: keep it split so its easier to read
|
|
751
|
+
`Found config file at ${fullPath}, but it imports 'server-only'.
|
|
752
|
+
Please temporarily remove the 'server-only' import while using the CLI,
|
|
753
|
+
and you can add it back afterwards.`
|
|
754
|
+
);
|
|
755
|
+
}
|
|
756
|
+
failedImports.push(fullPath);
|
|
757
|
+
}
|
|
758
|
+
}
|
|
759
|
+
if (configFile) {
|
|
760
|
+
break;
|
|
761
|
+
}
|
|
762
|
+
}
|
|
763
|
+
}
|
|
764
|
+
if (!configFile) {
|
|
765
|
+
if (foundPaths.length > 0) {
|
|
766
|
+
logger.error(
|
|
767
|
+
`\u274C Found ${foundPaths.length} potential config files, but couldn't load any of them:`
|
|
768
|
+
);
|
|
769
|
+
for (const filePath of foundPaths.slice(0, 3)) {
|
|
770
|
+
logger.error(` - ${filePath}`);
|
|
771
|
+
}
|
|
772
|
+
if (foundPaths.length > 3) {
|
|
773
|
+
logger.error(` - ...and ${foundPaths.length - 3} more`);
|
|
774
|
+
}
|
|
775
|
+
if (failedImports.length > 0) {
|
|
776
|
+
logger.error("\n\u2753 Common issues that prevent loading config files:");
|
|
777
|
+
logger.error(" - Missing dependencies (check your package.json)");
|
|
778
|
+
logger.error(
|
|
779
|
+
" - Import path issues (check your import statements)"
|
|
780
|
+
);
|
|
781
|
+
logger.error(
|
|
782
|
+
" - Path alias configuration (check your tsconfig.json)"
|
|
783
|
+
);
|
|
784
|
+
logger.error(
|
|
785
|
+
" - Export format (make sure you're exporting c15t, c15tInstance, consent, or default)"
|
|
786
|
+
);
|
|
787
|
+
}
|
|
788
|
+
throw new C15TError("Unable to load any c15t configuration file");
|
|
789
|
+
}
|
|
790
|
+
logger.error(
|
|
791
|
+
"\u274C No c15t configuration files found in standard locations"
|
|
792
|
+
);
|
|
793
|
+
logger.info("\n\u{1F4DD} Create a c15t.ts file with your configuration:");
|
|
794
|
+
logger.info(`
|
|
795
|
+
import { c15tInstance } from '@c15t/backend';
|
|
796
|
+
|
|
797
|
+
export const c15t = c15tInstance({
|
|
798
|
+
appName: 'My App',
|
|
799
|
+
basePath: '/api/c15t',
|
|
800
|
+
// Add your configuration here
|
|
801
|
+
});
|
|
802
|
+
`);
|
|
803
|
+
throw new C15TError(
|
|
804
|
+
"No c15t config file found. Create a c15t.ts file or specify with --config"
|
|
805
|
+
);
|
|
806
|
+
}
|
|
807
|
+
return configFile;
|
|
808
|
+
} catch (e) {
|
|
809
|
+
if (typeof e === "object" && e && "message" in e && typeof e.message === "string" && e.message.includes(
|
|
810
|
+
"This module cannot be imported from a Client Component module"
|
|
811
|
+
)) {
|
|
812
|
+
logger.error(
|
|
813
|
+
"\u274C Server-only import detected in config file\nPlease temporarily remove the 'server-only' import while using the CLI,\nand you can add it back afterwards."
|
|
814
|
+
);
|
|
815
|
+
process.exit(1);
|
|
816
|
+
}
|
|
817
|
+
if (e instanceof C15TError) {
|
|
818
|
+
logger.error(`\u274C ${e.message}`);
|
|
819
|
+
} else {
|
|
820
|
+
logger.error(`\u274C Couldn't read your c15t configuration`);
|
|
821
|
+
logger.error(` Error: ${e instanceof Error ? e.message : String(e)}`);
|
|
822
|
+
}
|
|
823
|
+
if (failedImports.length > 0) {
|
|
824
|
+
logger.info(
|
|
825
|
+
"\n\u{1F4A1} Tip: If you're having import issues, try running with verbose logging:"
|
|
826
|
+
);
|
|
827
|
+
logger.info(" DEBUG=c15t* npx c15t@latest <command>");
|
|
828
|
+
}
|
|
829
|
+
process.exit(1);
|
|
830
|
+
}
|
|
831
|
+
}
|
|
832
|
+
|
|
833
|
+
/**
 * Implements the `generate` command: loads the c15t configuration,
 * builds the schema for the configured database adapter, and writes
 * (or appends/overwrites) it to disk after confirming with the user.
 *
 * @param {object} opts - Raw commander options ({ cwd, config?, output?, y? }).
 * @returns {Promise<void>} Always terminates the process via `process.exit`
 *   except when no configuration file is found (plain `return`).
 */
async function generateAction(opts) {
  const options = z
    .object({
      cwd: z.string(),
      config: z.string().optional(),
      output: z.string().optional(),
      y: z.boolean().optional()
    })
    .parse(opts);
  const cwd = path.resolve(options.cwd);
  if (!existsSync(cwd)) {
    logger.error(`The directory "${cwd}" does not exist.`);
    process.exit(1);
  }
  const config = await getConfig({
    cwd,
    configPath: options.config
  });
  if (!config) {
    logger.error(
      "No configuration file found. Add a `c15t.ts` file to your project or pass the path to the configuration file using the `--config` flag."
    );
    return;
  }
  // Report adapter initialization failures instead of letting them escape
  // as unhandled rejections.
  const adapter = await getAdapter(config).catch((e) => {
    logger.error(e.message);
    process.exit(1);
  });
  const spinner = yoctoSpinner({ text: "preparing schema..." }).start();
  const schema = await getGenerator({
    adapter,
    file: options.output,
    options: config
  });
  spinner.stop();
  if (!schema.code) {
    logger.info("Your schema is already up to date.");
    process.exit(0);
  }
  // The generator flagged an existing file: confirm before touching it.
  if (schema.append || schema.overwrite) {
    let confirm2 = options.y;
    if (!confirm2) {
      const response = await prompts({
        type: "confirm",
        name: "confirm",
        message: `The file ${schema.fileName} already exists. Do you want to ${chalk.yellow(
          `${schema.overwrite ? "overwrite" : "append"}`
        )} the schema to the file?`
      });
      confirm2 = response.confirm;
    }
    if (confirm2) {
      const exist = existsSync(path.join(cwd, schema.fileName));
      if (!exist) {
        await fs.mkdir(path.dirname(path.join(cwd, schema.fileName)), {
          recursive: true
        });
      }
      if (schema.overwrite) {
        await fs.writeFile(path.join(cwd, schema.fileName), schema.code);
      } else {
        await fs.appendFile(path.join(cwd, schema.fileName), schema.code);
      }
      logger.success(
        `\u{1F680} Schema was ${schema.overwrite ? "overwritten" : "appended"} successfully!`
      );
      process.exit(0);
    } else {
      logger.error("Schema generation aborted.");
      process.exit(1);
    }
  }
  let confirm = options.y;
  if (!confirm) {
    const response = await prompts({
      type: "confirm",
      name: "confirm",
      message: `Do you want to generate the schema to ${chalk.yellow(
        schema.fileName
      )}?`
    });
    confirm = response.confirm;
  }
  if (!confirm) {
    logger.error("Schema generation aborted.");
    process.exit(1);
  }
  // Fix: ensure the target directory exists for BOTH the default location
  // and an explicit --output path. Previously only the default path's
  // directory was created, so writing --output into a missing directory
  // failed with ENOENT. `mkdir` with `recursive: true` is a no-op when the
  // directory already exists.
  const targetPath = options.output || path.join(cwd, schema.fileName);
  await fs.mkdir(path.dirname(targetPath), { recursive: true });
  await fs.writeFile(targetPath, schema.code);
  logger.success("\u{1F680} Schema was generated successfully!");
  process.exit(0);
}
|
|
933
|
+
// CLI wiring for the `generate` command; the heavy lifting lives in
// `generateAction` above.
const generate = new Command("generate")
  .option(
    "-c, --cwd <cwd>",
    "the working directory. defaults to the current directory.",
    process.cwd()
  )
  .option(
    "--config <config>",
    "the path to the configuration file. defaults to the first configuration file found."
  )
  .option("--output <output>", "the file to output to the generated schema")
  .option("-y, --y", "automatically answer yes to all prompts", false)
  .action(generateAction);
|
|
941
|
+
|
|
942
|
+
/**
 * Implements the `migrate` command: loads the c15t configuration and, for
 * the built-in Kysely adapter only, computes the pending migrations and
 * runs them after user confirmation. Prisma/Drizzle users are redirected
 * to the `generate` command plus their ORM's own migration tooling.
 *
 * @param {object} opts - Raw commander options ({ cwd, config?, y? }).
 * @returns {Promise<void>} Always terminates the process via `process.exit`
 *   except when no configuration file is found (plain `return`).
 */
async function migrateAction(opts) {
  const options = z
    .object({
      cwd: z.string(),
      config: z.string().optional(),
      y: z.boolean().optional()
    })
    .parse(opts);
  const cwd = path.resolve(options.cwd);
  if (!existsSync(cwd)) {
    logger.error(`The directory "${cwd}" does not exist.`);
    process.exit(1);
  }
  const config = await getConfig({
    cwd,
    configPath: options.config
  });
  if (!config) {
    logger.error(
      "No configuration file found. Add a `c15t.ts` file to your project or pass the path to the configuration file using the `--config` flag."
    );
    return;
  }
  // Consistency fix: handle adapter initialization errors the same way
  // generateAction does, instead of letting the rejection escape as an
  // unhandled promise rejection.
  const db = await getAdapter(config).catch((e) => {
    logger.error(e.message);
    process.exit(1);
  });
  if (!db) {
    logger.error(
      "Invalid database configuration. Make sure you're not using adapters. Migrate command only works with built-in Kysely adapter."
    );
    process.exit(1);
  }
  if (db.id !== "kysely") {
    if (db.id === "prisma") {
      logger.error(
        "The migrate command only works with the built-in Kysely adapter. For Prisma, run `npx @c15t/cli generate` to create the schema, then use Prisma\u2019s migrate or push to apply it."
      );
      process.exit(0);
    }
    if (db.id === "drizzle") {
      logger.error(
        "The migrate command only works with the built-in Kysely adapter. For Drizzle, run `npx @c15t/cli generate` to create the schema, then use Drizzle\u2019s migrate or push to apply it."
      );
      process.exit(0);
    }
    logger.error("Migrate command isn't supported for this adapter.");
    process.exit(1);
  }
  const spinner = yoctoSpinner({ text: "preparing migration..." }).start();
  const { toBeAdded, toBeCreated, runMigrations } = await getMigrations(config);
  if (!toBeAdded.length && !toBeCreated.length) {
    spinner.stop();
    logger.info("\u{1F680} No migrations needed.");
    process.exit(0);
  }
  spinner.stop();
  logger.info("\u{1F511} The migration will affect the following:");
  for (const table of [...toBeCreated, ...toBeAdded]) {
    console.log(
      "->",
      chalk.magenta(Object.keys(table.fields).join(", ")),
      chalk.white("fields on"),
      chalk.yellow(`${table.table}`),
      chalk.white("table.")
    );
  }
  let migrate2 = options.y;
  if (!migrate2) {
    const response = await prompts({
      type: "confirm",
      name: "migrate",
      message: "Are you sure you want to run these migrations?",
      initial: false
    });
    migrate2 = response.migrate;
  }
  if (!migrate2) {
    logger.info("Migration cancelled.");
    process.exit(0);
  }
  // `spinner` is a const defined above, so the previous `spinner?.start`
  // optional chain was dead weight.
  spinner.start("migrating...");
  await runMigrations();
  spinner.stop();
  logger.info("\u{1F680} migration was completed successfully!");
  process.exit(0);
}
|
|
1024
|
+
// CLI wiring for the `migrate` command; the heavy lifting lives in
// `migrateAction` above.
const migrate = new Command("migrate")
  .option(
    "-c, --cwd <cwd>",
    "the working directory. defaults to the current directory.",
    process.cwd()
  )
  .option(
    "--config <config>",
    "the path to the configuration file. defaults to the first configuration file found."
  )
  .option(
    "-y, --y",
    "automatically accept and run migrations without prompting",
    false
  )
  .action(migrateAction);
|
|
1036
|
+
|
|
1037
|
+
// `secret` command: prints a freshly generated 32-byte hex secret together
// with the .env line the user should copy.
const generateSecret = new Command("secret").action(() => {
  const secret = Crypto.randomBytes(32).toString("hex");
  const label = chalk.gray("# Auth Secret");
  const envLine = chalk.green(`
C15T_SECRET=${secret}`);
  logger.info(`
Add the following to your .env file:
${label + envLine}`);
});
|
|
1044
|
+
|
|
1045
|
+
/**
 * Reads this package's own package.json (used for `--version`).
 *
 * Fix: resolve the path relative to this module via `import.meta.url`
 * rather than `path.join("package.json")`, which resolved against the
 * user's `process.cwd()` and therefore read the *user's* package.json
 * (or failed) when the CLI ran via npx. `dist/index.mjs` sits one level
 * below the package root, hence `../package.json`.
 *
 * @returns {object} Parsed package.json contents.
 */
function getPackageInfo() {
  const packageJsonPath = new URL("../package.json", import.meta.url);
  return fs$2.readJSONSync(packageJsonPath);
}
|
|
1049
|
+
|
|
1050
|
+
// Exit cleanly on termination signals instead of surfacing them as errors.
for (const signal of ["SIGINT", "SIGTERM"]) {
  process.on(signal, () => process.exit(0));
}
|
|
1052
|
+
/**
 * CLI entry point: assembles the `c15t` program from its subcommands and
 * parses argv.
 *
 * @returns {Promise<void>}
 */
async function main() {
  const program = new Command("c15t");
  // getPackageInfo is synchronous; the previous `await` was a no-op.
  const packageInfo = getPackageInfo();
  program
    .addCommand(migrate)
    .addCommand(generate)
    .addCommand(generateSecret)
    .version(packageInfo.version || "1.1.2")
    .description("c15t CLI");
  program.parse();
}
// Fix: don't leave the entry-point promise floating — report unexpected
// failures and exit non-zero instead of raising an unhandled rejection.
main().catch((err) => {
  logger.error(err instanceof Error ? err.message : String(err));
  process.exit(1);
});
|
package/package.json
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@c15t/cli",
|
|
3
|
+
"version": "1.2.0-beta.17",
|
|
4
|
+
"description": "The CLI for c15t",
|
|
5
|
+
"exports": "./dist/index.mjs",
|
|
6
|
+
"main": "./dist/index.mjs",
|
|
7
|
+
"module": "dist/index.mjs",
|
|
8
|
+
"bin": "./dist/index.mjs",
|
|
9
|
+
"files": [
|
|
10
|
+
"dist"
|
|
11
|
+
],
|
|
12
|
+
"scripts": {
|
|
13
|
+
"build": "unbuild",
|
|
14
|
+
"fmt": "pnpm biome format --write . && biome check --formatter-enabled=false --linter-enabled=false --organize-imports-enabled=true --write",
|
|
15
|
+
"lint": "pnpm biome lint ./src",
|
|
16
|
+
"start": "node ./dist/index.mjs",
|
|
17
|
+
"stub": "unbuild --stub",
|
|
18
|
+
"test": "vitest"
|
|
19
|
+
},
|
|
20
|
+
"dependencies": {
|
|
21
|
+
"@babel/preset-react": "^7.26.3",
|
|
22
|
+
"@babel/preset-typescript": "^7.26.0",
|
|
23
|
+
"@c15t/backend": "workspace:*",
|
|
24
|
+
"@mrleebo/prisma-ast": "^0.12.1",
|
|
25
|
+
"@prisma/client": "^6.5.0",
|
|
26
|
+
"@types/better-sqlite3": "^7.6.12",
|
|
27
|
+
"@types/prompts": "^2.4.9",
|
|
28
|
+
"better-sqlite3": "^11.8.1",
|
|
29
|
+
"c12": "^3.0.2",
|
|
30
|
+
"chalk": "^5.4.1",
|
|
31
|
+
"commander": "^13.1.0",
|
|
32
|
+
"dotenv": "^16.4.7",
|
|
33
|
+
"drizzle-orm": "^0.40.0",
|
|
34
|
+
"fs-extra": "^11.3.0",
|
|
35
|
+
"get-tsconfig": "^4.10.0",
|
|
36
|
+
"prisma": "^6.4.1",
|
|
37
|
+
"prompts": "^2.4.2",
|
|
38
|
+
"tinyexec": "^0.3.2",
|
|
39
|
+
"unbuild": "^3.5.0",
|
|
40
|
+
"yocto-spinner": "^0.2.1",
|
|
41
|
+
"zod": "^3.24.2"
|
|
42
|
+
},
|
|
43
|
+
"devDependencies": {
|
|
44
|
+
"@types/fs-extra": "^11.0.4",
|
|
45
|
+
"vitest": "^3.0.8"
|
|
46
|
+
},
|
|
47
|
+
"publishConfig": {
|
|
48
|
+
"access": "public",
|
|
49
|
+
"executableFiles": [
|
|
50
|
+
"./dist/index.mjs"
|
|
51
|
+
]
|
|
52
|
+
}
|
|
53
|
+
}
|