@zenstackhq/cli 3.3.3 → 3.4.0-beta.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +2722 -129
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +2702 -109
- package/dist/index.js.map +1 -1
- package/package.json +11 -10
package/dist/index.cjs
CHANGED
|
@@ -29,8 +29,8 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
|
|
|
29
29
|
|
|
30
30
|
// src/index.ts
|
|
31
31
|
var import_config = require("dotenv/config");
|
|
32
|
-
var
|
|
33
|
-
var
|
|
32
|
+
var import_language5 = require("@zenstackhq/language");
|
|
33
|
+
var import_colors12 = __toESM(require("colors"), 1);
|
|
34
34
|
var import_commander = require("commander");
|
|
35
35
|
|
|
36
36
|
// src/actions/check.ts
|
|
@@ -42,6 +42,7 @@ var import_ast = require("@zenstackhq/language/ast");
|
|
|
42
42
|
var import_sdk = require("@zenstackhq/sdk");
|
|
43
43
|
var import_colors = __toESM(require("colors"), 1);
|
|
44
44
|
var import_node_fs = __toESM(require("fs"), 1);
|
|
45
|
+
var import_node_module = require("module");
|
|
45
46
|
var import_node_path = __toESM(require("path"), 1);
|
|
46
47
|
|
|
47
48
|
// src/cli-error.ts
|
|
@@ -83,8 +84,10 @@ function getSchemaFile(file) {
|
|
|
83
84
|
}
|
|
84
85
|
}
|
|
85
86
|
__name(getSchemaFile, "getSchemaFile");
|
|
86
|
-
async function loadSchemaDocument(schemaFile) {
|
|
87
|
-
const
|
|
87
|
+
async function loadSchemaDocument(schemaFile, opts = {}) {
|
|
88
|
+
const returnServices = opts.returnServices ?? false;
|
|
89
|
+
const mergeImports = opts.mergeImports ?? true;
|
|
90
|
+
const loadResult = await (0, import_language.loadDocument)(schemaFile, [], mergeImports);
|
|
88
91
|
if (!loadResult.success) {
|
|
89
92
|
loadResult.errors.forEach((err) => {
|
|
90
93
|
console.error(import_colors.default.red(err));
|
|
@@ -94,6 +97,10 @@ async function loadSchemaDocument(schemaFile) {
|
|
|
94
97
|
loadResult.warnings.forEach((warn) => {
|
|
95
98
|
console.warn(import_colors.default.yellow(warn));
|
|
96
99
|
});
|
|
100
|
+
if (returnServices) return {
|
|
101
|
+
model: loadResult.model,
|
|
102
|
+
services: loadResult.services
|
|
103
|
+
};
|
|
97
104
|
return loadResult.model;
|
|
98
105
|
}
|
|
99
106
|
__name(loadSchemaDocument, "loadSchemaDocument");
|
|
@@ -151,10 +158,10 @@ function findUp(names, cwd = process.cwd(), multiple = false, result = []) {
|
|
|
151
158
|
}
|
|
152
159
|
const target = names.find((name) => import_node_fs.default.existsSync(import_node_path.default.join(cwd, name)));
|
|
153
160
|
if (multiple === false && target) {
|
|
154
|
-
return import_node_path.default.
|
|
161
|
+
return import_node_path.default.resolve(cwd, target);
|
|
155
162
|
}
|
|
156
163
|
if (target) {
|
|
157
|
-
result.push(import_node_path.default.
|
|
164
|
+
result.push(import_node_path.default.resolve(cwd, target));
|
|
158
165
|
}
|
|
159
166
|
const up = import_node_path.default.resolve(cwd, "..");
|
|
160
167
|
if (up === cwd) {
|
|
@@ -183,6 +190,44 @@ function getOutputPath(options, schemaFile) {
|
|
|
183
190
|
}
|
|
184
191
|
}
|
|
185
192
|
__name(getOutputPath, "getOutputPath");
|
|
193
|
+
async function getZenStackPackages(searchPath) {
|
|
194
|
+
const pkgJsonFile = findUp([
|
|
195
|
+
"package.json"
|
|
196
|
+
], searchPath, false);
|
|
197
|
+
if (!pkgJsonFile) {
|
|
198
|
+
return [];
|
|
199
|
+
}
|
|
200
|
+
let pkgJson;
|
|
201
|
+
try {
|
|
202
|
+
pkgJson = JSON.parse(import_node_fs.default.readFileSync(pkgJsonFile, "utf8"));
|
|
203
|
+
} catch {
|
|
204
|
+
return [];
|
|
205
|
+
}
|
|
206
|
+
const packages = Array.from(new Set([
|
|
207
|
+
...Object.keys(pkgJson.dependencies ?? {}),
|
|
208
|
+
...Object.keys(pkgJson.devDependencies ?? {})
|
|
209
|
+
].filter((p) => p.startsWith("@zenstackhq/")))).sort();
|
|
210
|
+
const require2 = (0, import_node_module.createRequire)(pkgJsonFile);
|
|
211
|
+
const result = packages.map((pkg) => {
|
|
212
|
+
try {
|
|
213
|
+
const depPkgJson = require2(`${pkg}/package.json`);
|
|
214
|
+
if (depPkgJson.private) {
|
|
215
|
+
return void 0;
|
|
216
|
+
}
|
|
217
|
+
return {
|
|
218
|
+
pkg,
|
|
219
|
+
version: depPkgJson.version
|
|
220
|
+
};
|
|
221
|
+
} catch {
|
|
222
|
+
return {
|
|
223
|
+
pkg,
|
|
224
|
+
version: void 0
|
|
225
|
+
};
|
|
226
|
+
}
|
|
227
|
+
});
|
|
228
|
+
return result.filter((p) => !!p);
|
|
229
|
+
}
|
|
230
|
+
__name(getZenStackPackages, "getZenStackPackages");
|
|
186
231
|
|
|
187
232
|
// src/actions/check.ts
|
|
188
233
|
async function run(options) {
|
|
@@ -198,7 +243,12 @@ async function run(options) {
|
|
|
198
243
|
__name(run, "run");
|
|
199
244
|
|
|
200
245
|
// src/actions/db.ts
|
|
246
|
+
var import_language2 = require("@zenstackhq/language");
|
|
247
|
+
var import_ast4 = require("@zenstackhq/language/ast");
|
|
248
|
+
var import_colors4 = __toESM(require("colors"), 1);
|
|
201
249
|
var import_node_fs2 = __toESM(require("fs"), 1);
|
|
250
|
+
var import_node_path2 = __toESM(require("path"), 1);
|
|
251
|
+
var import_ora = __toESM(require("ora"), 1);
|
|
202
252
|
|
|
203
253
|
// src/utils/exec-utils.ts
|
|
204
254
|
var import_child_process = require("child_process");
|
|
@@ -248,12 +298,2156 @@ function execPrisma(args, options) {
|
|
|
248
298
|
}
|
|
249
299
|
__name(execPrisma, "execPrisma");
|
|
250
300
|
|
|
301
|
+
// src/actions/pull/index.ts
|
|
302
|
+
var import_colors3 = __toESM(require("colors"), 1);
|
|
303
|
+
var import_ast3 = require("@zenstackhq/language/ast");
|
|
304
|
+
var import_factory = require("@zenstackhq/language/factory");
|
|
305
|
+
var import_langium = require("langium");
|
|
306
|
+
var import_common_helpers = require("@zenstackhq/common-helpers");
|
|
307
|
+
|
|
308
|
+
// src/actions/pull/utils.ts
|
|
309
|
+
var import_ast2 = require("@zenstackhq/language/ast");
|
|
310
|
+
var import_utils = require("@zenstackhq/language/utils");
|
|
311
|
+
function isDatabaseManagedAttribute(name) {
|
|
312
|
+
return [
|
|
313
|
+
"@relation",
|
|
314
|
+
"@id",
|
|
315
|
+
"@unique"
|
|
316
|
+
].includes(name) || name.startsWith("@db.");
|
|
317
|
+
}
|
|
318
|
+
__name(isDatabaseManagedAttribute, "isDatabaseManagedAttribute");
|
|
319
|
+
function getDatasource(model) {
|
|
320
|
+
const datasource = model.declarations.find((d) => d.$type === "DataSource");
|
|
321
|
+
if (!datasource) {
|
|
322
|
+
throw new CliError("No datasource declaration found in the schema.");
|
|
323
|
+
}
|
|
324
|
+
const urlField = datasource.fields.find((f) => f.name === "url");
|
|
325
|
+
if (!urlField) throw new CliError(`No url field found in the datasource declaration.`);
|
|
326
|
+
let url = (0, import_utils.getStringLiteral)(urlField.value);
|
|
327
|
+
if (!url && (0, import_ast2.isInvocationExpr)(urlField.value)) {
|
|
328
|
+
const envName = (0, import_utils.getStringLiteral)(urlField.value.args[0]?.value);
|
|
329
|
+
if (!envName) {
|
|
330
|
+
throw new CliError("The url field must be a string literal or an env().");
|
|
331
|
+
}
|
|
332
|
+
if (!process.env[envName]) {
|
|
333
|
+
throw new CliError(`Environment variable ${envName} is not set, please set it to the database connection string.`);
|
|
334
|
+
}
|
|
335
|
+
url = process.env[envName];
|
|
336
|
+
}
|
|
337
|
+
if (!url) {
|
|
338
|
+
throw new CliError("The url field must be a string literal or an env().");
|
|
339
|
+
}
|
|
340
|
+
if (url.startsWith("file:")) {
|
|
341
|
+
url = new URL(url, `file:${model.$document.uri.path}`).pathname;
|
|
342
|
+
if (process.platform === "win32" && url[0] === "/") url = url.slice(1);
|
|
343
|
+
}
|
|
344
|
+
const defaultSchemaField = datasource.fields.find((f) => f.name === "defaultSchema");
|
|
345
|
+
const defaultSchema = defaultSchemaField && (0, import_utils.getStringLiteral)(defaultSchemaField.value) || "public";
|
|
346
|
+
const schemasField = datasource.fields.find((f) => f.name === "schemas");
|
|
347
|
+
const schemas = schemasField && (0, import_utils.getLiteralArray)(schemasField.value)?.filter((s) => s !== void 0) || [];
|
|
348
|
+
const provider = (0, import_utils.getStringLiteral)(datasource.fields.find((f) => f.name === "provider")?.value);
|
|
349
|
+
if (!provider) {
|
|
350
|
+
throw new CliError(`Datasource "${datasource.name}" is missing a "provider" field.`);
|
|
351
|
+
}
|
|
352
|
+
return {
|
|
353
|
+
name: datasource.name,
|
|
354
|
+
provider,
|
|
355
|
+
url,
|
|
356
|
+
defaultSchema,
|
|
357
|
+
schemas,
|
|
358
|
+
allSchemas: [
|
|
359
|
+
defaultSchema,
|
|
360
|
+
...schemas
|
|
361
|
+
]
|
|
362
|
+
};
|
|
363
|
+
}
|
|
364
|
+
__name(getDatasource, "getDatasource");
|
|
365
|
+
function getDbName(decl, includeSchema = false) {
|
|
366
|
+
if (!("attributes" in decl)) return decl.name;
|
|
367
|
+
const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@schema");
|
|
368
|
+
let schema = "public";
|
|
369
|
+
if (schemaAttr) {
|
|
370
|
+
const schemaAttrValue = schemaAttr.args[0]?.value;
|
|
371
|
+
if (schemaAttrValue?.$type === "StringLiteral") {
|
|
372
|
+
schema = schemaAttrValue.value;
|
|
373
|
+
}
|
|
374
|
+
}
|
|
375
|
+
const formatName = /* @__PURE__ */ __name((name) => `${schema && includeSchema ? `${schema}.` : ""}${name}`, "formatName");
|
|
376
|
+
const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@map" || a.decl.ref?.name === "@map");
|
|
377
|
+
if (!nameAttr) return formatName(decl.name);
|
|
378
|
+
const attrValue = nameAttr.args[0]?.value;
|
|
379
|
+
if (attrValue?.$type !== "StringLiteral") return formatName(decl.name);
|
|
380
|
+
return formatName(attrValue.value);
|
|
381
|
+
}
|
|
382
|
+
__name(getDbName, "getDbName");
|
|
383
|
+
function getRelationFkName(decl) {
|
|
384
|
+
const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
|
|
385
|
+
const schemaAttrValue = relationAttr?.args.find((a) => a.name === "map")?.value;
|
|
386
|
+
return schemaAttrValue?.value;
|
|
387
|
+
}
|
|
388
|
+
__name(getRelationFkName, "getRelationFkName");
|
|
389
|
+
function getRelationFieldsKey(decl) {
|
|
390
|
+
const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
|
|
391
|
+
if (!relationAttr) return void 0;
|
|
392
|
+
const fieldsArg = relationAttr.args.find((a) => a.name === "fields")?.value;
|
|
393
|
+
if (!fieldsArg || fieldsArg.$type !== "ArrayExpr") return void 0;
|
|
394
|
+
const fieldNames = fieldsArg.items.filter((item) => item.$type === "ReferenceExpr").map((item) => item.target?.$refText || item.target?.ref?.name).filter((name) => !!name).sort();
|
|
395
|
+
return fieldNames.length > 0 ? fieldNames.join(",") : void 0;
|
|
396
|
+
}
|
|
397
|
+
__name(getRelationFieldsKey, "getRelationFieldsKey");
|
|
398
|
+
function getDeclarationRef(type2, name, services) {
|
|
399
|
+
const node = services.shared.workspace.IndexManager.allElements(type2).find((m) => m.node && getDbName(m.node) === name)?.node;
|
|
400
|
+
if (!node) throw new CliError(`Declaration not found: ${name}`);
|
|
401
|
+
return node;
|
|
402
|
+
}
|
|
403
|
+
__name(getDeclarationRef, "getDeclarationRef");
|
|
404
|
+
function getEnumRef(name, services) {
|
|
405
|
+
return getDeclarationRef("Enum", name, services);
|
|
406
|
+
}
|
|
407
|
+
__name(getEnumRef, "getEnumRef");
|
|
408
|
+
function getAttributeRef(name, services) {
|
|
409
|
+
return getDeclarationRef("Attribute", name, services);
|
|
410
|
+
}
|
|
411
|
+
__name(getAttributeRef, "getAttributeRef");
|
|
412
|
+
function getFunctionRef(name, services) {
|
|
413
|
+
return getDeclarationRef("FunctionDecl", name, services);
|
|
414
|
+
}
|
|
415
|
+
__name(getFunctionRef, "getFunctionRef");
|
|
416
|
+
function normalizeFloatDefault(val) {
|
|
417
|
+
if (/^-?\d+$/.test(val)) {
|
|
418
|
+
return (ab) => ab.NumberLiteral.setValue(val + ".0");
|
|
419
|
+
}
|
|
420
|
+
if (/^-?\d+\.\d+$/.test(val)) {
|
|
421
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
422
|
+
}
|
|
423
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
424
|
+
}
|
|
425
|
+
__name(normalizeFloatDefault, "normalizeFloatDefault");
|
|
426
|
+
function normalizeDecimalDefault(val) {
|
|
427
|
+
if (/^-?\d+$/.test(val)) {
|
|
428
|
+
return (ab) => ab.NumberLiteral.setValue(val + ".00");
|
|
429
|
+
}
|
|
430
|
+
if (/^-?\d+\.\d+$/.test(val)) {
|
|
431
|
+
const [integerPart, fractionalPart] = val.split(".");
|
|
432
|
+
let normalized = fractionalPart.replace(/0+$/, "");
|
|
433
|
+
if (normalized.length < 2) {
|
|
434
|
+
normalized = normalized.padEnd(2, "0");
|
|
435
|
+
}
|
|
436
|
+
return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`);
|
|
437
|
+
}
|
|
438
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
439
|
+
}
|
|
440
|
+
__name(normalizeDecimalDefault, "normalizeDecimalDefault");
|
|
441
|
+
|
|
442
|
+
// src/actions/pull/casing.ts
|
|
443
|
+
function resolveNameCasing(casing, originalName) {
|
|
444
|
+
let name = originalName;
|
|
445
|
+
const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? "_" : "";
|
|
446
|
+
switch (casing) {
|
|
447
|
+
case "pascal":
|
|
448
|
+
name = toPascalCase(originalName);
|
|
449
|
+
break;
|
|
450
|
+
case "camel":
|
|
451
|
+
name = toCamelCase(originalName);
|
|
452
|
+
break;
|
|
453
|
+
case "snake":
|
|
454
|
+
name = toSnakeCase(originalName);
|
|
455
|
+
break;
|
|
456
|
+
}
|
|
457
|
+
return {
|
|
458
|
+
modified: name !== originalName || fieldPrefix !== "",
|
|
459
|
+
name: `${fieldPrefix}${name}`
|
|
460
|
+
};
|
|
461
|
+
}
|
|
462
|
+
__name(resolveNameCasing, "resolveNameCasing");
|
|
463
|
+
function isAllUpperCase(str) {
|
|
464
|
+
return str === str.toUpperCase();
|
|
465
|
+
}
|
|
466
|
+
__name(isAllUpperCase, "isAllUpperCase");
|
|
467
|
+
function toPascalCase(str) {
|
|
468
|
+
if (isAllUpperCase(str)) return str;
|
|
469
|
+
return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase());
|
|
470
|
+
}
|
|
471
|
+
__name(toPascalCase, "toPascalCase");
|
|
472
|
+
function toCamelCase(str) {
|
|
473
|
+
if (isAllUpperCase(str)) return str;
|
|
474
|
+
return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase());
|
|
475
|
+
}
|
|
476
|
+
__name(toCamelCase, "toCamelCase");
|
|
477
|
+
function toSnakeCase(str) {
|
|
478
|
+
if (isAllUpperCase(str)) return str;
|
|
479
|
+
return str.replace(/[- ]+/g, "_").replace(/([a-z0-9])([A-Z])/g, "$1_$2").toLowerCase();
|
|
480
|
+
}
|
|
481
|
+
__name(toSnakeCase, "toSnakeCase");
|
|
482
|
+
|
|
483
|
+
// src/actions/pull/index.ts
|
|
484
|
+
function syncEnums({ dbEnums, model, oldModel, provider, options, services, defaultSchema }) {
|
|
485
|
+
if (provider.isSupportedFeature("NativeEnum")) {
|
|
486
|
+
for (const dbEnum of dbEnums) {
|
|
487
|
+
const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type);
|
|
488
|
+
if (modified) console.log(import_colors3.default.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`));
|
|
489
|
+
const factory = new import_factory.EnumFactory().setName(name);
|
|
490
|
+
if (modified || options.alwaysMap) factory.addAttribute((builder) => builder.setDecl(getAttributeRef("@@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)));
|
|
491
|
+
dbEnum.values.forEach((v) => {
|
|
492
|
+
const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, v);
|
|
493
|
+
factory.addField((builder) => {
|
|
494
|
+
builder.setName(name2);
|
|
495
|
+
if (modified2 || options.alwaysMap) builder.addAttribute((builder2) => builder2.setDecl(getAttributeRef("@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)));
|
|
496
|
+
return builder;
|
|
497
|
+
});
|
|
498
|
+
});
|
|
499
|
+
if (dbEnum.schema_name && dbEnum.schema_name !== "" && dbEnum.schema_name !== defaultSchema) {
|
|
500
|
+
factory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)));
|
|
501
|
+
}
|
|
502
|
+
model.declarations.push(factory.get({
|
|
503
|
+
$container: model
|
|
504
|
+
}));
|
|
505
|
+
}
|
|
506
|
+
} else {
|
|
507
|
+
const dummyBuildReference = /* @__PURE__ */ __name((_node, _property, _refNode, refText) => ({
|
|
508
|
+
$refText: refText
|
|
509
|
+
}), "dummyBuildReference");
|
|
510
|
+
oldModel.declarations.filter((d) => (0, import_ast3.isEnum)(d)).forEach((d) => {
|
|
511
|
+
const copy = import_langium.AstUtils.copyAstNode(d, dummyBuildReference);
|
|
512
|
+
copy.$container = model;
|
|
513
|
+
model.declarations.push(copy);
|
|
514
|
+
});
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
__name(syncEnums, "syncEnums");
|
|
518
|
+
function syncTable({ model, provider, table, services, options, defaultSchema }) {
|
|
519
|
+
const idAttribute = getAttributeRef("@id", services);
|
|
520
|
+
const modelIdAttribute = getAttributeRef("@@id", services);
|
|
521
|
+
const uniqueAttribute = getAttributeRef("@unique", services);
|
|
522
|
+
const modelUniqueAttribute = getAttributeRef("@@unique", services);
|
|
523
|
+
const fieldMapAttribute = getAttributeRef("@map", services);
|
|
524
|
+
const tableMapAttribute = getAttributeRef("@@map", services);
|
|
525
|
+
const modelindexAttribute = getAttributeRef("@@index", services);
|
|
526
|
+
const relations = [];
|
|
527
|
+
const { name, modified } = resolveNameCasing(options.modelCasing, table.name);
|
|
528
|
+
const multiPk = table.columns.filter((c) => c.pk).length > 1;
|
|
529
|
+
const modelFactory = new import_factory.DataModelFactory().setName(name).setIsView(table.type === "view");
|
|
530
|
+
modelFactory.setContainer(model);
|
|
531
|
+
if (modified || options.alwaysMap) {
|
|
532
|
+
modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)));
|
|
533
|
+
}
|
|
534
|
+
const fkGroups = /* @__PURE__ */ new Map();
|
|
535
|
+
table.columns.forEach((column) => {
|
|
536
|
+
if (column.foreign_key_table && column.foreign_key_name) {
|
|
537
|
+
const group = fkGroups.get(column.foreign_key_name) ?? [];
|
|
538
|
+
group.push(column);
|
|
539
|
+
fkGroups.set(column.foreign_key_name, group);
|
|
540
|
+
}
|
|
541
|
+
});
|
|
542
|
+
for (const [fkName, fkColumns] of fkGroups) {
|
|
543
|
+
const firstCol = fkColumns[0];
|
|
544
|
+
const isSingleColumnPk = fkColumns.length === 1 && !multiPk && firstCol.pk;
|
|
545
|
+
const isUniqueRelation = fkColumns.length === 1 && firstCol.unique || isSingleColumnPk;
|
|
546
|
+
relations.push({
|
|
547
|
+
schema: table.schema,
|
|
548
|
+
table: table.name,
|
|
549
|
+
columns: fkColumns.map((c) => c.name),
|
|
550
|
+
type: "one",
|
|
551
|
+
fk_name: fkName,
|
|
552
|
+
foreign_key_on_delete: firstCol.foreign_key_on_delete,
|
|
553
|
+
foreign_key_on_update: firstCol.foreign_key_on_update,
|
|
554
|
+
nullable: firstCol.nullable,
|
|
555
|
+
references: {
|
|
556
|
+
schema: firstCol.foreign_key_schema,
|
|
557
|
+
table: firstCol.foreign_key_table,
|
|
558
|
+
columns: fkColumns.map((c) => c.foreign_key_column),
|
|
559
|
+
type: isUniqueRelation ? "one" : "many"
|
|
560
|
+
}
|
|
561
|
+
});
|
|
562
|
+
}
|
|
563
|
+
table.columns.forEach((column) => {
|
|
564
|
+
const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, column.name);
|
|
565
|
+
const builtinType = provider.getBuiltinType(column.datatype);
|
|
566
|
+
modelFactory.addField((builder) => {
|
|
567
|
+
builder.setName(name2);
|
|
568
|
+
builder.setType((typeBuilder) => {
|
|
569
|
+
typeBuilder.setArray(builtinType.isArray);
|
|
570
|
+
typeBuilder.setOptional(builtinType.isArray ? false : column.nullable);
|
|
571
|
+
if (column.computed) {
|
|
572
|
+
typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
|
|
573
|
+
} else if (column.datatype === "enum") {
|
|
574
|
+
const ref = model.declarations.find((d) => (0, import_ast3.isEnum)(d) && getDbName(d) === column.datatype_name);
|
|
575
|
+
if (!ref) {
|
|
576
|
+
throw new CliError(`Enum ${column.datatype_name} not found`);
|
|
577
|
+
}
|
|
578
|
+
typeBuilder.setReference(ref);
|
|
579
|
+
} else {
|
|
580
|
+
if (builtinType.type !== "Unsupported") {
|
|
581
|
+
typeBuilder.setType(builtinType.type);
|
|
582
|
+
} else {
|
|
583
|
+
typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
|
|
584
|
+
}
|
|
585
|
+
}
|
|
586
|
+
return typeBuilder;
|
|
587
|
+
});
|
|
588
|
+
if (column.pk && !multiPk) {
|
|
589
|
+
builder.addAttribute((b) => b.setDecl(idAttribute));
|
|
590
|
+
}
|
|
591
|
+
const fieldAttrs = provider.getFieldAttributes({
|
|
592
|
+
fieldName: column.name,
|
|
593
|
+
fieldType: builtinType.type,
|
|
594
|
+
datatype: column.datatype,
|
|
595
|
+
length: column.length,
|
|
596
|
+
precision: column.precision,
|
|
597
|
+
services
|
|
598
|
+
});
|
|
599
|
+
fieldAttrs.forEach(builder.addAttribute.bind(builder));
|
|
600
|
+
if (column.default && !column.computed) {
|
|
601
|
+
const defaultExprBuilder = provider.getDefaultValue({
|
|
602
|
+
fieldType: builtinType.type,
|
|
603
|
+
datatype: column.datatype,
|
|
604
|
+
datatype_name: column.datatype_name,
|
|
605
|
+
defaultValue: column.default,
|
|
606
|
+
services,
|
|
607
|
+
enums: model.declarations.filter((d) => d.$type === "Enum")
|
|
608
|
+
});
|
|
609
|
+
if (defaultExprBuilder) {
|
|
610
|
+
const defaultAttr = new import_factory.DataFieldAttributeFactory().setDecl(getAttributeRef("@default", services)).addArg(defaultExprBuilder);
|
|
611
|
+
builder.addAttribute(defaultAttr);
|
|
612
|
+
}
|
|
613
|
+
}
|
|
614
|
+
if (column.unique && !column.pk) {
|
|
615
|
+
builder.addAttribute((b) => {
|
|
616
|
+
b.setDecl(uniqueAttribute);
|
|
617
|
+
const isDefaultName = !column.unique_name || column.unique_name === `${table.name}_${column.name}_key` || column.unique_name === column.name;
|
|
618
|
+
if (!isDefaultName) {
|
|
619
|
+
b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name), "map");
|
|
620
|
+
}
|
|
621
|
+
return b;
|
|
622
|
+
});
|
|
623
|
+
}
|
|
624
|
+
if (modified2 || options.alwaysMap) {
|
|
625
|
+
builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab2) => ab2.StringLiteral.setValue(column.name)));
|
|
626
|
+
}
|
|
627
|
+
return builder;
|
|
628
|
+
});
|
|
629
|
+
});
|
|
630
|
+
const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name);
|
|
631
|
+
if (multiPk) {
|
|
632
|
+
modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => {
|
|
633
|
+
const arrayExpr = argBuilder.ArrayExpr;
|
|
634
|
+
pkColumns.forEach((c) => {
|
|
635
|
+
const ref = modelFactory.node.fields.find((f) => getDbName(f) === c);
|
|
636
|
+
if (!ref) {
|
|
637
|
+
throw new CliError(`Field ${c} not found`);
|
|
638
|
+
}
|
|
639
|
+
arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref));
|
|
640
|
+
});
|
|
641
|
+
return arrayExpr;
|
|
642
|
+
}));
|
|
643
|
+
}
|
|
644
|
+
const hasUniqueConstraint = table.columns.some((c) => c.unique || c.pk) || table.indexes.some((i) => i.unique);
|
|
645
|
+
if (!hasUniqueConstraint) {
|
|
646
|
+
modelFactory.addAttribute((a) => a.setDecl(getAttributeRef("@@ignore", services)));
|
|
647
|
+
modelFactory.addComment("/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.");
|
|
648
|
+
}
|
|
649
|
+
const sortedIndexes = table.indexes.reverse().sort((a, b) => {
|
|
650
|
+
if (a.unique && !b.unique) return -1;
|
|
651
|
+
if (!a.unique && b.unique) return 1;
|
|
652
|
+
return 0;
|
|
653
|
+
});
|
|
654
|
+
sortedIndexes.forEach((index) => {
|
|
655
|
+
if (index.predicate) {
|
|
656
|
+
console.warn(import_colors3.default.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
|
|
657
|
+
- Model: "${table.name}", constraint: "${index.name}"`));
|
|
658
|
+
return;
|
|
659
|
+
}
|
|
660
|
+
if (index.columns.find((c) => c.expression)) {
|
|
661
|
+
console.warn(import_colors3.default.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
|
|
662
|
+
- Model: "${table.name}", constraint: "${index.name}"`));
|
|
663
|
+
return;
|
|
664
|
+
}
|
|
665
|
+
if (index.primary) {
|
|
666
|
+
return;
|
|
667
|
+
}
|
|
668
|
+
if (index.columns.length === 1 && (index.columns.find((c) => pkColumns.includes(c.name)) || index.unique)) {
|
|
669
|
+
return;
|
|
670
|
+
}
|
|
671
|
+
modelFactory.addAttribute((builder) => {
|
|
672
|
+
const attr = builder.setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute).addArg((argBuilder) => {
|
|
673
|
+
const arrayExpr = argBuilder.ArrayExpr;
|
|
674
|
+
index.columns.forEach((c) => {
|
|
675
|
+
const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name);
|
|
676
|
+
if (!ref) {
|
|
677
|
+
throw new CliError(`Column ${c.name} not found in model ${table.name}`);
|
|
678
|
+
}
|
|
679
|
+
arrayExpr.addItem((itemBuilder) => {
|
|
680
|
+
const refExpr = itemBuilder.ReferenceExpr.setTarget(ref);
|
|
681
|
+
if (c.order && c.order !== "ASC") refExpr.addArg((ab) => ab.StringLiteral.setValue("DESC"), "sort");
|
|
682
|
+
return refExpr;
|
|
683
|
+
});
|
|
684
|
+
});
|
|
685
|
+
return arrayExpr;
|
|
686
|
+
});
|
|
687
|
+
const suffix = index.unique ? "_key" : "_idx";
|
|
688
|
+
if (index.name !== `${table.name}_${index.columns.map((c) => c.name).join("_")}${suffix}`) {
|
|
689
|
+
attr.addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), "map");
|
|
690
|
+
}
|
|
691
|
+
return attr;
|
|
692
|
+
});
|
|
693
|
+
});
|
|
694
|
+
if (table.schema && table.schema !== "" && table.schema !== defaultSchema) {
|
|
695
|
+
modelFactory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(table.schema)));
|
|
696
|
+
}
|
|
697
|
+
model.declarations.push(modelFactory.node);
|
|
698
|
+
return relations;
|
|
699
|
+
}
|
|
700
|
+
__name(syncTable, "syncTable");
|
|
701
|
+
function syncRelation({ model, relation, services, options, selfRelation, similarRelations }) {
|
|
702
|
+
const idAttribute = getAttributeRef("@id", services);
|
|
703
|
+
const uniqueAttribute = getAttributeRef("@unique", services);
|
|
704
|
+
const relationAttribute = getAttributeRef("@relation", services);
|
|
705
|
+
const fieldMapAttribute = getAttributeRef("@map", services);
|
|
706
|
+
const tableMapAttribute = getAttributeRef("@@map", services);
|
|
707
|
+
const includeRelationName = selfRelation || similarRelations > 0;
|
|
708
|
+
if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) {
|
|
709
|
+
throw new CliError("Cannot find required attributes in the model.");
|
|
710
|
+
}
|
|
711
|
+
const sourceModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.table);
|
|
712
|
+
if (!sourceModel) return;
|
|
713
|
+
const sourceFields = [];
|
|
714
|
+
for (const colName of relation.columns) {
|
|
715
|
+
const idx = sourceModel.fields.findIndex((f) => getDbName(f) === colName);
|
|
716
|
+
const field = sourceModel.fields[idx];
|
|
717
|
+
if (!field) return;
|
|
718
|
+
sourceFields.push({
|
|
719
|
+
field,
|
|
720
|
+
index: idx
|
|
721
|
+
});
|
|
722
|
+
}
|
|
723
|
+
const targetModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.references.table);
|
|
724
|
+
if (!targetModel) return;
|
|
725
|
+
const targetFields = [];
|
|
726
|
+
for (const colName of relation.references.columns) {
|
|
727
|
+
const field = targetModel.fields.find((f) => getDbName(f) === colName);
|
|
728
|
+
if (!field) return;
|
|
729
|
+
targetFields.push(field);
|
|
730
|
+
}
|
|
731
|
+
const firstSourceField = sourceFields[0].field;
|
|
732
|
+
const firstSourceFieldId = sourceFields[0].index;
|
|
733
|
+
const firstColumn = relation.columns[0];
|
|
734
|
+
const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? "_" : "";
|
|
735
|
+
const relationName = `${relation.table}${similarRelations > 0 ? `_${firstColumn}` : ""}To${relation.references.table}`;
|
|
736
|
+
const sourceNameFromReference = firstSourceField.name.toLowerCase().endsWith("id") ? `${resolveNameCasing(options.fieldCasing, firstSourceField.name.slice(0, -2)).name}${relation.type === "many" ? "s" : ""}` : void 0;
|
|
737
|
+
const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference);
|
|
738
|
+
let { name: sourceFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${fieldPrefix}${(0, import_common_helpers.lowerCaseFirst)(sourceModel.name)}_${firstColumn}` : `${(!sourceFieldFromReference ? sourceNameFromReference : void 0) || (0, import_common_helpers.lowerCaseFirst)(resolveNameCasing(options.fieldCasing, targetModel.name).name)}${relation.type === "many" ? "s" : ""}`);
|
|
739
|
+
if (sourceModel.fields.find((f) => f.name === sourceFieldName)) {
|
|
740
|
+
sourceFieldName = `${sourceFieldName}To${(0, import_common_helpers.lowerCaseFirst)(targetModel.name)}_${relation.references.columns[0]}`;
|
|
741
|
+
}
|
|
742
|
+
const sourceFieldFactory = new import_factory.DataFieldFactory().setContainer(sourceModel).setName(sourceFieldName).setType((tb) => tb.setOptional(relation.nullable).setArray(relation.type === "many").setReference(targetModel));
|
|
743
|
+
sourceFieldFactory.addAttribute((ab) => {
|
|
744
|
+
ab.setDecl(relationAttribute);
|
|
745
|
+
if (includeRelationName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relationName));
|
|
746
|
+
ab.addArg((ab2) => {
|
|
747
|
+
const arrayExpr = ab2.ArrayExpr;
|
|
748
|
+
for (const { field } of sourceFields) {
|
|
749
|
+
arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
|
|
750
|
+
}
|
|
751
|
+
return arrayExpr;
|
|
752
|
+
}, "fields");
|
|
753
|
+
ab.addArg((ab2) => {
|
|
754
|
+
const arrayExpr = ab2.ArrayExpr;
|
|
755
|
+
for (const field of targetFields) {
|
|
756
|
+
arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
|
|
757
|
+
}
|
|
758
|
+
return arrayExpr;
|
|
759
|
+
}, "references");
|
|
760
|
+
const onDeleteDefault = relation.nullable ? "SET NULL" : "RESTRICT";
|
|
761
|
+
if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) {
|
|
762
|
+
const enumRef = getEnumRef("ReferentialAction", services);
|
|
763
|
+
if (!enumRef) throw new CliError("ReferentialAction enum not found");
|
|
764
|
+
const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_delete.replace(/ /g, "").toLowerCase());
|
|
765
|
+
if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_delete} not found`);
|
|
766
|
+
ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onDelete");
|
|
767
|
+
}
|
|
768
|
+
if (relation.foreign_key_on_update && relation.foreign_key_on_update !== "CASCADE") {
|
|
769
|
+
const enumRef = getEnumRef("ReferentialAction", services);
|
|
770
|
+
if (!enumRef) throw new CliError("ReferentialAction enum not found");
|
|
771
|
+
const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_update.replace(/ /g, "").toLowerCase());
|
|
772
|
+
if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_update} not found`);
|
|
773
|
+
ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onUpdate");
|
|
774
|
+
}
|
|
775
|
+
const defaultFkName = `${relation.table}_${relation.columns.join("_")}_fkey`;
|
|
776
|
+
if (relation.fk_name && relation.fk_name !== defaultFkName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relation.fk_name), "map");
|
|
777
|
+
return ab;
|
|
778
|
+
});
|
|
779
|
+
sourceModel.fields.splice(firstSourceFieldId, 0, sourceFieldFactory.node);
|
|
780
|
+
const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? "_" : "";
|
|
781
|
+
let { name: oppositeFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${oppositeFieldPrefix}${(0, import_common_helpers.lowerCaseFirst)(sourceModel.name)}_${firstColumn}` : `${(0, import_common_helpers.lowerCaseFirst)(resolveNameCasing(options.fieldCasing, sourceModel.name).name)}${relation.references.type === "many" ? "s" : ""}`);
|
|
782
|
+
if (targetModel.fields.find((f) => f.name === oppositeFieldName)) {
|
|
783
|
+
({ name: oppositeFieldName } = resolveNameCasing(options.fieldCasing, `${(0, import_common_helpers.lowerCaseFirst)(sourceModel.name)}_${firstColumn}To${relation.references.table}_${relation.references.columns[0]}`));
|
|
784
|
+
}
|
|
785
|
+
const targetFieldFactory = new import_factory.DataFieldFactory().setContainer(targetModel).setName(oppositeFieldName).setType((tb) => tb.setOptional(relation.references.type === "one").setArray(relation.references.type === "many").setReference(sourceModel));
|
|
786
|
+
if (includeRelationName) targetFieldFactory.addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab2) => ab2.StringLiteral.setValue(relationName)));
|
|
787
|
+
targetModel.fields.push(targetFieldFactory.node);
|
|
788
|
+
}
|
|
789
|
+
__name(syncRelation, "syncRelation");
|
|
790
|
+
// Merges enums in a freshly introspected model (`newModel`) with the enums of
// a previously existing model (`oldModel`) so that enum names and attributes
// chosen by the user survive re-introspection.
//
// Matching strategy: a new enum is paired with an old enum when some data model
// field — matched across the two models by database name via getDbName — uses
// the new enum in `newModel` and an enum type in `oldModel`.
function consolidateEnums({ newModel, oldModel }) {
  const newEnums = newModel.declarations.filter((d) => (0, import_ast3.isEnum)(d));
  const newDataModels = newModel.declarations.filter((d) => d.$type === "DataModel");
  const oldDataModels = oldModel.declarations.filter((d) => d.$type === "DataModel");
  // Maps each new enum to the old enum it corresponds to (first match wins).
  const enumMapping = /* @__PURE__ */ new Map();
  for (const newEnum of newEnums) {
    for (const newDM of newDataModels) {
      for (const field of newDM.fields) {
        // Find a field in the new model that references this enum...
        if (field.$type !== "DataField" || field.type.reference?.ref !== newEnum) continue;
        // ...then locate the same table/column pair in the old model by db name.
        const oldDM = oldDataModels.find((d) => getDbName(d) === getDbName(newDM));
        if (!oldDM) continue;
        const oldField = oldDM.fields.find((f) => getDbName(f) === getDbName(field));
        if (!oldField || oldField.$type !== "DataField" || !oldField.type.reference?.ref) continue;
        const oldEnum = oldField.type.reference.ref;
        if (!(0, import_ast3.isEnum)(oldEnum)) continue;
        enumMapping.set(newEnum, oldEnum);
        break;
      }
      if (enumMapping.has(newEnum)) break;
    }
  }
  // Invert the mapping: one old enum may correspond to several new enums
  // (introspection can synthesize one enum per column, so duplicates happen).
  const reverseMapping = /* @__PURE__ */ new Map();
  for (const [newEnum, oldEnum] of enumMapping) {
    if (!reverseMapping.has(oldEnum)) {
      reverseMapping.set(oldEnum, []);
    }
    reverseMapping.get(oldEnum).push(newEnum);
  }
  for (const [oldEnum, newEnumsGroup] of reverseMapping) {
    // Keep the first new enum of the group; the rest are duplicates to remove.
    const keepEnum = newEnumsGroup[0];
    // Nothing to do when a single new enum already carries the old name.
    if (newEnumsGroup.length === 1 && keepEnum.name === oldEnum.name) continue;
    // Only consolidate when every new enum in the group has exactly the same
    // value set (compared by db name) as the old enum.
    const oldValues = new Set(oldEnum.fields.map((f) => getDbName(f)));
    const allMatch = newEnumsGroup.every((ne) => {
      const newValues = new Set(ne.fields.map((f) => getDbName(f)));
      return oldValues.size === newValues.size && [
        ...oldValues
      ].every((v) => newValues.has(v));
    });
    if (!allMatch) continue;
    // Adopt the old enum's name and copy its attributes, re-parenting each
    // copied attribute node onto the kept enum via $container.
    keepEnum.name = oldEnum.name;
    keepEnum.attributes = oldEnum.attributes.map((attr) => {
      const copy = {
        ...attr,
        $container: keepEnum
      };
      return copy;
    });
    // Remove the duplicate enum declarations from the new model.
    for (let i = 1; i < newEnumsGroup.length; i++) {
      const idx = newModel.declarations.indexOf(newEnumsGroup[i]);
      if (idx >= 0) {
        newModel.declarations.splice(idx, 1);
      }
    }
    // Re-point every field that referenced any enum of the group to the kept one.
    for (const newDM of newDataModels) {
      for (const field of newDM.fields) {
        if (field.$type !== "DataField") continue;
        const ref = field.type.reference?.ref;
        if (ref && newEnumsGroup.includes(ref)) {
          field.type.reference = {
            ref: keepEnum,
            $refText: keepEnum.name
          };
        }
      }
    }
    console.log(import_colors3.default.gray(`Consolidated enum${newEnumsGroup.length > 1 ? "s" : ""} ${newEnumsGroup.map((e) => e.name).join(", ")} \u2192 ${oldEnum.name}`));
  }
}
__name(consolidateEnums, "consolidateEnums");
|
|
859
|
+
|
|
860
|
+
// src/actions/pull/provider/mysql.ts
|
|
861
|
+
var import_factory2 = require("@zenstackhq/language/factory");
|
|
862
|
+
function normalizeGenerationExpression(typeDef) {
  // MySQL embeds charset introducers (e.g. `_utf8mb4\'...'`) and
  // backslash-escaped quotes in generated-column expressions. Strip the
  // introducer down to a bare quote first, then unescape remaining quotes.
  const withoutCharsetIntroducers = typeDef.replace(/_([0-9A-Za-z_]+)\\?'/g, "'");
  const unescaped = withoutCharsetIntroducers.replace(/\\'/g, "'");
  return unescaped;
}
|
|
865
|
+
__name(normalizeGenerationExpression, "normalizeGenerationExpression");
|
|
866
|
+
// MySQL implementation of the `db pull` introspection provider. Connects via
// the mysql2 driver, reads INFORMATION_SCHEMA, and maps MySQL column metadata
// onto ZenStack schema constructs.
var mysql = {
  // Feature flags for the pull pipeline: MySQL supports native enums but not
  // multiple schemas per connection (the "Schema" feature).
  isSupportedFeature(feature) {
    switch (feature) {
      case "NativeEnum":
        return true;
      case "Schema":
      default:
        return false;
    }
  },
  // Maps a raw MySQL data type name to a ZenStack builtin type.
  // `isArray` is always false here — MySQL has no array column types.
  getBuiltinType(type2) {
    const t = (type2 || "").toLowerCase().trim();
    const isArray = false;
    switch (t) {
      // integers
      case "tinyint":
      case "smallint":
      case "mediumint":
      case "int":
      case "integer":
        return {
          type: "Int",
          isArray
        };
      case "bigint":
        return {
          type: "BigInt",
          isArray
        };
      // decimals and floats
      case "decimal":
      case "numeric":
        return {
          type: "Decimal",
          isArray
        };
      case "float":
      case "double":
      case "real":
        return {
          type: "Float",
          isArray
        };
      // boolean (MySQL uses TINYINT(1) for boolean)
      case "boolean":
      case "bool":
        return {
          type: "Boolean",
          isArray
        };
      // strings
      case "char":
      case "varchar":
      case "tinytext":
      case "text":
      case "mediumtext":
      case "longtext":
        return {
          type: "String",
          isArray
        };
      // dates/times
      case "date":
      case "time":
      case "datetime":
      case "timestamp":
      case "year":
        return {
          type: "DateTime",
          isArray
        };
      // binary
      case "binary":
      case "varbinary":
      case "tinyblob":
      case "blob":
      case "mediumblob":
      case "longblob":
        return {
          type: "Bytes",
          isArray
        };
      // json
      case "json":
        return {
          type: "Json",
          isArray
        };
      default:
        // enum(...) and set(...) embed their value list in the type string;
        // both surface as String at the builtin-type level.
        if (t.startsWith("enum(")) {
          return {
            type: "String",
            isArray
          };
        }
        if (t.startsWith("set(")) {
          return {
            type: "String",
            isArray
          };
        }
        return {
          type: "Unsupported",
          isArray
        };
    }
  },
  // Returns the database type (and precision, where relevant) that a ZenStack
  // builtin type maps to by default. Used by getFieldAttributes to decide
  // whether an explicit @db.* native-type attribute must be emitted.
  // Falls through (returns undefined) for types without a default mapping.
  getDefaultDatabaseType(type2) {
    switch (type2) {
      case "String":
        return {
          type: "varchar",
          precision: 191
        };
      case "Boolean":
        return {
          type: "boolean"
        };
      case "Int":
        return {
          type: "int"
        };
      case "BigInt":
        return {
          type: "bigint"
        };
      case "Float":
        return {
          type: "double"
        };
      case "Decimal":
        return {
          type: "decimal",
          precision: 65
        };
      case "DateTime":
        return {
          type: "datetime",
          precision: 3
        };
      case "Json":
        return {
          type: "json"
        };
      case "Bytes":
        return {
          type: "longblob"
        };
    }
  },
  // Introspects the database behind `connectionString` and returns the raw
  // table and enum metadata consumed by the pull pipeline.
  // mysql2 is imported lazily so the driver is only required when a MySQL
  // datasource is actually pulled.
  async introspect(connectionString, options) {
    const mysql2 = await import("mysql2/promise");
    const connection = await mysql2.createConnection(connectionString);
    try {
      // The target database name is taken from the connection string's path.
      const url = new URL(connectionString);
      const databaseName = url.pathname.replace("/", "");
      if (!databaseName) {
        throw new CliError("Database name not found in connection string");
      }
      const [tableRows] = await connection.execute(getTableIntrospectionQuery(), [
        databaseName
      ]);
      const tables = [];
      for (const row of tableRows) {
        // Depending on driver/server settings the JSON aggregates may arrive
        // as strings or as already-parsed values — handle both.
        const columns = typeof row.columns === "string" ? JSON.parse(row.columns) : row.columns;
        const indexes = typeof row.indexes === "string" ? JSON.parse(row.indexes) : row.indexes;
        const sortedColumns = (columns || []).sort((a, b) => (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0)).map((col) => {
          // Apply the user's model casing to synthetic enum type names.
          if (col.datatype === "enum" && col.datatype_name) {
            return {
              ...col,
              datatype_name: resolveNameCasing(options.modelCasing, col.datatype_name).name
            };
          }
          // Generated-column definitions need charset-introducer cleanup.
          if (col.computed && typeof col.datatype === "string") {
            return {
              ...col,
              datatype: normalizeGenerationExpression(col.datatype)
            };
          }
          return col;
        });
        // Drop single-column indexes that merely back a foreign key (named
        // `<table>_<column>_fkey`) — they are implied by the relation itself.
        const filteredIndexes = (indexes || []).filter((idx) => !(idx.columns.length === 1 && idx.name === `${row.name}_${idx.columns[0]?.name}_fkey`));
        tables.push({
          schema: "",
          name: row.name,
          type: row.type,
          definition: row.definition,
          columns: sortedColumns,
          indexes: filteredIndexes
        });
      }
      const [enumRows] = await connection.execute(getEnumIntrospectionQuery(), [
        databaseName
      ]);
      // MySQL enums are per-column; synthesize `<table>_<column>` enum names
      // and apply the configured model casing.
      const enums = enumRows.map((row) => {
        const values = parseEnumValues(row.column_type);
        const syntheticName = `${row.table_name}_${row.column_name}`;
        const { name } = resolveNameCasing(options.modelCasing, syntheticName);
        return {
          schema_name: "",
          enum_type: name,
          values
        };
      });
      return {
        tables,
        enums
      };
    } finally {
      await connection.end();
    }
  },
  // Translates a raw column default string into an AST expression-builder
  // callback, or null when no default attribute should be emitted.
  getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
    const val = defaultValue.trim();
    if (val.toUpperCase() === "NULL") {
      return null;
    }
    // Enum defaults become references to the matching enum field.
    if (datatype === "enum" && datatype_name) {
      const enumDef = enums.find((e) => getDbName(e) === datatype_name);
      if (enumDef) {
        const enumValue = val.startsWith("'") && val.endsWith("'") ? val.slice(1, -1) : val;
        const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
        if (enumField) {
          return (ab) => ab.ReferenceExpr.setTarget(enumField);
        }
      }
    }
    switch (fieldType) {
      case "DateTime":
        // CURRENT_TIMESTAMP / CURRENT_TIMESTAMP(n) / now() map to now().
        if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === "current_timestamp()" || val.toLowerCase() === "now()") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Int":
      case "BigInt":
        if (val.toLowerCase() === "auto_increment") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
        }
        return (ab) => ab.NumberLiteral.setValue(val);
      case "Float":
        return normalizeFloatDefault(val);
      case "Decimal":
        return normalizeDecimalDefault(val);
      case "Boolean":
        // MySQL reports boolean defaults as true/false, 1/0, or bit b'1'.
        return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === "true" || val === "1" || val === "b'1'");
      case "String":
        if (val.toLowerCase() === "uuid()") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("uuid", services));
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Json":
        return (ab) => ab.StringLiteral.setValue(val);
      case "Bytes":
        return (ab) => ab.StringLiteral.setValue(val);
    }
    // Anything that looks like a function call is preserved via dbgenerated().
    if (val.includes("(") && val.includes(")")) {
      return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
    }
    console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
    return null;
  },
  // Computes extra field attributes: a heuristic @updatedAt for DateTime
  // columns named updatedAt/updated_at, and a @db.* native-type attribute
  // whenever the column's database type differs from the builtin default.
  getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
    const factories = [];
    if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
      factories.push(new import_factory2.DataFieldAttributeFactory().setDecl(getAttributeRef("@updatedAt", services)));
    }
    // Look up the @db.<datatype> attribute declaration via the language index.
    const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`)?.node;
    const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
    // Emit @db.* only when the column deviates from the default mapping in
    // type or in size/precision.
    if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
      const dbAttrFactory = new import_factory2.DataFieldAttributeFactory().setDecl(dbAttr);
      const sizeValue = length ?? precision;
      if (sizeValue !== void 0 && sizeValue !== null) {
        dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(sizeValue));
      }
      factories.push(dbAttrFactory);
    }
    return factories;
  }
};
|
|
1145
|
+
// Builds the single parameterized SQL statement (one `?` placeholder: the
// target database name) used to introspect all tables and views. Each result
// row describes one table/view with its columns and indexes nested as JSON
// arrays via JSON_ARRAYAGG/JSON_OBJECT. The SQL text is returned verbatim;
// its embedded `--` comments document each projection.
function getTableIntrospectionQuery() {
  return `
-- Main query: one row per table/view with columns and indexes as nested JSON arrays.
-- Uses INFORMATION_SCHEMA which is MySQL's standard metadata catalog.
SELECT
    t.TABLE_NAME AS \`name\`, -- table or view name
    CASE t.TABLE_TYPE -- map MySQL table type strings to our internal types
        WHEN 'BASE TABLE' THEN 'table'
        WHEN 'VIEW' THEN 'view'
        ELSE NULL
    END AS \`type\`,
    CASE -- for views, retrieve the SQL definition
        WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION
        ELSE NULL
    END AS \`definition\`,

    -- ===== COLUMNS subquery =====
    -- Wraps an ordered subquery in JSON_ARRAYAGG to produce a JSON array of column objects.
    (
        SELECT JSON_ARRAYAGG(col_json)
        FROM (
            SELECT JSON_OBJECT(
                'ordinal_position', c.ORDINAL_POSITION, -- column position (used for sorting)
                'name', c.COLUMN_NAME, -- column name

                -- datatype: for generated/computed columns, construct the full DDL-like type definition
                -- (e.g., "int GENERATED ALWAYS AS (col1 + col2) STORED") so it can be rendered as
                -- Unsupported("..."); special-case tinyint(1) as 'boolean' (MySQL's boolean convention);
                -- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime').
                'datatype', CASE
                    WHEN c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '' THEN
                        CONCAT(
                            c.COLUMN_TYPE,
                            ' GENERATED ALWAYS AS (',
                            c.GENERATION_EXPRESSION,
                            ') ',
                            CASE
                                WHEN c.EXTRA LIKE '%STORED GENERATED%' THEN 'STORED'
                                ELSE 'VIRTUAL'
                            END
                        )
                    WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean'
                    ELSE c.DATA_TYPE
                END,

                -- datatype_name: for enum columns, generate a synthetic name "TableName_ColumnName"
                -- (MySQL doesn't have named enum types like PostgreSQL)
                'datatype_name', CASE
                    WHEN c.DATA_TYPE = 'enum' THEN CONCAT(t.TABLE_NAME, '_', c.COLUMN_NAME)
                    ELSE NULL
                END,

                'datatype_schema', '', -- MySQL doesn't support multi-schema
                'length', c.CHARACTER_MAXIMUM_LENGTH, -- max length for string types (e.g., VARCHAR(255) -> 255)
                'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), -- numeric or datetime precision

                'nullable', c.IS_NULLABLE = 'YES', -- true if column allows NULL

                -- default: for auto_increment columns, report 'auto_increment' instead of NULL;
                -- otherwise use the COLUMN_DEFAULT value
                'default', CASE
                    WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment'
                    ELSE c.COLUMN_DEFAULT
                END,

                'pk', c.COLUMN_KEY = 'PRI', -- true if column is part of the primary key

                -- unique: true if the column has a single-column unique index.
                -- COLUMN_KEY = 'UNI' covers most cases, but may not be set when the column
                -- also participates in other indexes (showing 'MUL' instead on some MySQL versions).
                -- Also check INFORMATION_SCHEMA.STATISTICS for single-column unique indexes
                -- (NON_UNIQUE = 0) to match the PostgreSQL introspection behavior.
                'unique', (
                    c.COLUMN_KEY = 'UNI'
                    OR EXISTS (
                        SELECT 1
                        FROM INFORMATION_SCHEMA.STATISTICS s_uni
                        WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
                          AND s_uni.TABLE_NAME = c.TABLE_NAME
                          AND s_uni.COLUMN_NAME = c.COLUMN_NAME
                          AND s_uni.NON_UNIQUE = 0
                          AND s_uni.INDEX_NAME != 'PRIMARY'
                          AND (
                              SELECT COUNT(*)
                              FROM INFORMATION_SCHEMA.STATISTICS s_cnt
                              WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
                                AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
                                AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
                          ) = 1
                    )
                ),
                'unique_name', (
                    SELECT COALESCE(
                        CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END,
                        (
                            SELECT s_uni.INDEX_NAME
                            FROM INFORMATION_SCHEMA.STATISTICS s_uni
                            WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
                              AND s_uni.TABLE_NAME = c.TABLE_NAME
                              AND s_uni.COLUMN_NAME = c.COLUMN_NAME
                              AND s_uni.NON_UNIQUE = 0
                              AND s_uni.INDEX_NAME != 'PRIMARY'
                              AND (
                                  SELECT COUNT(*)
                                  FROM INFORMATION_SCHEMA.STATISTICS s_cnt
                                  WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
                                    AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
                                    AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
                              ) = 1
                            LIMIT 1
                        )
                    )
                ),

                -- computed: true if column has a generation expression (virtual or stored)
                'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '',

                -- options: for enum columns, the full COLUMN_TYPE string (e.g., "enum('a','b','c')")
                -- which gets parsed into individual values later
                'options', CASE
                    WHEN c.DATA_TYPE = 'enum' THEN c.COLUMN_TYPE
                    ELSE NULL
                END,

                -- Foreign key info (NULL if column is not part of a FK)
                'foreign_key_schema', NULL, -- MySQL doesn't support cross-schema FKs here
                'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, -- referenced table
                'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, -- referenced column
                'foreign_key_name', kcu_fk.CONSTRAINT_NAME, -- FK constraint name
                'foreign_key_on_update', rc.UPDATE_RULE, -- referential action on update (CASCADE, SET NULL, etc.)
                'foreign_key_on_delete', rc.DELETE_RULE -- referential action on delete
            ) AS col_json

            FROM INFORMATION_SCHEMA.COLUMNS c -- one row per column in the database

            -- Join KEY_COLUMN_USAGE to find foreign key references for this column.
            -- Filter to only FK entries (REFERENCED_TABLE_NAME IS NOT NULL).
            LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk
                ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA
                AND c.TABLE_NAME = kcu_fk.TABLE_NAME
                AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME
                AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL

            -- Join REFERENTIAL_CONSTRAINTS to get ON UPDATE / ON DELETE rules for the FK.
            LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc
                ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA
                AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME

            WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA
              AND c.TABLE_NAME = t.TABLE_NAME
            ORDER BY c.ORDINAL_POSITION -- preserve original column order
        ) AS cols_ordered
    ) AS \`columns\`,

    -- ===== INDEXES subquery =====
    -- Aggregates all indexes for this table into a JSON array.
    (
        SELECT JSON_ARRAYAGG(idx_json)
        FROM (
            SELECT JSON_OBJECT(
                'name', s.INDEX_NAME, -- index name (e.g., 'PRIMARY', 'idx_email')
                'method', s.INDEX_TYPE, -- index type (e.g., 'BTREE', 'HASH', 'FULLTEXT')
                'unique', s.NON_UNIQUE = 0, -- NON_UNIQUE=0 means it IS unique
                'primary', s.INDEX_NAME = 'PRIMARY', -- MySQL names the PK index 'PRIMARY'
                'valid', TRUE, -- MySQL doesn't expose index validity status
                'ready', TRUE, -- MySQL doesn't expose index readiness status
                'partial', FALSE, -- MySQL doesn't support partial indexes
                'predicate', NULL, -- no WHERE clause on indexes in MySQL

                -- Index columns: nested subquery for columns in this index
                'columns', (
                    SELECT JSON_ARRAYAGG(idx_col_json)
                    FROM (
                        SELECT JSON_OBJECT(
                            'name', s2.COLUMN_NAME, -- column name in the index
                            'expression', NULL, -- MySQL doesn't expose expression indexes via STATISTICS
                            -- COLLATION: 'A' = ascending, 'D' = descending, NULL = not sorted
                            'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END,
                            'nulls', NULL -- MySQL doesn't expose NULLS FIRST/LAST
                        ) AS idx_col_json
                        FROM INFORMATION_SCHEMA.STATISTICS s2 -- one row per column per index
                        WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA
                          AND s2.TABLE_NAME = s.TABLE_NAME
                          AND s2.INDEX_NAME = s.INDEX_NAME
                        ORDER BY s2.SEQ_IN_INDEX -- preserve column order within the index
                    ) AS idx_cols_ordered
                )
            ) AS idx_json
            FROM (
                -- Deduplicate: STATISTICS has one row per (index, column), but we need one row per index.
                -- DISTINCT on INDEX_NAME gives us one entry per index with its metadata.
                SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME
                FROM INFORMATION_SCHEMA.STATISTICS
                WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME
            ) s
        ) AS idxs_ordered
    ) AS \`indexes\`

-- === Main FROM: INFORMATION_SCHEMA.TABLES lists all tables and views ===
FROM INFORMATION_SCHEMA.TABLES t
-- Join VIEWS to get VIEW_DEFINITION for view tables
LEFT JOIN INFORMATION_SCHEMA.VIEWS v
    ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME
WHERE t.TABLE_SCHEMA = ? -- only the target database
  AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') -- exclude system tables like SYSTEM VIEW
  AND t.TABLE_NAME <> '_prisma_migrations' -- exclude Prisma migration tracking table
ORDER BY t.TABLE_NAME;
`;
}
__name(getTableIntrospectionQuery, "getTableIntrospectionQuery");
|
|
1355
|
+
// Builds the parameterized SQL (one `?`: the database name) that lists every
// enum column. MySQL has no named enum types, so each enum column yields one
// synthetic enum definition whose values are parsed from COLUMN_TYPE later
// (see parseEnumValues).
function getEnumIntrospectionQuery() {
  return `
SELECT
    c.TABLE_NAME AS table_name, -- table containing the enum column
    c.COLUMN_NAME AS column_name, -- column name
    c.COLUMN_TYPE AS column_type -- full type string including values (e.g., "enum('val1','val2')")
FROM INFORMATION_SCHEMA.COLUMNS c
WHERE c.TABLE_SCHEMA = ? -- only the target database
  AND c.DATA_TYPE = 'enum' -- only enum columns
ORDER BY c.TABLE_NAME, c.COLUMN_NAME;
`;
}
__name(getEnumIntrospectionQuery, "getEnumIntrospectionQuery");
|
|
1368
|
+
function parseEnumValues(columnType) {
  // Extracts the value list from a MySQL enum COLUMN_TYPE string such as
  // "enum('a','b','it''s')". Doubled single quotes inside a value are the
  // SQL escape for a literal quote. Non-enum type strings yield [].
  const match = columnType.match(/^enum\((.+)\)$/i);
  if (!match || !match[1]) return [];
  const body = match[1];
  const result = [];
  let buffer = "";
  let insideQuote = false;
  let pos = 0;
  while (pos < body.length) {
    const ch = body[pos];
    if (!insideQuote) {
      // Outside a quoted value: only an opening quote matters.
      if (ch === "'") insideQuote = true;
      pos++;
      continue;
    }
    if (ch !== "'") {
      buffer += ch;
      pos++;
      continue;
    }
    // A quote inside a value: either an escaped quote ('') or the terminator.
    if (body[pos + 1] === "'") {
      buffer += "'";
      pos += 2;
      continue;
    }
    result.push(buffer);
    buffer = "";
    insideQuote = false;
    pos++;
    // Skip the separator (comma plus optional spaces) before the next value.
    while (pos < body.length && (body[pos] === "," || body[pos] === " ")) {
      pos++;
    }
  }
  return result;
}
|
|
1405
|
+
__name(parseEnumValues, "parseEnumValues");
|
|
1406
|
+
|
|
1407
|
+
// src/actions/pull/provider/postgresql.ts
|
|
1408
|
+
var import_factory3 = require("@zenstackhq/language/factory");
|
|
1409
|
+
var import_pg = require("pg");
|
|
1410
|
+
// Maps PostgreSQL internal type names (pg_type.typname spellings such as
// "int4", "bpchar") to their standard SQL names.
// NOTE(review): consumed by the PostgreSQL provider code below this chunk —
// verify exact usage there.
var pgTypnameToStandard = {
  int2: "smallint",
  int4: "integer",
  int8: "bigint",
  float4: "real",
  float8: "double precision",
  bool: "boolean",
  bpchar: "character",
  numeric: "decimal"
};
// Bit precisions associated with PostgreSQL numeric types, keyed by both the
// internal and the standard type name (e.g. int4/integer -> 32).
// NOTE(review): presumably used to recognize default precisions during pull —
// the consuming code is outside this chunk; verify.
var standardTypePrecisions = {
  int2: 16,
  smallint: 16,
  int4: 32,
  integer: 32,
  int8: 64,
  bigint: 64,
  float4: 24,
  real: 24,
  float8: 53,
  "double precision": 53
};
// Maps PostgreSQL type names to ZenStack native-type attribute names
// (the suffix of @db.* attributes, e.g. varchar -> @db.VarChar).
var pgTypnameToZenStackNativeType = {
  // integers
  int2: "SmallInt",
  smallint: "SmallInt",
  int4: "Integer",
  integer: "Integer",
  int8: "BigInt",
  bigint: "BigInt",
  // decimals and floats
  numeric: "Decimal",
  decimal: "Decimal",
  float4: "Real",
  real: "Real",
  float8: "DoublePrecision",
  "double precision": "DoublePrecision",
  // boolean
  bool: "Boolean",
  boolean: "Boolean",
  // strings
  text: "Text",
  varchar: "VarChar",
  "character varying": "VarChar",
  bpchar: "Char",
  character: "Char",
  // uuid
  uuid: "Uuid",
  // dates/times
  date: "Date",
  time: "Time",
  timetz: "Timetz",
  timestamp: "Timestamp",
  timestamptz: "Timestamptz",
  // binary
  bytea: "ByteA",
  // json
  json: "Json",
  jsonb: "JsonB",
  // xml
  xml: "Xml",
  // network types
  inet: "Inet",
  // bit strings
  bit: "Bit",
  varbit: "VarBit",
  // oid
  oid: "Oid",
  // money
  money: "Money",
  // citext extension
  citext: "Citext"
};
|
|
1483
|
+
var postgresql = {
|
|
1484
|
+
isSupportedFeature(feature) {
|
|
1485
|
+
const supportedFeatures = [
|
|
1486
|
+
"Schema",
|
|
1487
|
+
"NativeEnum"
|
|
1488
|
+
];
|
|
1489
|
+
return supportedFeatures.includes(feature);
|
|
1490
|
+
},
|
|
1491
|
+
getBuiltinType(type2) {
|
|
1492
|
+
const t = (type2 || "").toLowerCase();
|
|
1493
|
+
const isArray = t.startsWith("_");
|
|
1494
|
+
switch (t.replace(/^_/, "")) {
|
|
1495
|
+
// integers
|
|
1496
|
+
case "int2":
|
|
1497
|
+
case "smallint":
|
|
1498
|
+
case "int4":
|
|
1499
|
+
case "integer":
|
|
1500
|
+
return {
|
|
1501
|
+
type: "Int",
|
|
1502
|
+
isArray
|
|
1503
|
+
};
|
|
1504
|
+
case "int8":
|
|
1505
|
+
case "bigint":
|
|
1506
|
+
return {
|
|
1507
|
+
type: "BigInt",
|
|
1508
|
+
isArray
|
|
1509
|
+
};
|
|
1510
|
+
// decimals and floats
|
|
1511
|
+
case "numeric":
|
|
1512
|
+
case "decimal":
|
|
1513
|
+
return {
|
|
1514
|
+
type: "Decimal",
|
|
1515
|
+
isArray
|
|
1516
|
+
};
|
|
1517
|
+
case "float4":
|
|
1518
|
+
case "real":
|
|
1519
|
+
case "float8":
|
|
1520
|
+
case "double precision":
|
|
1521
|
+
return {
|
|
1522
|
+
type: "Float",
|
|
1523
|
+
isArray
|
|
1524
|
+
};
|
|
1525
|
+
// boolean
|
|
1526
|
+
case "bool":
|
|
1527
|
+
case "boolean":
|
|
1528
|
+
return {
|
|
1529
|
+
type: "Boolean",
|
|
1530
|
+
isArray
|
|
1531
|
+
};
|
|
1532
|
+
// strings
|
|
1533
|
+
case "text":
|
|
1534
|
+
case "varchar":
|
|
1535
|
+
case "bpchar":
|
|
1536
|
+
case "character varying":
|
|
1537
|
+
case "character":
|
|
1538
|
+
return {
|
|
1539
|
+
type: "String",
|
|
1540
|
+
isArray
|
|
1541
|
+
};
|
|
1542
|
+
// uuid
|
|
1543
|
+
case "uuid":
|
|
1544
|
+
return {
|
|
1545
|
+
type: "String",
|
|
1546
|
+
isArray
|
|
1547
|
+
};
|
|
1548
|
+
// dates/times
|
|
1549
|
+
case "date":
|
|
1550
|
+
case "time":
|
|
1551
|
+
case "timetz":
|
|
1552
|
+
case "timestamp":
|
|
1553
|
+
case "timestamptz":
|
|
1554
|
+
return {
|
|
1555
|
+
type: "DateTime",
|
|
1556
|
+
isArray
|
|
1557
|
+
};
|
|
1558
|
+
// binary
|
|
1559
|
+
case "bytea":
|
|
1560
|
+
return {
|
|
1561
|
+
type: "Bytes",
|
|
1562
|
+
isArray
|
|
1563
|
+
};
|
|
1564
|
+
// json
|
|
1565
|
+
case "json":
|
|
1566
|
+
case "jsonb":
|
|
1567
|
+
return {
|
|
1568
|
+
type: "Json",
|
|
1569
|
+
isArray
|
|
1570
|
+
};
|
|
1571
|
+
default:
|
|
1572
|
+
return {
|
|
1573
|
+
type: "Unsupported",
|
|
1574
|
+
isArray
|
|
1575
|
+
};
|
|
1576
|
+
}
|
|
1577
|
+
},
|
|
1578
|
+
async introspect(connectionString, options) {
|
|
1579
|
+
const client = new import_pg.Client({
|
|
1580
|
+
connectionString
|
|
1581
|
+
});
|
|
1582
|
+
await client.connect();
|
|
1583
|
+
try {
|
|
1584
|
+
const { rows: tables } = await client.query(tableIntrospectionQuery);
|
|
1585
|
+
const { rows: enums } = await client.query(enumIntrospectionQuery);
|
|
1586
|
+
const filteredTables = tables.filter((t) => options.schemas.includes(t.schema));
|
|
1587
|
+
const filteredEnums = enums.filter((e) => options.schemas.includes(e.schema_name));
|
|
1588
|
+
return {
|
|
1589
|
+
enums: filteredEnums,
|
|
1590
|
+
tables: filteredTables
|
|
1591
|
+
};
|
|
1592
|
+
} finally {
|
|
1593
|
+
await client.end();
|
|
1594
|
+
}
|
|
1595
|
+
},
|
|
1596
|
+
getDefaultDatabaseType(type2) {
|
|
1597
|
+
switch (type2) {
|
|
1598
|
+
case "String":
|
|
1599
|
+
return {
|
|
1600
|
+
type: "text"
|
|
1601
|
+
};
|
|
1602
|
+
case "Boolean":
|
|
1603
|
+
return {
|
|
1604
|
+
type: "boolean"
|
|
1605
|
+
};
|
|
1606
|
+
case "Int":
|
|
1607
|
+
return {
|
|
1608
|
+
type: "integer"
|
|
1609
|
+
};
|
|
1610
|
+
case "BigInt":
|
|
1611
|
+
return {
|
|
1612
|
+
type: "bigint"
|
|
1613
|
+
};
|
|
1614
|
+
case "Float":
|
|
1615
|
+
return {
|
|
1616
|
+
type: "double precision"
|
|
1617
|
+
};
|
|
1618
|
+
case "Decimal":
|
|
1619
|
+
return {
|
|
1620
|
+
type: "decimal"
|
|
1621
|
+
};
|
|
1622
|
+
case "DateTime":
|
|
1623
|
+
return {
|
|
1624
|
+
type: "timestamp",
|
|
1625
|
+
precision: 3
|
|
1626
|
+
};
|
|
1627
|
+
case "Json":
|
|
1628
|
+
return {
|
|
1629
|
+
type: "jsonb"
|
|
1630
|
+
};
|
|
1631
|
+
case "Bytes":
|
|
1632
|
+
return {
|
|
1633
|
+
type: "bytea"
|
|
1634
|
+
};
|
|
1635
|
+
}
|
|
1636
|
+
},
|
|
1637
|
+
getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
|
|
1638
|
+
const val = defaultValue.trim();
|
|
1639
|
+
if (datatype === "enum" && datatype_name) {
|
|
1640
|
+
const enumDef = enums.find((e) => getDbName(e) === datatype_name);
|
|
1641
|
+
if (enumDef) {
|
|
1642
|
+
const enumValue = val.replace(/'/g, "").split("::")[0]?.trim();
|
|
1643
|
+
const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
|
|
1644
|
+
if (enumField) {
|
|
1645
|
+
return (ab) => ab.ReferenceExpr.setTarget(enumField);
|
|
1646
|
+
}
|
|
1647
|
+
}
|
|
1648
|
+
return typeCastingConvert({
|
|
1649
|
+
defaultValue,
|
|
1650
|
+
enums,
|
|
1651
|
+
val,
|
|
1652
|
+
services
|
|
1653
|
+
});
|
|
1654
|
+
}
|
|
1655
|
+
switch (fieldType) {
|
|
1656
|
+
case "DateTime":
|
|
1657
|
+
if (val === "CURRENT_TIMESTAMP" || val === "now()") {
|
|
1658
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
|
|
1659
|
+
}
|
|
1660
|
+
if (val.includes("::")) {
|
|
1661
|
+
return typeCastingConvert({
|
|
1662
|
+
defaultValue,
|
|
1663
|
+
enums,
|
|
1664
|
+
val,
|
|
1665
|
+
services
|
|
1666
|
+
});
|
|
1667
|
+
}
|
|
1668
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
1669
|
+
case "Int":
|
|
1670
|
+
case "BigInt":
|
|
1671
|
+
if (val.startsWith("nextval(")) {
|
|
1672
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
|
|
1673
|
+
}
|
|
1674
|
+
if (val.includes("::")) {
|
|
1675
|
+
return typeCastingConvert({
|
|
1676
|
+
defaultValue,
|
|
1677
|
+
enums,
|
|
1678
|
+
val,
|
|
1679
|
+
services
|
|
1680
|
+
});
|
|
1681
|
+
}
|
|
1682
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
1683
|
+
case "Float":
|
|
1684
|
+
if (val.includes("::")) {
|
|
1685
|
+
return typeCastingConvert({
|
|
1686
|
+
defaultValue,
|
|
1687
|
+
enums,
|
|
1688
|
+
val,
|
|
1689
|
+
services
|
|
1690
|
+
});
|
|
1691
|
+
}
|
|
1692
|
+
return normalizeFloatDefault(val);
|
|
1693
|
+
case "Decimal":
|
|
1694
|
+
if (val.includes("::")) {
|
|
1695
|
+
return typeCastingConvert({
|
|
1696
|
+
defaultValue,
|
|
1697
|
+
enums,
|
|
1698
|
+
val,
|
|
1699
|
+
services
|
|
1700
|
+
});
|
|
1701
|
+
}
|
|
1702
|
+
return normalizeDecimalDefault(val);
|
|
1703
|
+
case "Boolean":
|
|
1704
|
+
return (ab) => ab.BooleanLiteral.setValue(val === "true");
|
|
1705
|
+
case "String":
|
|
1706
|
+
if (val.includes("::")) {
|
|
1707
|
+
return typeCastingConvert({
|
|
1708
|
+
defaultValue,
|
|
1709
|
+
enums,
|
|
1710
|
+
val,
|
|
1711
|
+
services
|
|
1712
|
+
});
|
|
1713
|
+
}
|
|
1714
|
+
if (val.startsWith("'") && val.endsWith("'")) {
|
|
1715
|
+
return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'"));
|
|
1716
|
+
}
|
|
1717
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
1718
|
+
case "Json":
|
|
1719
|
+
if (val.includes("::")) {
|
|
1720
|
+
return typeCastingConvert({
|
|
1721
|
+
defaultValue,
|
|
1722
|
+
enums,
|
|
1723
|
+
val,
|
|
1724
|
+
services
|
|
1725
|
+
});
|
|
1726
|
+
}
|
|
1727
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
1728
|
+
case "Bytes":
|
|
1729
|
+
if (val.includes("::")) {
|
|
1730
|
+
return typeCastingConvert({
|
|
1731
|
+
defaultValue,
|
|
1732
|
+
enums,
|
|
1733
|
+
val,
|
|
1734
|
+
services
|
|
1735
|
+
});
|
|
1736
|
+
}
|
|
1737
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
1738
|
+
}
|
|
1739
|
+
if (val.includes("(") && val.includes(")")) {
|
|
1740
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
|
|
1741
|
+
}
|
|
1742
|
+
console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
|
|
1743
|
+
return null;
|
|
1744
|
+
},
|
|
1745
|
+
getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
|
|
1746
|
+
const factories = [];
|
|
1747
|
+
if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
|
|
1748
|
+
factories.push(new import_factory3.DataFieldAttributeFactory().setDecl(getAttributeRef("@updatedAt", services)));
|
|
1749
|
+
}
|
|
1750
|
+
const nativeTypeName = pgTypnameToZenStackNativeType[datatype.toLowerCase()] ?? datatype;
|
|
1751
|
+
const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${nativeTypeName.toLowerCase()}`)?.node;
|
|
1752
|
+
const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
|
|
1753
|
+
const normalizedDatatype = pgTypnameToStandard[datatype.toLowerCase()] ?? datatype.toLowerCase();
|
|
1754
|
+
const standardPrecision = standardTypePrecisions[datatype.toLowerCase()];
|
|
1755
|
+
const isStandardPrecision = standardPrecision !== void 0 && precision === standardPrecision;
|
|
1756
|
+
if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== normalizedDatatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
|
|
1757
|
+
const dbAttrFactory = new import_factory3.DataFieldAttributeFactory().setDecl(dbAttr);
|
|
1758
|
+
if ((length || precision) && !isStandardPrecision) {
|
|
1759
|
+
dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length || precision));
|
|
1760
|
+
}
|
|
1761
|
+
factories.push(dbAttrFactory);
|
|
1762
|
+
}
|
|
1763
|
+
return factories;
|
|
1764
|
+
}
|
|
1765
|
+
};
|
|
1766
|
+
var enumIntrospectionQuery = `
|
|
1767
|
+
SELECT
|
|
1768
|
+
n.nspname AS schema_name, -- schema the enum belongs to (e.g., 'public')
|
|
1769
|
+
t.typname AS enum_type, -- enum type name as defined in CREATE TYPE
|
|
1770
|
+
coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values -- ordered list of enum labels as JSON array
|
|
1771
|
+
FROM pg_type t -- pg_type: catalog of all data types
|
|
1772
|
+
JOIN pg_enum e ON t.oid = e.enumtypid -- pg_enum: one row per enum label; join to get labels for this enum type
|
|
1773
|
+
JOIN pg_namespace n ON n.oid = t.typnamespace -- pg_namespace: schema info; join to get the schema name
|
|
1774
|
+
GROUP BY schema_name, enum_type -- one row per enum type, with all labels aggregated
|
|
1775
|
+
ORDER BY schema_name, enum_type;`;
|
|
1776
|
+
var tableIntrospectionQuery = `
|
|
1777
|
+
-- Main query: one row per table/view with columns and indexes as nested JSON arrays.
|
|
1778
|
+
-- Joins pg_class (tables/views) with pg_namespace (schemas).
|
|
1779
|
+
SELECT
|
|
1780
|
+
"ns"."nspname" AS "schema", -- schema name (e.g., 'public')
|
|
1781
|
+
"cls"."relname" AS "name", -- table or view name
|
|
1782
|
+
CASE "cls"."relkind" -- relkind: 'r' = ordinary table, 'v' = view
|
|
1783
|
+
WHEN 'r' THEN 'table'
|
|
1784
|
+
WHEN 'v' THEN 'view'
|
|
1785
|
+
ELSE NULL
|
|
1786
|
+
END AS "type",
|
|
1787
|
+
CASE -- for views, retrieve the SQL definition
|
|
1788
|
+
WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true)
|
|
1789
|
+
ELSE NULL
|
|
1790
|
+
END AS "definition",
|
|
1791
|
+
|
|
1792
|
+
-- ===== COLUMNS subquery =====
|
|
1793
|
+
-- Aggregates all columns for this table into a JSON array.
|
|
1794
|
+
(
|
|
1795
|
+
SELECT coalesce(json_agg(agg), '[]')
|
|
1796
|
+
FROM (
|
|
1797
|
+
SELECT
|
|
1798
|
+
"att"."attname" AS "name", -- column name
|
|
1799
|
+
|
|
1800
|
+
-- datatype: if the type is an enum, report 'enum';
|
|
1801
|
+
-- if the column is generated/computed, construct the full DDL-like type definition
|
|
1802
|
+
-- (e.g., "text GENERATED ALWAYS AS (expr) STORED") so it can be rendered as Unsupported("...");
|
|
1803
|
+
-- otherwise use the pg_type name.
|
|
1804
|
+
CASE
|
|
1805
|
+
WHEN EXISTS (
|
|
1806
|
+
SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
|
|
1807
|
+
WHERE "e"."enumtypid" = "typ"."oid"
|
|
1808
|
+
) THEN 'enum'
|
|
1809
|
+
WHEN "att"."attgenerated" != '' THEN
|
|
1810
|
+
format_type("att"."atttypid", "att"."atttypmod")
|
|
1811
|
+
|| ' GENERATED ALWAYS AS ('
|
|
1812
|
+
|| pg_get_expr("def"."adbin", "def"."adrelid")
|
|
1813
|
+
|| ') '
|
|
1814
|
+
|| CASE "att"."attgenerated"
|
|
1815
|
+
WHEN 's' THEN 'STORED'
|
|
1816
|
+
WHEN 'v' THEN 'VIRTUAL'
|
|
1817
|
+
ELSE 'STORED'
|
|
1818
|
+
END
|
|
1819
|
+
ELSE "typ"."typname"::text -- internal type name (e.g., 'int4', 'varchar', 'text'); cast to text to prevent CASE from coercing result to name type (max 63 chars)
|
|
1820
|
+
END AS "datatype",
|
|
1821
|
+
|
|
1822
|
+
-- datatype_name: for enums only, the actual enum type name (used to look up the enum definition)
|
|
1823
|
+
CASE
|
|
1824
|
+
WHEN EXISTS (
|
|
1825
|
+
SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
|
|
1826
|
+
WHERE "e"."enumtypid" = "typ"."oid"
|
|
1827
|
+
) THEN "typ"."typname"
|
|
1828
|
+
ELSE NULL
|
|
1829
|
+
END AS "datatype_name",
|
|
1830
|
+
|
|
1831
|
+
"tns"."nspname" AS "datatype_schema", -- schema where the data type is defined
|
|
1832
|
+
"c"."character_maximum_length" AS "length", -- max length for char/varchar types (from information_schema)
|
|
1833
|
+
COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", -- numeric or datetime precision
|
|
1834
|
+
|
|
1835
|
+
-- Foreign key info (NULL if column is not part of a FK constraint)
|
|
1836
|
+
"fk_ns"."nspname" AS "foreign_key_schema", -- schema of the referenced table
|
|
1837
|
+
"fk_cls"."relname" AS "foreign_key_table", -- referenced table name
|
|
1838
|
+
"fk_att"."attname" AS "foreign_key_column", -- referenced column name
|
|
1839
|
+
"fk_con"."conname" AS "foreign_key_name", -- FK constraint name
|
|
1840
|
+
|
|
1841
|
+
-- FK referential actions: decode single-char codes to human-readable strings
|
|
1842
|
+
CASE "fk_con"."confupdtype"
|
|
1843
|
+
WHEN 'a' THEN 'NO ACTION'
|
|
1844
|
+
WHEN 'r' THEN 'RESTRICT'
|
|
1845
|
+
WHEN 'c' THEN 'CASCADE'
|
|
1846
|
+
WHEN 'n' THEN 'SET NULL'
|
|
1847
|
+
WHEN 'd' THEN 'SET DEFAULT'
|
|
1848
|
+
ELSE NULL
|
|
1849
|
+
END AS "foreign_key_on_update",
|
|
1850
|
+
CASE "fk_con"."confdeltype"
|
|
1851
|
+
WHEN 'a' THEN 'NO ACTION'
|
|
1852
|
+
WHEN 'r' THEN 'RESTRICT'
|
|
1853
|
+
WHEN 'c' THEN 'CASCADE'
|
|
1854
|
+
WHEN 'n' THEN 'SET NULL'
|
|
1855
|
+
WHEN 'd' THEN 'SET DEFAULT'
|
|
1856
|
+
ELSE NULL
|
|
1857
|
+
END AS "foreign_key_on_delete",
|
|
1858
|
+
|
|
1859
|
+
-- pk: true if this column is part of the table's primary key constraint
|
|
1860
|
+
"pk_con"."conkey" IS NOT NULL AS "pk",
|
|
1861
|
+
|
|
1862
|
+
-- unique: true if the column has a single-column UNIQUE constraint OR a single-column unique index
|
|
1863
|
+
(
|
|
1864
|
+
-- Check for a single-column UNIQUE constraint (contype = 'u')
|
|
1865
|
+
EXISTS (
|
|
1866
|
+
SELECT 1
|
|
1867
|
+
FROM "pg_catalog"."pg_constraint" AS "u_con"
|
|
1868
|
+
WHERE "u_con"."contype" = 'u' -- 'u' = unique constraint
|
|
1869
|
+
AND "u_con"."conrelid" = "cls"."oid" -- on this table
|
|
1870
|
+
AND array_length("u_con"."conkey", 1) = 1 -- single-column only
|
|
1871
|
+
AND "att"."attnum" = ANY ("u_con"."conkey") -- this column is in the constraint
|
|
1872
|
+
)
|
|
1873
|
+
OR
|
|
1874
|
+
-- Check for a single-column unique index (may exist without an explicit constraint)
|
|
1875
|
+
EXISTS (
|
|
1876
|
+
SELECT 1
|
|
1877
|
+
FROM "pg_catalog"."pg_index" AS "u_idx"
|
|
1878
|
+
WHERE "u_idx"."indrelid" = "cls"."oid" -- on this table
|
|
1879
|
+
AND "u_idx"."indisunique" = TRUE -- it's a unique index
|
|
1880
|
+
AND "u_idx"."indnkeyatts" = 1 -- single key column
|
|
1881
|
+
AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) -- this column is the key
|
|
1882
|
+
)
|
|
1883
|
+
) AS "unique",
|
|
1884
|
+
|
|
1885
|
+
-- unique_name: the name of the unique constraint or index (whichever exists first)
|
|
1886
|
+
(
|
|
1887
|
+
SELECT COALESCE(
|
|
1888
|
+
-- Try constraint name first
|
|
1889
|
+
(
|
|
1890
|
+
SELECT "u_con"."conname"
|
|
1891
|
+
FROM "pg_catalog"."pg_constraint" AS "u_con"
|
|
1892
|
+
WHERE "u_con"."contype" = 'u'
|
|
1893
|
+
AND "u_con"."conrelid" = "cls"."oid"
|
|
1894
|
+
AND array_length("u_con"."conkey", 1) = 1
|
|
1895
|
+
AND "att"."attnum" = ANY ("u_con"."conkey")
|
|
1896
|
+
LIMIT 1
|
|
1897
|
+
),
|
|
1898
|
+
-- Fall back to unique index name
|
|
1899
|
+
(
|
|
1900
|
+
SELECT "u_idx_cls"."relname"
|
|
1901
|
+
FROM "pg_catalog"."pg_index" AS "u_idx"
|
|
1902
|
+
JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid"
|
|
1903
|
+
WHERE "u_idx"."indrelid" = "cls"."oid"
|
|
1904
|
+
AND "u_idx"."indisunique" = TRUE
|
|
1905
|
+
AND "u_idx"."indnkeyatts" = 1
|
|
1906
|
+
AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[])
|
|
1907
|
+
LIMIT 1
|
|
1908
|
+
)
|
|
1909
|
+
)
|
|
1910
|
+
) AS "unique_name",
|
|
1911
|
+
|
|
1912
|
+
"att"."attgenerated" != '' AS "computed", -- true if column is a generated/computed column
|
|
1913
|
+
-- For generated columns, pg_attrdef stores the generation expression (not a default),
|
|
1914
|
+
-- so we must null it out to avoid emitting a spurious @default(dbgenerated(...)) attribute.
|
|
1915
|
+
CASE
|
|
1916
|
+
WHEN "att"."attgenerated" != '' THEN NULL
|
|
1917
|
+
ELSE pg_get_expr("def"."adbin", "def"."adrelid")
|
|
1918
|
+
END AS "default", -- column default expression as text (e.g., 'nextval(...)', '0', 'now()')
|
|
1919
|
+
"att"."attnotnull" != TRUE AS "nullable", -- true if column allows NULL values
|
|
1920
|
+
|
|
1921
|
+
-- options: for enum columns, aggregates all allowed enum labels into a JSON array
|
|
1922
|
+
coalesce(
|
|
1923
|
+
(
|
|
1924
|
+
SELECT json_agg("enm"."enumlabel") AS "o"
|
|
1925
|
+
FROM "pg_catalog"."pg_enum" AS "enm"
|
|
1926
|
+
WHERE "enm"."enumtypid" = "typ"."oid"
|
|
1927
|
+
),
|
|
1928
|
+
'[]'
|
|
1929
|
+
) AS "options"
|
|
1930
|
+
|
|
1931
|
+
-- === FROM / JOINs for the columns subquery ===
|
|
1932
|
+
|
|
1933
|
+
-- pg_attribute: one row per table column (attnum >= 0 excludes system columns)
|
|
1934
|
+
FROM "pg_catalog"."pg_attribute" AS "att"
|
|
1935
|
+
|
|
1936
|
+
-- pg_type: data type of the column (e.g., int4, text, custom_enum)
|
|
1937
|
+
INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid"
|
|
1938
|
+
|
|
1939
|
+
-- pg_namespace for the type: needed to determine which schema the type lives in
|
|
1940
|
+
INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace"
|
|
1941
|
+
|
|
1942
|
+
-- information_schema.columns: provides length/precision info not easily available from pg_catalog
|
|
1943
|
+
LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname"
|
|
1944
|
+
AND "c"."table_name" = "cls"."relname"
|
|
1945
|
+
AND "c"."column_name" = "att"."attname"
|
|
1946
|
+
|
|
1947
|
+
-- pg_constraint (primary key): join on contype='p' to detect if column is part of PK
|
|
1948
|
+
LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p'
|
|
1949
|
+
AND "pk_con"."conrelid" = "cls"."oid"
|
|
1950
|
+
AND "att"."attnum" = ANY ("pk_con"."conkey")
|
|
1951
|
+
|
|
1952
|
+
-- pg_constraint (foreign key): join on contype='f' to get FK details for this column
|
|
1953
|
+
LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f'
|
|
1954
|
+
AND "fk_con"."conrelid" = "cls"."oid"
|
|
1955
|
+
AND "att"."attnum" = ANY ("fk_con"."conkey")
|
|
1956
|
+
|
|
1957
|
+
-- pg_class for FK target table: resolve the referenced table's OID to its name
|
|
1958
|
+
LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid"
|
|
1959
|
+
|
|
1960
|
+
-- pg_namespace for FK target: get the schema of the referenced table
|
|
1961
|
+
LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace"
|
|
1962
|
+
|
|
1963
|
+
-- pg_attribute for FK target column: resolve the referenced column number to its name.
|
|
1964
|
+
-- Use array_position to correlate by position: find this source column's index in conkey,
|
|
1965
|
+
-- then pick the referenced attnum at that same index from confkey.
|
|
1966
|
+
-- This ensures composite FKs correctly map each source column to its corresponding target column.
|
|
1967
|
+
LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid"
|
|
1968
|
+
AND "fk_att"."attnum" = "fk_con"."confkey"[array_position("fk_con"."conkey", "att"."attnum")]
|
|
1969
|
+
|
|
1970
|
+
-- pg_attrdef: column defaults; adbin contains the internal expression, decoded via pg_get_expr()
|
|
1971
|
+
LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum"
|
|
1972
|
+
|
|
1973
|
+
WHERE
|
|
1974
|
+
"att"."attrelid" = "cls"."oid" -- only columns belonging to this table
|
|
1975
|
+
AND "att"."attnum" >= 0 -- exclude system columns (ctid, xmin, etc. have attnum < 0)
|
|
1976
|
+
AND "att"."attisdropped" != TRUE -- exclude dropped (deleted) columns
|
|
1977
|
+
ORDER BY "att"."attnum" -- preserve original column order
|
|
1978
|
+
) AS agg
|
|
1979
|
+
) AS "columns",
|
|
1980
|
+
|
|
1981
|
+
-- ===== INDEXES subquery =====
|
|
1982
|
+
-- Aggregates all indexes for this table into a JSON array.
|
|
1983
|
+
(
|
|
1984
|
+
SELECT coalesce(json_agg(agg), '[]')
|
|
1985
|
+
FROM (
|
|
1986
|
+
SELECT
|
|
1987
|
+
"idx_cls"."relname" AS "name", -- index name
|
|
1988
|
+
"am"."amname" AS "method", -- access method (e.g., 'btree', 'hash', 'gin', 'gist')
|
|
1989
|
+
"idx"."indisunique" AS "unique", -- true if unique index
|
|
1990
|
+
"idx"."indisprimary" AS "primary", -- true if this is the PK index
|
|
1991
|
+
"idx"."indisvalid" AS "valid", -- false during concurrent index builds
|
|
1992
|
+
"idx"."indisready" AS "ready", -- true when index is ready for inserts
|
|
1993
|
+
("idx"."indpred" IS NOT NULL) AS "partial", -- true if index has a WHERE clause (partial index)
|
|
1994
|
+
pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", -- the WHERE clause expression for partial indexes
|
|
1995
|
+
|
|
1996
|
+
-- Index columns: iterate over each position in the index key array
|
|
1997
|
+
(
|
|
1998
|
+
SELECT json_agg(
|
|
1999
|
+
json_build_object(
|
|
2000
|
+
-- 'name': column name, or for expression indexes the expression text
|
|
2001
|
+
'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)),
|
|
2002
|
+
-- 'expression': non-null only for expression-based index columns (e.g., lower(name))
|
|
2003
|
+
'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END,
|
|
2004
|
+
-- 'order': sort direction; bit 0 of indoption = 1 means DESC
|
|
2005
|
+
'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END,
|
|
2006
|
+
-- 'nulls': null ordering; bit 1 of indoption = 1 means NULLS FIRST
|
|
2007
|
+
'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END
|
|
2008
|
+
)
|
|
2009
|
+
ORDER BY "s"."i" -- preserve column order within the index
|
|
2010
|
+
)
|
|
2011
|
+
-- generate_subscripts creates one row per index key position (1-based)
|
|
2012
|
+
FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i")
|
|
2013
|
+
-- Join to pg_attribute to resolve column numbers to names
|
|
2014
|
+
-- NULL attname means it's an expression index column
|
|
2015
|
+
LEFT JOIN "pg_catalog"."pg_attribute" AS "att"
|
|
2016
|
+
ON "att"."attrelid" = "cls"."oid"
|
|
2017
|
+
AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"]
|
|
2018
|
+
) AS "columns"
|
|
2019
|
+
|
|
2020
|
+
FROM "pg_catalog"."pg_index" AS "idx" -- pg_index: one row per index
|
|
2021
|
+
JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" -- index's own pg_class entry (for the name)
|
|
2022
|
+
JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" -- access method catalog
|
|
2023
|
+
WHERE "idx"."indrelid" = "cls"."oid" -- only indexes on this table
|
|
2024
|
+
ORDER BY "idx_cls"."relname"
|
|
2025
|
+
) AS agg
|
|
2026
|
+
) AS "indexes"
|
|
2027
|
+
|
|
2028
|
+
-- === Main FROM: pg_class (tables and views) joined with pg_namespace (schemas) ===
|
|
2029
|
+
FROM "pg_catalog"."pg_class" AS "cls"
|
|
2030
|
+
INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid"
|
|
2031
|
+
WHERE
|
|
2032
|
+
"ns"."nspname" !~ '^pg_' -- exclude PostgreSQL internal schemas (pg_catalog, pg_toast, etc.)
|
|
2033
|
+
AND "ns"."nspname" != 'information_schema' -- exclude the information_schema
|
|
2034
|
+
AND "cls"."relkind" IN ('r', 'v') -- only tables ('r') and views ('v')
|
|
2035
|
+
AND "cls"."relname" !~ '^pg_' -- exclude system tables starting with pg_
|
|
2036
|
+
AND "cls"."relname" !~ '_prisma_migrations' -- exclude Prisma migration tracking table
|
|
2037
|
+
ORDER BY "ns"."nspname", "cls"."relname" ASC;
|
|
2038
|
+
`;
|
|
2039
|
+
function typeCastingConvert({ defaultValue, enums, val, services }) {
|
|
2040
|
+
const [value, type2] = val.replace(/'/g, "").split("::").map((s) => s.trim());
|
|
2041
|
+
switch (type2) {
|
|
2042
|
+
case "character varying":
|
|
2043
|
+
case "uuid":
|
|
2044
|
+
case "json":
|
|
2045
|
+
case "jsonb":
|
|
2046
|
+
case "text":
|
|
2047
|
+
if (value === "NULL") return null;
|
|
2048
|
+
return (ab) => ab.StringLiteral.setValue(value);
|
|
2049
|
+
case "real":
|
|
2050
|
+
return (ab) => ab.NumberLiteral.setValue(value);
|
|
2051
|
+
default: {
|
|
2052
|
+
const enumDef = enums.find((e) => getDbName(e, true) === type2);
|
|
2053
|
+
if (!enumDef) {
|
|
2054
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
|
|
2055
|
+
}
|
|
2056
|
+
const enumField = enumDef.fields.find((v) => getDbName(v) === value);
|
|
2057
|
+
if (!enumField) {
|
|
2058
|
+
throw new CliError(`Enum value ${value} not found in enum ${type2} for default value ${defaultValue}`);
|
|
2059
|
+
}
|
|
2060
|
+
return (ab) => ab.ReferenceExpr.setTarget(enumField);
|
|
2061
|
+
}
|
|
2062
|
+
}
|
|
2063
|
+
}
|
|
2064
|
+
__name(typeCastingConvert, "typeCastingConvert");
|
|
2065
|
+
|
|
2066
|
+
// src/actions/pull/provider/sqlite.ts
|
|
2067
|
+
var import_factory4 = require("@zenstackhq/language/factory");
|
|
2068
|
+
var sqlite = {
|
|
2069
|
+
isSupportedFeature(feature) {
|
|
2070
|
+
switch (feature) {
|
|
2071
|
+
case "Schema":
|
|
2072
|
+
return false;
|
|
2073
|
+
case "NativeEnum":
|
|
2074
|
+
return false;
|
|
2075
|
+
default:
|
|
2076
|
+
return false;
|
|
2077
|
+
}
|
|
2078
|
+
},
|
|
2079
|
+
getBuiltinType(type2) {
|
|
2080
|
+
const t = (type2 || "").toLowerCase().trim().replace(/\(.*\)$/, "").trim();
|
|
2081
|
+
const isArray = false;
|
|
2082
|
+
switch (t) {
|
|
2083
|
+
// INTEGER types (SQLite: INT, INTEGER, TINYINT, SMALLINT, MEDIUMINT, INT2, INT8)
|
|
2084
|
+
case "integer":
|
|
2085
|
+
case "int":
|
|
2086
|
+
case "tinyint":
|
|
2087
|
+
case "smallint":
|
|
2088
|
+
case "mediumint":
|
|
2089
|
+
case "int2":
|
|
2090
|
+
case "int8":
|
|
2091
|
+
return {
|
|
2092
|
+
type: "Int",
|
|
2093
|
+
isArray
|
|
2094
|
+
};
|
|
2095
|
+
// BIGINT - map to BigInt for large integers
|
|
2096
|
+
case "bigint":
|
|
2097
|
+
case "unsigned big int":
|
|
2098
|
+
return {
|
|
2099
|
+
type: "BigInt",
|
|
2100
|
+
isArray
|
|
2101
|
+
};
|
|
2102
|
+
// TEXT types (SQLite: CHARACTER, VARCHAR, VARYING CHARACTER, NCHAR, NATIVE CHARACTER, NVARCHAR, TEXT, CLOB)
|
|
2103
|
+
case "text":
|
|
2104
|
+
case "varchar":
|
|
2105
|
+
case "char":
|
|
2106
|
+
case "character":
|
|
2107
|
+
case "varying character":
|
|
2108
|
+
case "nchar":
|
|
2109
|
+
case "native character":
|
|
2110
|
+
case "nvarchar":
|
|
2111
|
+
case "clob":
|
|
2112
|
+
return {
|
|
2113
|
+
type: "String",
|
|
2114
|
+
isArray
|
|
2115
|
+
};
|
|
2116
|
+
// BLOB type
|
|
2117
|
+
case "blob":
|
|
2118
|
+
return {
|
|
2119
|
+
type: "Bytes",
|
|
2120
|
+
isArray
|
|
2121
|
+
};
|
|
2122
|
+
// REAL types (SQLite: REAL, DOUBLE, DOUBLE PRECISION, FLOAT)
|
|
2123
|
+
case "real":
|
|
2124
|
+
case "float":
|
|
2125
|
+
case "double":
|
|
2126
|
+
case "double precision":
|
|
2127
|
+
return {
|
|
2128
|
+
type: "Float",
|
|
2129
|
+
isArray
|
|
2130
|
+
};
|
|
2131
|
+
// NUMERIC types (SQLite: NUMERIC, DECIMAL)
|
|
2132
|
+
case "numeric":
|
|
2133
|
+
case "decimal":
|
|
2134
|
+
return {
|
|
2135
|
+
type: "Decimal",
|
|
2136
|
+
isArray
|
|
2137
|
+
};
|
|
2138
|
+
// DateTime types
|
|
2139
|
+
case "datetime":
|
|
2140
|
+
case "date":
|
|
2141
|
+
case "time":
|
|
2142
|
+
case "timestamp":
|
|
2143
|
+
return {
|
|
2144
|
+
type: "DateTime",
|
|
2145
|
+
isArray
|
|
2146
|
+
};
|
|
2147
|
+
// JSON types
|
|
2148
|
+
case "json":
|
|
2149
|
+
case "jsonb":
|
|
2150
|
+
return {
|
|
2151
|
+
type: "Json",
|
|
2152
|
+
isArray
|
|
2153
|
+
};
|
|
2154
|
+
// Boolean types
|
|
2155
|
+
case "boolean":
|
|
2156
|
+
case "bool":
|
|
2157
|
+
return {
|
|
2158
|
+
type: "Boolean",
|
|
2159
|
+
isArray
|
|
2160
|
+
};
|
|
2161
|
+
default: {
|
|
2162
|
+
if (!t) {
|
|
2163
|
+
return {
|
|
2164
|
+
type: "Bytes",
|
|
2165
|
+
isArray
|
|
2166
|
+
};
|
|
2167
|
+
}
|
|
2168
|
+
if (t.includes("int")) {
|
|
2169
|
+
return {
|
|
2170
|
+
type: "Int",
|
|
2171
|
+
isArray
|
|
2172
|
+
};
|
|
2173
|
+
}
|
|
2174
|
+
if (t.includes("char") || t.includes("clob") || t.includes("text")) {
|
|
2175
|
+
return {
|
|
2176
|
+
type: "String",
|
|
2177
|
+
isArray
|
|
2178
|
+
};
|
|
2179
|
+
}
|
|
2180
|
+
if (t.includes("blob")) {
|
|
2181
|
+
return {
|
|
2182
|
+
type: "Bytes",
|
|
2183
|
+
isArray
|
|
2184
|
+
};
|
|
2185
|
+
}
|
|
2186
|
+
if (t.includes("real") || t.includes("floa") || t.includes("doub")) {
|
|
2187
|
+
return {
|
|
2188
|
+
type: "Float",
|
|
2189
|
+
isArray
|
|
2190
|
+
};
|
|
2191
|
+
}
|
|
2192
|
+
return {
|
|
2193
|
+
type: "Unsupported",
|
|
2194
|
+
isArray
|
|
2195
|
+
};
|
|
2196
|
+
}
|
|
2197
|
+
}
|
|
2198
|
+
},
|
|
2199
|
+
getDefaultDatabaseType() {
|
|
2200
|
+
return void 0;
|
|
2201
|
+
},
|
|
2202
|
+
async introspect(connectionString, _options) {
|
|
2203
|
+
const SQLite2 = (await import("better-sqlite3")).default;
|
|
2204
|
+
const db = new SQLite2(connectionString, {
|
|
2205
|
+
readonly: true
|
|
2206
|
+
});
|
|
2207
|
+
try {
|
|
2208
|
+
const all = /* @__PURE__ */ __name((sql) => {
|
|
2209
|
+
const stmt = db.prepare(sql);
|
|
2210
|
+
return stmt.all();
|
|
2211
|
+
}, "all");
|
|
2212
|
+
const tablesRaw = all("SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name");
|
|
2213
|
+
const autoIncrementTables = /* @__PURE__ */ new Set();
|
|
2214
|
+
for (const t of tablesRaw) {
|
|
2215
|
+
if (t.type === "table" && t.definition) {
|
|
2216
|
+
if (/\bAUTOINCREMENT\b/i.test(t.definition)) {
|
|
2217
|
+
autoIncrementTables.add(t.name);
|
|
2218
|
+
}
|
|
2219
|
+
}
|
|
2220
|
+
}
|
|
2221
|
+
const tables = [];
|
|
2222
|
+
for (const t of tablesRaw) {
|
|
2223
|
+
const tableName = t.name;
|
|
2224
|
+
const schema = "";
|
|
2225
|
+
const hasAutoIncrement = autoIncrementTables.has(tableName);
|
|
2226
|
+
const columnsInfo = all(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`);
|
|
2227
|
+
const tableNameEsc = tableName.replace(/'/g, "''");
|
|
2228
|
+
const idxList = all(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => !r.name.startsWith("sqlite_autoindex_"));
|
|
2229
|
+
const uniqueSingleColumn = /* @__PURE__ */ new Set();
|
|
2230
|
+
const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1);
|
|
2231
|
+
for (const idx of uniqueIndexRows) {
|
|
2232
|
+
const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
|
|
2233
|
+
if (idxCols.length === 1 && idxCols[0]?.name) {
|
|
2234
|
+
uniqueSingleColumn.add(idxCols[0].name);
|
|
2235
|
+
}
|
|
2236
|
+
}
|
|
2237
|
+
const indexes = idxList.map((idx) => {
|
|
2238
|
+
const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
|
|
2239
|
+
return {
|
|
2240
|
+
name: idx.name,
|
|
2241
|
+
method: null,
|
|
2242
|
+
unique: idx.unique === 1,
|
|
2243
|
+
primary: false,
|
|
2244
|
+
valid: true,
|
|
2245
|
+
ready: true,
|
|
2246
|
+
partial: idx.partial === 1,
|
|
2247
|
+
predicate: idx.partial === 1 ? "[partial]" : null,
|
|
2248
|
+
columns: idxCols.map((col) => ({
|
|
2249
|
+
name: col.name,
|
|
2250
|
+
expression: null,
|
|
2251
|
+
order: null,
|
|
2252
|
+
nulls: null
|
|
2253
|
+
}))
|
|
2254
|
+
};
|
|
2255
|
+
});
|
|
2256
|
+
const fkRows = all(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`);
|
|
2257
|
+
const fkConstraintNames = /* @__PURE__ */ new Map();
|
|
2258
|
+
if (t.definition) {
|
|
2259
|
+
const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(([^)]+)\)/gi;
|
|
2260
|
+
let match;
|
|
2261
|
+
while ((match = fkRegex.exec(t.definition)) !== null) {
|
|
2262
|
+
const constraintName = match[1] || match[2];
|
|
2263
|
+
const columnList = match[3];
|
|
2264
|
+
if (constraintName && columnList) {
|
|
2265
|
+
const columns2 = columnList.split(",").map((col) => col.trim().replace(/^["'`]|["'`]$/g, ""));
|
|
2266
|
+
for (const col of columns2) {
|
|
2267
|
+
if (col) {
|
|
2268
|
+
fkConstraintNames.set(col, constraintName);
|
|
2269
|
+
}
|
|
2270
|
+
}
|
|
2271
|
+
}
|
|
2272
|
+
}
|
|
2273
|
+
}
|
|
2274
|
+
const fkByColumn = /* @__PURE__ */ new Map();
|
|
2275
|
+
for (const fk of fkRows) {
|
|
2276
|
+
fkByColumn.set(fk.from, {
|
|
2277
|
+
foreign_key_schema: "",
|
|
2278
|
+
foreign_key_table: fk.table || null,
|
|
2279
|
+
foreign_key_column: fk.to || null,
|
|
2280
|
+
foreign_key_name: fkConstraintNames.get(fk.from) ?? null,
|
|
2281
|
+
foreign_key_on_update: fk.on_update ?? null,
|
|
2282
|
+
foreign_key_on_delete: fk.on_delete ?? null
|
|
2283
|
+
});
|
|
2284
|
+
}
|
|
2285
|
+
const generatedColDefs = t.definition ? extractColumnTypeDefs(t.definition) : /* @__PURE__ */ new Map();
|
|
2286
|
+
const columns = [];
|
|
2287
|
+
for (const c of columnsInfo) {
|
|
2288
|
+
const hidden = c.hidden ?? 0;
|
|
2289
|
+
if (hidden === 1) continue;
|
|
2290
|
+
const isGenerated = hidden === 2 || hidden === 3;
|
|
2291
|
+
const fk = fkByColumn.get(c.name);
|
|
2292
|
+
let defaultValue = c.dflt_value;
|
|
2293
|
+
if (hasAutoIncrement && c.pk) {
|
|
2294
|
+
defaultValue = "autoincrement";
|
|
2295
|
+
}
|
|
2296
|
+
let datatype = c.type || "";
|
|
2297
|
+
if (isGenerated) {
|
|
2298
|
+
const fullDef = generatedColDefs.get(c.name);
|
|
2299
|
+
if (fullDef) {
|
|
2300
|
+
datatype = fullDef;
|
|
2301
|
+
}
|
|
2302
|
+
}
|
|
2303
|
+
columns.push({
|
|
2304
|
+
name: c.name,
|
|
2305
|
+
datatype,
|
|
2306
|
+
datatype_name: null,
|
|
2307
|
+
length: null,
|
|
2308
|
+
precision: null,
|
|
2309
|
+
datatype_schema: schema,
|
|
2310
|
+
foreign_key_schema: fk?.foreign_key_schema ?? null,
|
|
2311
|
+
foreign_key_table: fk?.foreign_key_table ?? null,
|
|
2312
|
+
foreign_key_column: fk?.foreign_key_column ?? null,
|
|
2313
|
+
foreign_key_name: fk?.foreign_key_name ?? null,
|
|
2314
|
+
foreign_key_on_update: fk?.foreign_key_on_update ?? null,
|
|
2315
|
+
foreign_key_on_delete: fk?.foreign_key_on_delete ?? null,
|
|
2316
|
+
pk: !!c.pk,
|
|
2317
|
+
computed: isGenerated,
|
|
2318
|
+
nullable: c.notnull !== 1,
|
|
2319
|
+
default: defaultValue,
|
|
2320
|
+
unique: uniqueSingleColumn.has(c.name),
|
|
2321
|
+
unique_name: null
|
|
2322
|
+
});
|
|
2323
|
+
}
|
|
2324
|
+
tables.push({
|
|
2325
|
+
schema,
|
|
2326
|
+
name: tableName,
|
|
2327
|
+
columns,
|
|
2328
|
+
type: t.type,
|
|
2329
|
+
definition: t.definition,
|
|
2330
|
+
indexes
|
|
2331
|
+
});
|
|
2332
|
+
}
|
|
2333
|
+
const enums = [];
|
|
2334
|
+
return {
|
|
2335
|
+
tables,
|
|
2336
|
+
enums
|
|
2337
|
+
};
|
|
2338
|
+
} finally {
|
|
2339
|
+
db.close();
|
|
2340
|
+
}
|
|
2341
|
+
},
|
|
2342
|
+
getDefaultValue({ defaultValue, fieldType, services, enums }) {
|
|
2343
|
+
const val = defaultValue.trim();
|
|
2344
|
+
switch (fieldType) {
|
|
2345
|
+
case "DateTime":
|
|
2346
|
+
if (val === "CURRENT_TIMESTAMP" || val === "now()") {
|
|
2347
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
|
|
2348
|
+
}
|
|
2349
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
2350
|
+
case "Int":
|
|
2351
|
+
case "BigInt":
|
|
2352
|
+
if (val === "autoincrement") {
|
|
2353
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
|
|
2354
|
+
}
|
|
2355
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
2356
|
+
case "Float":
|
|
2357
|
+
return normalizeFloatDefault(val);
|
|
2358
|
+
case "Decimal":
|
|
2359
|
+
return normalizeDecimalDefault(val);
|
|
2360
|
+
case "Boolean":
|
|
2361
|
+
return (ab) => ab.BooleanLiteral.setValue(val === "true" || val === "1");
|
|
2362
|
+
case "String":
|
|
2363
|
+
if (val.startsWith("'") && val.endsWith("'")) {
|
|
2364
|
+
const strippedName = val.slice(1, -1);
|
|
2365
|
+
const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName));
|
|
2366
|
+
if (enumDef) {
|
|
2367
|
+
const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName);
|
|
2368
|
+
if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField);
|
|
2369
|
+
}
|
|
2370
|
+
return (ab) => ab.StringLiteral.setValue(strippedName);
|
|
2371
|
+
}
|
|
2372
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
2373
|
+
case "Json":
|
|
2374
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
2375
|
+
case "Bytes":
|
|
2376
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
2377
|
+
}
|
|
2378
|
+
console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
|
|
2379
|
+
return null;
|
|
2380
|
+
},
|
|
2381
|
+
getFieldAttributes({ fieldName, fieldType, services }) {
|
|
2382
|
+
const factories = [];
|
|
2383
|
+
if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
|
|
2384
|
+
factories.push(new import_factory4.DataFieldAttributeFactory().setDecl(getAttributeRef("@updatedAt", services)));
|
|
2385
|
+
}
|
|
2386
|
+
return factories;
|
|
2387
|
+
}
|
|
2388
|
+
};
|
|
2389
|
+
function extractColumnTypeDefs(ddl) {
|
|
2390
|
+
const openIdx = ddl.indexOf("(");
|
|
2391
|
+
if (openIdx === -1) return /* @__PURE__ */ new Map();
|
|
2392
|
+
let depth = 1;
|
|
2393
|
+
let closeIdx = -1;
|
|
2394
|
+
for (let i = openIdx + 1; i < ddl.length; i++) {
|
|
2395
|
+
if (ddl[i] === "(") depth++;
|
|
2396
|
+
else if (ddl[i] === ")") {
|
|
2397
|
+
depth--;
|
|
2398
|
+
if (depth === 0) {
|
|
2399
|
+
closeIdx = i;
|
|
2400
|
+
break;
|
|
2401
|
+
}
|
|
2402
|
+
}
|
|
2403
|
+
}
|
|
2404
|
+
if (closeIdx === -1) return /* @__PURE__ */ new Map();
|
|
2405
|
+
const content = ddl.substring(openIdx + 1, closeIdx);
|
|
2406
|
+
const defs = [];
|
|
2407
|
+
let current = "";
|
|
2408
|
+
depth = 0;
|
|
2409
|
+
for (const char of content) {
|
|
2410
|
+
if (char === "(") depth++;
|
|
2411
|
+
else if (char === ")") depth--;
|
|
2412
|
+
else if (char === "," && depth === 0) {
|
|
2413
|
+
defs.push(current.trim());
|
|
2414
|
+
current = "";
|
|
2415
|
+
continue;
|
|
2416
|
+
}
|
|
2417
|
+
current += char;
|
|
2418
|
+
}
|
|
2419
|
+
if (current.trim()) defs.push(current.trim());
|
|
2420
|
+
const result = /* @__PURE__ */ new Map();
|
|
2421
|
+
for (const def of defs) {
|
|
2422
|
+
const nameMatch = def.match(/^(?:["'`]([^"'`]+)["'`]|(\w+))\s+(.+)/s);
|
|
2423
|
+
if (nameMatch) {
|
|
2424
|
+
const name = nameMatch[1] || nameMatch[2];
|
|
2425
|
+
const typeDef = nameMatch[3];
|
|
2426
|
+
if (name && typeDef) {
|
|
2427
|
+
result.set(name, typeDef.trim());
|
|
2428
|
+
}
|
|
2429
|
+
}
|
|
2430
|
+
}
|
|
2431
|
+
return result;
|
|
2432
|
+
}
|
|
2433
|
+
__name(extractColumnTypeDefs, "extractColumnTypeDefs");
|
|
2434
|
+
|
|
2435
|
+
// src/actions/pull/provider/index.ts
|
|
2436
|
+
var providers = {
|
|
2437
|
+
mysql,
|
|
2438
|
+
postgresql,
|
|
2439
|
+
sqlite
|
|
2440
|
+
};
|
|
2441
|
+
|
|
251
2442
|
// src/actions/db.ts
|
|
252
2443
|
async function run2(command, options) {
|
|
253
2444
|
switch (command) {
|
|
254
2445
|
case "push":
|
|
255
2446
|
await runPush(options);
|
|
256
2447
|
break;
|
|
2448
|
+
case "pull":
|
|
2449
|
+
await runPull(options);
|
|
2450
|
+
break;
|
|
257
2451
|
}
|
|
258
2452
|
}
|
|
259
2453
|
__name(run2, "run");
|
|
@@ -281,37 +2475,436 @@ async function runPush(options) {
|
|
|
281
2475
|
}
|
|
282
2476
|
}
|
|
283
2477
|
__name(runPush, "runPush");
|
|
2478
|
+
async function runPull(options) {
|
|
2479
|
+
const spinner = (0, import_ora.default)();
|
|
2480
|
+
try {
|
|
2481
|
+
const schemaFile = getSchemaFile(options.schema);
|
|
2482
|
+
const outPath = options.output ? import_node_path2.default.resolve(options.output) : void 0;
|
|
2483
|
+
const treatAsFile = !!outPath && (import_node_fs2.default.existsSync(outPath) && import_node_fs2.default.lstatSync(outPath).isFile() || import_node_path2.default.extname(outPath) !== "");
|
|
2484
|
+
const { model, services } = await loadSchemaDocument(schemaFile, {
|
|
2485
|
+
returnServices: true,
|
|
2486
|
+
mergeImports: treatAsFile
|
|
2487
|
+
});
|
|
2488
|
+
const SUPPORTED_PROVIDERS = Object.keys(providers);
|
|
2489
|
+
const datasource = getDatasource(model);
|
|
2490
|
+
if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) {
|
|
2491
|
+
throw new CliError(`Unsupported datasource provider: ${datasource.provider}`);
|
|
2492
|
+
}
|
|
2493
|
+
const provider = providers[datasource.provider];
|
|
2494
|
+
if (!provider) {
|
|
2495
|
+
throw new CliError(`No introspection provider found for: ${datasource.provider}`);
|
|
2496
|
+
}
|
|
2497
|
+
spinner.start("Introspecting database...");
|
|
2498
|
+
const { enums, tables } = await provider.introspect(datasource.url, {
|
|
2499
|
+
schemas: datasource.allSchemas,
|
|
2500
|
+
modelCasing: options.modelCasing
|
|
2501
|
+
});
|
|
2502
|
+
spinner.succeed("Database introspected");
|
|
2503
|
+
console.log(import_colors4.default.blue("Syncing schema..."));
|
|
2504
|
+
const newModel = {
|
|
2505
|
+
$type: "Model",
|
|
2506
|
+
$container: void 0,
|
|
2507
|
+
$containerProperty: void 0,
|
|
2508
|
+
$containerIndex: void 0,
|
|
2509
|
+
declarations: [
|
|
2510
|
+
...model.declarations.filter((d) => [
|
|
2511
|
+
"DataSource"
|
|
2512
|
+
].includes(d.$type))
|
|
2513
|
+
],
|
|
2514
|
+
imports: model.imports
|
|
2515
|
+
};
|
|
2516
|
+
syncEnums({
|
|
2517
|
+
dbEnums: enums,
|
|
2518
|
+
model: newModel,
|
|
2519
|
+
services,
|
|
2520
|
+
options,
|
|
2521
|
+
defaultSchema: datasource.defaultSchema,
|
|
2522
|
+
oldModel: model,
|
|
2523
|
+
provider
|
|
2524
|
+
});
|
|
2525
|
+
const resolvedRelations = [];
|
|
2526
|
+
for (const table of tables) {
|
|
2527
|
+
const relations = syncTable({
|
|
2528
|
+
table,
|
|
2529
|
+
model: newModel,
|
|
2530
|
+
provider,
|
|
2531
|
+
services,
|
|
2532
|
+
options,
|
|
2533
|
+
defaultSchema: datasource.defaultSchema,
|
|
2534
|
+
oldModel: model
|
|
2535
|
+
});
|
|
2536
|
+
resolvedRelations.push(...relations);
|
|
2537
|
+
}
|
|
2538
|
+
for (const relation of resolvedRelations) {
|
|
2539
|
+
const similarRelations = resolvedRelations.filter((rr) => {
|
|
2540
|
+
return rr !== relation && (rr.schema === relation.schema && rr.table === relation.table && rr.references.schema === relation.references.schema && rr.references.table === relation.references.table || rr.schema === relation.references.schema && rr.columns[0] === relation.references.columns[0] && rr.references.schema === relation.schema && rr.references.table === relation.table);
|
|
2541
|
+
}).length;
|
|
2542
|
+
const selfRelation = relation.references.schema === relation.schema && relation.references.table === relation.table;
|
|
2543
|
+
syncRelation({
|
|
2544
|
+
model: newModel,
|
|
2545
|
+
relation,
|
|
2546
|
+
services,
|
|
2547
|
+
options,
|
|
2548
|
+
selfRelation,
|
|
2549
|
+
similarRelations
|
|
2550
|
+
});
|
|
2551
|
+
}
|
|
2552
|
+
consolidateEnums({
|
|
2553
|
+
newModel,
|
|
2554
|
+
oldModel: model
|
|
2555
|
+
});
|
|
2556
|
+
console.log(import_colors4.default.blue("Schema synced"));
|
|
2557
|
+
const baseDir = import_node_path2.default.dirname(import_node_path2.default.resolve(schemaFile));
|
|
2558
|
+
const baseDirUrlPath = new URL(`file://${baseDir}`).pathname;
|
|
2559
|
+
const docs = services.shared.workspace.LangiumDocuments.all.filter(({ uri }) => uri.path.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())).toArray();
|
|
2560
|
+
const docsSet = new Set(docs.map((d) => d.uri.toString()));
|
|
2561
|
+
console.log(import_colors4.default.bold("\nApplying changes to ZModel..."));
|
|
2562
|
+
const deletedModels = [];
|
|
2563
|
+
const deletedEnums = [];
|
|
2564
|
+
const addedModels = [];
|
|
2565
|
+
const addedEnums = [];
|
|
2566
|
+
const modelChanges = /* @__PURE__ */ new Map();
|
|
2567
|
+
const getModelChanges = /* @__PURE__ */ __name((modelName) => {
|
|
2568
|
+
if (!modelChanges.has(modelName)) {
|
|
2569
|
+
modelChanges.set(modelName, {
|
|
2570
|
+
addedFields: [],
|
|
2571
|
+
deletedFields: [],
|
|
2572
|
+
updatedFields: [],
|
|
2573
|
+
addedAttributes: [],
|
|
2574
|
+
deletedAttributes: [],
|
|
2575
|
+
updatedAttributes: []
|
|
2576
|
+
});
|
|
2577
|
+
}
|
|
2578
|
+
return modelChanges.get(modelName);
|
|
2579
|
+
}, "getModelChanges");
|
|
2580
|
+
services.shared.workspace.IndexManager.allElements("DataModel", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
|
|
2581
|
+
const model2 = decl.node.$container;
|
|
2582
|
+
const index = model2.declarations.findIndex((d) => d === decl.node);
|
|
2583
|
+
model2.declarations.splice(index, 1);
|
|
2584
|
+
deletedModels.push(import_colors4.default.red(`- Model ${decl.name} deleted`));
|
|
2585
|
+
});
|
|
2586
|
+
if (provider.isSupportedFeature("NativeEnum")) services.shared.workspace.IndexManager.allElements("Enum", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
|
|
2587
|
+
const model2 = decl.node.$container;
|
|
2588
|
+
const index = model2.declarations.findIndex((d) => d === decl.node);
|
|
2589
|
+
model2.declarations.splice(index, 1);
|
|
2590
|
+
deletedEnums.push(import_colors4.default.red(`- Enum ${decl.name} deleted`));
|
|
2591
|
+
});
|
|
2592
|
+
newModel.declarations.filter((d) => [
|
|
2593
|
+
import_ast4.DataModel,
|
|
2594
|
+
import_ast4.Enum
|
|
2595
|
+
].includes(d.$type)).forEach((_declaration) => {
|
|
2596
|
+
const newDataModel = _declaration;
|
|
2597
|
+
const declarations = services.shared.workspace.IndexManager.allElements(newDataModel.$type, docsSet).toArray();
|
|
2598
|
+
const originalDataModel = declarations.find((d) => getDbName(d.node) === getDbName(newDataModel))?.node;
|
|
2599
|
+
if (!originalDataModel) {
|
|
2600
|
+
if (newDataModel.$type === "DataModel") {
|
|
2601
|
+
addedModels.push(import_colors4.default.green(`+ Model ${newDataModel.name} added`));
|
|
2602
|
+
} else if (newDataModel.$type === "Enum") {
|
|
2603
|
+
addedEnums.push(import_colors4.default.green(`+ Enum ${newDataModel.name} added`));
|
|
2604
|
+
}
|
|
2605
|
+
model.declarations.push(newDataModel);
|
|
2606
|
+
newDataModel.$container = model;
|
|
2607
|
+
newDataModel.fields.forEach((f) => {
|
|
2608
|
+
if (f.$type === "DataField" && f.type.reference?.ref) {
|
|
2609
|
+
const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
|
|
2610
|
+
if (ref && f.type.reference) {
|
|
2611
|
+
f.type.reference = {
|
|
2612
|
+
ref,
|
|
2613
|
+
$refText: ref.name ?? f.type.reference.$refText
|
|
2614
|
+
};
|
|
2615
|
+
}
|
|
2616
|
+
}
|
|
2617
|
+
});
|
|
2618
|
+
return;
|
|
2619
|
+
}
|
|
2620
|
+
newDataModel.fields.forEach((f) => {
|
|
2621
|
+
let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f));
|
|
2622
|
+
const isRelationField = f.$type === "DataField" && !!f.attributes?.some((a) => a?.decl?.ref?.name === "@relation");
|
|
2623
|
+
if (originalFields.length === 0 && isRelationField && !getRelationFieldsKey(f)) {
|
|
2624
|
+
return;
|
|
2625
|
+
}
|
|
2626
|
+
if (originalFields.length === 0) {
|
|
2627
|
+
const newFieldsKey = getRelationFieldsKey(f);
|
|
2628
|
+
if (newFieldsKey) {
|
|
2629
|
+
originalFields = originalDataModel.fields.filter((d) => getRelationFieldsKey(d) === newFieldsKey);
|
|
2630
|
+
}
|
|
2631
|
+
}
|
|
2632
|
+
if (originalFields.length === 0) {
|
|
2633
|
+
originalFields = originalDataModel.fields.filter((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
|
|
2634
|
+
}
|
|
2635
|
+
if (originalFields.length === 0) {
|
|
2636
|
+
originalFields = originalDataModel.fields.filter((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
|
|
2637
|
+
}
|
|
2638
|
+
if (originalFields.length > 1) {
|
|
2639
|
+
const isBackReferenceField = !getRelationFieldsKey(f);
|
|
2640
|
+
if (!isBackReferenceField) {
|
|
2641
|
+
console.warn(import_colors4.default.yellow(`Found more original fields, need to tweak the search algorithm. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(", ")}](${f.name})`));
|
|
2642
|
+
}
|
|
2643
|
+
return;
|
|
2644
|
+
}
|
|
2645
|
+
const originalField = originalFields.at(0);
|
|
2646
|
+
if (originalField && f.$type === "DataField" && originalField.$type === "DataField") {
|
|
2647
|
+
const newType = f.type;
|
|
2648
|
+
const oldType = originalField.type;
|
|
2649
|
+
const fieldUpdates = [];
|
|
2650
|
+
const isOldTypeEnumWithoutNativeSupport = oldType.reference?.ref?.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
|
|
2651
|
+
if (newType.type && oldType.type !== newType.type && !isOldTypeEnumWithoutNativeSupport) {
|
|
2652
|
+
fieldUpdates.push(`type: ${oldType.type} -> ${newType.type}`);
|
|
2653
|
+
oldType.type = newType.type;
|
|
2654
|
+
}
|
|
2655
|
+
if (newType.reference?.ref && oldType.reference?.ref) {
|
|
2656
|
+
const newRefName = getDbName(newType.reference.ref);
|
|
2657
|
+
const oldRefName = getDbName(oldType.reference.ref);
|
|
2658
|
+
if (newRefName !== oldRefName) {
|
|
2659
|
+
fieldUpdates.push(`reference: ${oldType.reference.$refText} -> ${newType.reference.$refText}`);
|
|
2660
|
+
oldType.reference = {
|
|
2661
|
+
ref: newType.reference.ref,
|
|
2662
|
+
$refText: newType.reference.$refText
|
|
2663
|
+
};
|
|
2664
|
+
}
|
|
2665
|
+
} else if (newType.reference?.ref && !oldType.reference) {
|
|
2666
|
+
fieldUpdates.push(`type: ${oldType.type} -> ${newType.reference.$refText}`);
|
|
2667
|
+
oldType.reference = newType.reference;
|
|
2668
|
+
oldType.type = void 0;
|
|
2669
|
+
} else if (!newType.reference && oldType.reference?.ref && newType.type) {
|
|
2670
|
+
const isEnumWithoutNativeSupport = oldType.reference.ref.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
|
|
2671
|
+
if (!isEnumWithoutNativeSupport) {
|
|
2672
|
+
fieldUpdates.push(`type: ${oldType.reference.$refText} -> ${newType.type}`);
|
|
2673
|
+
oldType.type = newType.type;
|
|
2674
|
+
oldType.reference = void 0;
|
|
2675
|
+
}
|
|
2676
|
+
}
|
|
2677
|
+
if (!!newType.optional !== !!oldType.optional) {
|
|
2678
|
+
fieldUpdates.push(`optional: ${!!oldType.optional} -> ${!!newType.optional}`);
|
|
2679
|
+
oldType.optional = newType.optional;
|
|
2680
|
+
}
|
|
2681
|
+
if (!!newType.array !== !!oldType.array) {
|
|
2682
|
+
fieldUpdates.push(`array: ${!!oldType.array} -> ${!!newType.array}`);
|
|
2683
|
+
oldType.array = newType.array;
|
|
2684
|
+
}
|
|
2685
|
+
if (fieldUpdates.length > 0) {
|
|
2686
|
+
getModelChanges(originalDataModel.name).updatedFields.push(import_colors4.default.yellow(`~ ${originalField.name} (${fieldUpdates.join(", ")})`));
|
|
2687
|
+
}
|
|
2688
|
+
const newDefaultAttr = f.attributes.find((a) => a.decl.$refText === "@default");
|
|
2689
|
+
const oldDefaultAttr = originalField.attributes.find((a) => a.decl.$refText === "@default");
|
|
2690
|
+
if (newDefaultAttr && oldDefaultAttr) {
|
|
2691
|
+
const serializeArgs = /* @__PURE__ */ __name((args) => args.map((arg) => {
|
|
2692
|
+
if (arg.value?.$type === "StringLiteral") return `"${arg.value.value}"`;
|
|
2693
|
+
if (arg.value?.$type === "NumberLiteral") return String(arg.value.value);
|
|
2694
|
+
if (arg.value?.$type === "BooleanLiteral") return String(arg.value.value);
|
|
2695
|
+
if (arg.value?.$type === "InvocationExpr") return arg.value.function?.$refText ?? "";
|
|
2696
|
+
if (arg.value?.$type === "ReferenceExpr") return arg.value.target?.$refText ?? "";
|
|
2697
|
+
if (arg.value?.$type === "ArrayExpr") {
|
|
2698
|
+
return `[${(arg.value.items ?? []).map((item) => {
|
|
2699
|
+
if (item.$type === "ReferenceExpr") return item.target?.$refText ?? "";
|
|
2700
|
+
return item.$type ?? "unknown";
|
|
2701
|
+
}).join(",")}]`;
|
|
2702
|
+
}
|
|
2703
|
+
return arg.value?.$type ?? "unknown";
|
|
2704
|
+
}).join(","), "serializeArgs");
|
|
2705
|
+
const newArgsStr = serializeArgs(newDefaultAttr.args ?? []);
|
|
2706
|
+
const oldArgsStr = serializeArgs(oldDefaultAttr.args ?? []);
|
|
2707
|
+
if (newArgsStr !== oldArgsStr) {
|
|
2708
|
+
oldDefaultAttr.args = newDefaultAttr.args.map((arg) => ({
|
|
2709
|
+
...arg,
|
|
2710
|
+
$container: oldDefaultAttr
|
|
2711
|
+
}));
|
|
2712
|
+
getModelChanges(originalDataModel.name).updatedAttributes.push(import_colors4.default.yellow(`~ @default on ${originalDataModel.name}.${originalField.name}`));
|
|
2713
|
+
}
|
|
2714
|
+
}
|
|
2715
|
+
}
|
|
2716
|
+
if (!originalField) {
|
|
2717
|
+
getModelChanges(originalDataModel.name).addedFields.push(import_colors4.default.green(`+ ${f.name}`));
|
|
2718
|
+
f.$container = originalDataModel;
|
|
2719
|
+
originalDataModel.fields.push(f);
|
|
2720
|
+
if (f.$type === "DataField" && f.type.reference?.ref) {
|
|
2721
|
+
const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
|
|
2722
|
+
if (ref) {
|
|
2723
|
+
f.type.reference = {
|
|
2724
|
+
ref,
|
|
2725
|
+
$refText: ref.name ?? f.type.reference.$refText
|
|
2726
|
+
};
|
|
2727
|
+
}
|
|
2728
|
+
}
|
|
2729
|
+
return;
|
|
2730
|
+
}
|
|
2731
|
+
originalField.attributes.filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && isDatabaseManagedAttribute(attr.decl.$refText)).forEach((attr) => {
|
|
2732
|
+
const field = attr.$container;
|
|
2733
|
+
const index = field.attributes.findIndex((d) => d === attr);
|
|
2734
|
+
field.attributes.splice(index, 1);
|
|
2735
|
+
getModelChanges(originalDataModel.name).deletedAttributes.push(import_colors4.default.yellow(`- ${attr.decl.$refText} from field: ${originalDataModel.name}.${field.name}`));
|
|
2736
|
+
});
|
|
2737
|
+
f.attributes.filter((attr) => !originalField.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && isDatabaseManagedAttribute(attr.decl.$refText)).forEach((attr) => {
|
|
2738
|
+
const cloned = {
|
|
2739
|
+
...attr,
|
|
2740
|
+
$container: originalField
|
|
2741
|
+
};
|
|
2742
|
+
originalField.attributes.push(cloned);
|
|
2743
|
+
getModelChanges(originalDataModel.name).addedAttributes.push(import_colors4.default.green(`+ ${attr.decl.$refText} to field: ${originalDataModel.name}.${f.name}`));
|
|
2744
|
+
});
|
|
2745
|
+
});
|
|
2746
|
+
originalDataModel.fields.filter((f) => {
|
|
2747
|
+
const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f));
|
|
2748
|
+
if (matchByDbName) return false;
|
|
2749
|
+
const originalFieldsKey = getRelationFieldsKey(f);
|
|
2750
|
+
if (originalFieldsKey) {
|
|
2751
|
+
const matchByFieldsKey = newDataModel.fields.find((d) => getRelationFieldsKey(d) === originalFieldsKey);
|
|
2752
|
+
if (matchByFieldsKey) return false;
|
|
2753
|
+
}
|
|
2754
|
+
const matchByFkName = newDataModel.fields.find((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
|
|
2755
|
+
if (matchByFkName) return false;
|
|
2756
|
+
const matchByTypeRef = newDataModel.fields.find((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
|
|
2757
|
+
return !matchByTypeRef;
|
|
2758
|
+
}).forEach((f) => {
|
|
2759
|
+
const _model = f.$container;
|
|
2760
|
+
const index = _model.fields.findIndex((d) => d === f);
|
|
2761
|
+
_model.fields.splice(index, 1);
|
|
2762
|
+
getModelChanges(_model.name).deletedFields.push(import_colors4.default.red(`- ${f.name}`));
|
|
2763
|
+
});
|
|
2764
|
+
});
|
|
2765
|
+
if (deletedModels.length > 0) {
|
|
2766
|
+
console.log(import_colors4.default.bold("\nDeleted Models:"));
|
|
2767
|
+
deletedModels.forEach((msg) => {
|
|
2768
|
+
console.log(msg);
|
|
2769
|
+
});
|
|
2770
|
+
}
|
|
2771
|
+
if (deletedEnums.length > 0) {
|
|
2772
|
+
console.log(import_colors4.default.bold("\nDeleted Enums:"));
|
|
2773
|
+
deletedEnums.forEach((msg) => {
|
|
2774
|
+
console.log(msg);
|
|
2775
|
+
});
|
|
2776
|
+
}
|
|
2777
|
+
if (addedModels.length > 0) {
|
|
2778
|
+
console.log(import_colors4.default.bold("\nAdded Models:"));
|
|
2779
|
+
addedModels.forEach((msg) => {
|
|
2780
|
+
console.log(msg);
|
|
2781
|
+
});
|
|
2782
|
+
}
|
|
2783
|
+
if (addedEnums.length > 0) {
|
|
2784
|
+
console.log(import_colors4.default.bold("\nAdded Enums:"));
|
|
2785
|
+
addedEnums.forEach((msg) => {
|
|
2786
|
+
console.log(msg);
|
|
2787
|
+
});
|
|
2788
|
+
}
|
|
2789
|
+
if (modelChanges.size > 0) {
|
|
2790
|
+
console.log(import_colors4.default.bold("\nModel Changes:"));
|
|
2791
|
+
modelChanges.forEach((changes, modelName) => {
|
|
2792
|
+
const hasChanges = changes.addedFields.length > 0 || changes.deletedFields.length > 0 || changes.updatedFields.length > 0 || changes.addedAttributes.length > 0 || changes.deletedAttributes.length > 0 || changes.updatedAttributes.length > 0;
|
|
2793
|
+
if (hasChanges) {
|
|
2794
|
+
console.log(import_colors4.default.cyan(` ${modelName}:`));
|
|
2795
|
+
if (changes.addedFields.length > 0) {
|
|
2796
|
+
console.log(import_colors4.default.gray(" Added Fields:"));
|
|
2797
|
+
changes.addedFields.forEach((msg) => {
|
|
2798
|
+
console.log(` ${msg}`);
|
|
2799
|
+
});
|
|
2800
|
+
}
|
|
2801
|
+
if (changes.deletedFields.length > 0) {
|
|
2802
|
+
console.log(import_colors4.default.gray(" Deleted Fields:"));
|
|
2803
|
+
changes.deletedFields.forEach((msg) => {
|
|
2804
|
+
console.log(` ${msg}`);
|
|
2805
|
+
});
|
|
2806
|
+
}
|
|
2807
|
+
if (changes.updatedFields.length > 0) {
|
|
2808
|
+
console.log(import_colors4.default.gray(" Updated Fields:"));
|
|
2809
|
+
changes.updatedFields.forEach((msg) => {
|
|
2810
|
+
console.log(` ${msg}`);
|
|
2811
|
+
});
|
|
2812
|
+
}
|
|
2813
|
+
if (changes.addedAttributes.length > 0) {
|
|
2814
|
+
console.log(import_colors4.default.gray(" Added Attributes:"));
|
|
2815
|
+
changes.addedAttributes.forEach((msg) => {
|
|
2816
|
+
console.log(` ${msg}`);
|
|
2817
|
+
});
|
|
2818
|
+
}
|
|
2819
|
+
if (changes.deletedAttributes.length > 0) {
|
|
2820
|
+
console.log(import_colors4.default.gray(" Deleted Attributes:"));
|
|
2821
|
+
changes.deletedAttributes.forEach((msg) => {
|
|
2822
|
+
console.log(` ${msg}`);
|
|
2823
|
+
});
|
|
2824
|
+
}
|
|
2825
|
+
if (changes.updatedAttributes.length > 0) {
|
|
2826
|
+
console.log(import_colors4.default.gray(" Updated Attributes:"));
|
|
2827
|
+
changes.updatedAttributes.forEach((msg) => {
|
|
2828
|
+
console.log(` ${msg}`);
|
|
2829
|
+
});
|
|
2830
|
+
}
|
|
2831
|
+
}
|
|
2832
|
+
});
|
|
2833
|
+
}
|
|
2834
|
+
const generator = new import_language2.ZModelCodeGenerator({
|
|
2835
|
+
quote: options.quote ?? "single",
|
|
2836
|
+
indent: options.indent ?? 4
|
|
2837
|
+
});
|
|
2838
|
+
if (options.output) {
|
|
2839
|
+
if (treatAsFile) {
|
|
2840
|
+
const zmodelSchema = await (0, import_language2.formatDocument)(generator.generate(newModel));
|
|
2841
|
+
console.log(import_colors4.default.blue(`Writing to ${outPath}`));
|
|
2842
|
+
import_node_fs2.default.mkdirSync(import_node_path2.default.dirname(outPath), {
|
|
2843
|
+
recursive: true
|
|
2844
|
+
});
|
|
2845
|
+
import_node_fs2.default.writeFileSync(outPath, zmodelSchema);
|
|
2846
|
+
} else {
|
|
2847
|
+
import_node_fs2.default.mkdirSync(outPath, {
|
|
2848
|
+
recursive: true
|
|
2849
|
+
});
|
|
2850
|
+
const baseDir2 = import_node_path2.default.dirname(import_node_path2.default.resolve(schemaFile));
|
|
2851
|
+
for (const { uri, parseResult: { value: documentModel } } of docs) {
|
|
2852
|
+
const zmodelSchema = await (0, import_language2.formatDocument)(generator.generate(documentModel));
|
|
2853
|
+
const relPath = import_node_path2.default.relative(baseDir2, uri.fsPath);
|
|
2854
|
+
const targetFile = import_node_path2.default.join(outPath, relPath);
|
|
2855
|
+
import_node_fs2.default.mkdirSync(import_node_path2.default.dirname(targetFile), {
|
|
2856
|
+
recursive: true
|
|
2857
|
+
});
|
|
2858
|
+
console.log(import_colors4.default.blue(`Writing to ${targetFile}`));
|
|
2859
|
+
import_node_fs2.default.writeFileSync(targetFile, zmodelSchema);
|
|
2860
|
+
}
|
|
2861
|
+
}
|
|
2862
|
+
} else {
|
|
2863
|
+
for (const { uri, parseResult: { value: documentModel } } of docs) {
|
|
2864
|
+
const zmodelSchema = await (0, import_language2.formatDocument)(generator.generate(documentModel));
|
|
2865
|
+
console.log(import_colors4.default.blue(`Writing to ${import_node_path2.default.relative(process.cwd(), uri.fsPath).replace(/\\/g, "/")}`));
|
|
2866
|
+
import_node_fs2.default.writeFileSync(uri.fsPath, zmodelSchema);
|
|
2867
|
+
}
|
|
2868
|
+
}
|
|
2869
|
+
console.log(import_colors4.default.green.bold("\nPull completed successfully!"));
|
|
2870
|
+
} catch (error) {
|
|
2871
|
+
spinner.fail("Pull failed");
|
|
2872
|
+
console.error(error);
|
|
2873
|
+
throw error;
|
|
2874
|
+
}
|
|
2875
|
+
}
|
|
2876
|
+
__name(runPull, "runPull");
|
|
284
2877
|
|
|
285
2878
|
// src/actions/format.ts
|
|
286
|
-
var
|
|
287
|
-
var
|
|
2879
|
+
var import_language3 = require("@zenstackhq/language");
|
|
2880
|
+
var import_colors5 = __toESM(require("colors"), 1);
|
|
288
2881
|
var import_node_fs3 = __toESM(require("fs"), 1);
|
|
289
2882
|
async function run3(options) {
|
|
290
2883
|
const schemaFile = getSchemaFile(options.schema);
|
|
291
2884
|
let formattedContent;
|
|
292
2885
|
try {
|
|
293
|
-
formattedContent = await (0,
|
|
2886
|
+
formattedContent = await (0, import_language3.formatDocument)(import_node_fs3.default.readFileSync(schemaFile, "utf-8"));
|
|
294
2887
|
} catch (error) {
|
|
295
|
-
console.error(
|
|
2888
|
+
console.error(import_colors5.default.red("\u2717 Schema formatting failed."));
|
|
296
2889
|
throw error;
|
|
297
2890
|
}
|
|
298
2891
|
import_node_fs3.default.writeFileSync(schemaFile, formattedContent, "utf-8");
|
|
299
|
-
console.log(
|
|
2892
|
+
console.log(import_colors5.default.green("\u2713 Schema formatting completed successfully."));
|
|
300
2893
|
}
|
|
301
2894
|
__name(run3, "run");
|
|
302
2895
|
|
|
303
2896
|
// src/actions/generate.ts
|
|
304
|
-
var
|
|
305
|
-
var
|
|
306
|
-
var
|
|
307
|
-
var
|
|
308
|
-
var
|
|
2897
|
+
var import_common_helpers2 = require("@zenstackhq/common-helpers");
|
|
2898
|
+
var import_language4 = require("@zenstackhq/language");
|
|
2899
|
+
var import_ast5 = require("@zenstackhq/language/ast");
|
|
2900
|
+
var import_utils7 = require("@zenstackhq/language/utils");
|
|
2901
|
+
var import_colors6 = __toESM(require("colors"), 1);
|
|
309
2902
|
var import_jiti = require("jiti");
|
|
310
2903
|
var import_node_fs6 = __toESM(require("fs"), 1);
|
|
311
|
-
var
|
|
2904
|
+
var import_node_path5 = __toESM(require("path"), 1);
|
|
312
2905
|
var import_node_url = require("url");
|
|
313
2906
|
var import_chokidar = require("chokidar");
|
|
314
|
-
var
|
|
2907
|
+
var import_ora2 = __toESM(require("ora"), 1);
|
|
315
2908
|
|
|
316
2909
|
// src/plugins/index.ts
|
|
317
2910
|
var plugins_exports = {};
|
|
@@ -323,16 +2916,16 @@ __export(plugins_exports, {
|
|
|
323
2916
|
// src/plugins/prisma.ts
|
|
324
2917
|
var import_sdk2 = require("@zenstackhq/sdk");
|
|
325
2918
|
var import_node_fs4 = __toESM(require("fs"), 1);
|
|
326
|
-
var
|
|
2919
|
+
var import_node_path3 = __toESM(require("path"), 1);
|
|
327
2920
|
var plugin = {
|
|
328
2921
|
name: "Prisma Schema Generator",
|
|
329
2922
|
statusText: "Generating Prisma schema",
|
|
330
2923
|
async generate({ model, defaultOutputPath, pluginOptions }) {
|
|
331
|
-
let outFile =
|
|
2924
|
+
let outFile = import_node_path3.default.join(defaultOutputPath, "schema.prisma");
|
|
332
2925
|
if (typeof pluginOptions["output"] === "string") {
|
|
333
|
-
outFile =
|
|
334
|
-
if (!import_node_fs4.default.existsSync(
|
|
335
|
-
import_node_fs4.default.mkdirSync(
|
|
2926
|
+
outFile = import_node_path3.default.resolve(defaultOutputPath, pluginOptions["output"]);
|
|
2927
|
+
if (!import_node_fs4.default.existsSync(import_node_path3.default.dirname(outFile))) {
|
|
2928
|
+
import_node_fs4.default.mkdirSync(import_node_path3.default.dirname(outFile), {
|
|
336
2929
|
recursive: true
|
|
337
2930
|
});
|
|
338
2931
|
}
|
|
@@ -346,14 +2939,14 @@ var prisma_default = plugin;
|
|
|
346
2939
|
// src/plugins/typescript.ts
|
|
347
2940
|
var import_sdk3 = require("@zenstackhq/sdk");
|
|
348
2941
|
var import_node_fs5 = __toESM(require("fs"), 1);
|
|
349
|
-
var
|
|
2942
|
+
var import_node_path4 = __toESM(require("path"), 1);
|
|
350
2943
|
var plugin2 = {
|
|
351
2944
|
name: "TypeScript Schema Generator",
|
|
352
2945
|
statusText: "Generating TypeScript schema",
|
|
353
2946
|
async generate({ model, defaultOutputPath, pluginOptions }) {
|
|
354
2947
|
let outDir = defaultOutputPath;
|
|
355
2948
|
if (typeof pluginOptions["output"] === "string") {
|
|
356
|
-
outDir =
|
|
2949
|
+
outDir = import_node_path4.default.resolve(defaultOutputPath, pluginOptions["output"]);
|
|
357
2950
|
if (!import_node_fs5.default.existsSync(outDir)) {
|
|
358
2951
|
import_node_fs5.default.mkdirSync(outDir, {
|
|
359
2952
|
recursive: true
|
|
@@ -377,15 +2970,21 @@ var plugin2 = {
|
|
|
377
2970
|
var typescript_default = plugin2;
|
|
378
2971
|
|
|
379
2972
|
// src/actions/generate.ts
|
|
2973
|
+
var import_semver = __toESM(require("semver"), 1);
|
|
380
2974
|
async function run4(options) {
|
|
2975
|
+
try {
|
|
2976
|
+
await checkForMismatchedPackages(process.cwd());
|
|
2977
|
+
} catch (err) {
|
|
2978
|
+
console.warn(import_colors6.default.yellow(`Failed to check for mismatched ZenStack packages: ${err}`));
|
|
2979
|
+
}
|
|
381
2980
|
const model = await pureGenerate(options, false);
|
|
382
2981
|
if (options.watch) {
|
|
383
2982
|
const logsEnabled = !options.silent;
|
|
384
2983
|
if (logsEnabled) {
|
|
385
|
-
console.log(
|
|
2984
|
+
console.log(import_colors6.default.green(`
|
|
386
2985
|
Enabled watch mode!`));
|
|
387
2986
|
}
|
|
388
|
-
const schemaExtensions =
|
|
2987
|
+
const schemaExtensions = import_language4.ZModelLanguageMetaData.fileExtensions;
|
|
389
2988
|
const getRootModelWatchPaths = /* @__PURE__ */ __name((model2) => new Set(model2.declarations.filter((v) => v.$cstNode?.parent?.element.$type === "Model" && !!v.$cstNode.parent.element.$document?.uri?.fsPath).map((v) => v.$cstNode.parent.element.$document.uri.fsPath)), "getRootModelWatchPaths");
|
|
390
2989
|
const watchedPaths = getRootModelWatchPaths(model);
|
|
391
2990
|
if (logsEnabled) {
|
|
@@ -403,7 +3002,7 @@ ${logPaths}`);
|
|
|
403
3002
|
ignorePermissionErrors: true,
|
|
404
3003
|
ignored: /* @__PURE__ */ __name((at) => !schemaExtensions.some((ext) => at.endsWith(ext)), "ignored")
|
|
405
3004
|
});
|
|
406
|
-
const reGenerateSchema = (0,
|
|
3005
|
+
const reGenerateSchema = (0, import_common_helpers2.singleDebounce)(async () => {
|
|
407
3006
|
if (logsEnabled) {
|
|
408
3007
|
console.log("Got changes, run generation!");
|
|
409
3008
|
}
|
|
@@ -459,14 +3058,14 @@ async function pureGenerate(options, fromWatch) {
|
|
|
459
3058
|
const outputPath = getOutputPath(options, schemaFile);
|
|
460
3059
|
await runPlugins(schemaFile, model, outputPath, options);
|
|
461
3060
|
if (!options.silent) {
|
|
462
|
-
console.log(
|
|
3061
|
+
console.log(import_colors6.default.green(`Generation completed successfully in ${Date.now() - start}ms.
|
|
463
3062
|
`));
|
|
464
3063
|
if (!fromWatch) {
|
|
465
3064
|
console.log(`You can now create a ZenStack client with it.
|
|
466
3065
|
|
|
467
3066
|
\`\`\`ts
|
|
468
3067
|
import { ZenStackClient } from '@zenstackhq/orm';
|
|
469
|
-
import { schema } from '${
|
|
3068
|
+
import { schema } from '${import_node_path5.default.relative(".", outputPath)}/schema';
|
|
470
3069
|
|
|
471
3070
|
const client = new ZenStackClient(schema, {
|
|
472
3071
|
dialect: { ... }
|
|
@@ -480,7 +3079,7 @@ Check documentation: https://zenstack.dev/docs/`);
|
|
|
480
3079
|
}
|
|
481
3080
|
__name(pureGenerate, "pureGenerate");
|
|
482
3081
|
async function runPlugins(schemaFile, model, outputPath, options) {
|
|
483
|
-
const plugins = model.declarations.filter(
|
|
3082
|
+
const plugins = model.declarations.filter(import_ast5.isPlugin);
|
|
484
3083
|
const processedPlugins = [];
|
|
485
3084
|
for (const plugin3 of plugins) {
|
|
486
3085
|
const provider = getPluginProvider(plugin3);
|
|
@@ -491,7 +3090,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
|
|
|
491
3090
|
throw new CliError(`Unknown core plugin: ${provider}`);
|
|
492
3091
|
}
|
|
493
3092
|
} else {
|
|
494
|
-
cliPlugin = await loadPluginModule(provider,
|
|
3093
|
+
cliPlugin = await loadPluginModule(provider, import_node_path5.default.dirname(schemaFile));
|
|
495
3094
|
}
|
|
496
3095
|
if (cliPlugin) {
|
|
497
3096
|
const pluginOptions = getPluginOptions(plugin3);
|
|
@@ -527,10 +3126,10 @@ async function runPlugins(schemaFile, model, outputPath, options) {
|
|
|
527
3126
|
}
|
|
528
3127
|
});
|
|
529
3128
|
for (const { cliPlugin, pluginOptions } of processedPlugins) {
|
|
530
|
-
(0,
|
|
3129
|
+
(0, import_common_helpers2.invariant)(typeof cliPlugin.generate === "function", `Plugin ${cliPlugin.name} does not have a generate function`);
|
|
531
3130
|
let spinner;
|
|
532
3131
|
if (!options.silent) {
|
|
533
|
-
spinner = (0,
|
|
3132
|
+
spinner = (0, import_ora2.default)(cliPlugin.statusText ?? `Running plugin ${cliPlugin.name}`).start();
|
|
534
3133
|
}
|
|
535
3134
|
try {
|
|
536
3135
|
await cliPlugin.generate({
|
|
@@ -549,7 +3148,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
|
|
|
549
3148
|
__name(runPlugins, "runPlugins");
|
|
550
3149
|
function getPluginProvider(plugin3) {
|
|
551
3150
|
const providerField = plugin3.fields.find((f) => f.name === "provider");
|
|
552
|
-
(0,
|
|
3151
|
+
(0, import_common_helpers2.invariant)(providerField, `Plugin ${plugin3.name} does not have a provider field`);
|
|
553
3152
|
const provider = providerField.value.value;
|
|
554
3153
|
return provider;
|
|
555
3154
|
}
|
|
@@ -560,7 +3159,7 @@ function getPluginOptions(plugin3) {
|
|
|
560
3159
|
if (field.name === "provider") {
|
|
561
3160
|
continue;
|
|
562
3161
|
}
|
|
563
|
-
const value = (0,
|
|
3162
|
+
const value = (0, import_utils7.getLiteral)(field.value) ?? (0, import_utils7.getLiteralArray)(field.value);
|
|
564
3163
|
if (value === void 0) {
|
|
565
3164
|
console.warn(`Plugin "${plugin3.name}" option "${field.name}" has unsupported value, skipping`);
|
|
566
3165
|
continue;
|
|
@@ -573,7 +3172,7 @@ __name(getPluginOptions, "getPluginOptions");
|
|
|
573
3172
|
async function loadPluginModule(provider, basePath) {
|
|
574
3173
|
let moduleSpec = provider;
|
|
575
3174
|
if (moduleSpec.startsWith(".")) {
|
|
576
|
-
moduleSpec =
|
|
3175
|
+
moduleSpec = import_node_path5.default.resolve(basePath, moduleSpec);
|
|
577
3176
|
}
|
|
578
3177
|
const importAsEsm = /* @__PURE__ */ __name(async (spec) => {
|
|
579
3178
|
try {
|
|
@@ -611,13 +3210,13 @@ async function loadPluginModule(provider, basePath) {
|
|
|
611
3210
|
}
|
|
612
3211
|
}
|
|
613
3212
|
for (const suffix of esmSuffixes) {
|
|
614
|
-
const indexPath =
|
|
3213
|
+
const indexPath = import_node_path5.default.join(moduleSpec, `index${suffix}`);
|
|
615
3214
|
if (import_node_fs6.default.existsSync(indexPath)) {
|
|
616
3215
|
return await importAsEsm((0, import_node_url.pathToFileURL)(indexPath).toString());
|
|
617
3216
|
}
|
|
618
3217
|
}
|
|
619
3218
|
for (const suffix of tsSuffixes) {
|
|
620
|
-
const indexPath =
|
|
3219
|
+
const indexPath = import_node_path5.default.join(moduleSpec, `index${suffix}`);
|
|
621
3220
|
if (import_node_fs6.default.existsSync(indexPath)) {
|
|
622
3221
|
return await importAsTs(indexPath);
|
|
623
3222
|
}
|
|
@@ -629,13 +3228,46 @@ async function loadPluginModule(provider, basePath) {
|
|
|
629
3228
|
}
|
|
630
3229
|
}
|
|
631
3230
|
__name(loadPluginModule, "loadPluginModule");
|
|
3231
|
+
async function checkForMismatchedPackages(projectPath) {
|
|
3232
|
+
const packages = await getZenStackPackages(projectPath);
|
|
3233
|
+
if (!packages.length) {
|
|
3234
|
+
return false;
|
|
3235
|
+
}
|
|
3236
|
+
const versions = /* @__PURE__ */ new Set();
|
|
3237
|
+
for (const { version: version2 } of packages) {
|
|
3238
|
+
if (version2) {
|
|
3239
|
+
versions.add(version2);
|
|
3240
|
+
}
|
|
3241
|
+
}
|
|
3242
|
+
if (versions.size > 1) {
|
|
3243
|
+
const message = "WARNING: Multiple versions of ZenStack packages detected.\n This will probably cause issues and break your types.";
|
|
3244
|
+
const slashes = "/".repeat(73);
|
|
3245
|
+
const latestVersion = import_semver.default.sort(Array.from(versions)).reverse()[0];
|
|
3246
|
+
console.warn(import_colors6.default.yellow(`${slashes}
|
|
3247
|
+
|
|
3248
|
+
${message}
|
|
3249
|
+
`));
|
|
3250
|
+
for (const { pkg, version: version2 } of packages) {
|
|
3251
|
+
if (!version2) continue;
|
|
3252
|
+
if (version2 === latestVersion) {
|
|
3253
|
+
console.log(` ${pkg.padEnd(32)} ${import_colors6.default.green(version2)}`);
|
|
3254
|
+
} else {
|
|
3255
|
+
console.log(` ${pkg.padEnd(32)} ${import_colors6.default.yellow(version2)}`);
|
|
3256
|
+
}
|
|
3257
|
+
}
|
|
3258
|
+
console.warn(`
|
|
3259
|
+
${import_colors6.default.yellow(slashes)}`);
|
|
3260
|
+
return true;
|
|
3261
|
+
}
|
|
3262
|
+
return false;
|
|
3263
|
+
}
|
|
3264
|
+
__name(checkForMismatchedPackages, "checkForMismatchedPackages");
|
|
632
3265
|
|
|
633
3266
|
// src/actions/info.ts
|
|
634
|
-
var
|
|
635
|
-
var import_node_path5 = __toESM(require("path"), 1);
|
|
3267
|
+
var import_colors7 = __toESM(require("colors"), 1);
|
|
636
3268
|
async function run5(projectPath) {
|
|
637
3269
|
const packages = await getZenStackPackages(projectPath);
|
|
638
|
-
if (!packages) {
|
|
3270
|
+
if (!packages.length) {
|
|
639
3271
|
console.error("Unable to locate package.json. Are you in a valid project directory?");
|
|
640
3272
|
return;
|
|
641
3273
|
}
|
|
@@ -645,59 +3277,19 @@ async function run5(projectPath) {
|
|
|
645
3277
|
if (version2) {
|
|
646
3278
|
versions.add(version2);
|
|
647
3279
|
}
|
|
648
|
-
console.log(` ${
|
|
3280
|
+
console.log(` ${import_colors7.default.green(pkg.padEnd(20))} ${version2}`);
|
|
649
3281
|
}
|
|
650
3282
|
if (versions.size > 1) {
|
|
651
|
-
console.warn(
|
|
3283
|
+
console.warn(import_colors7.default.yellow("WARNING: Multiple versions of Zenstack packages detected. This may cause issues."));
|
|
652
3284
|
}
|
|
653
3285
|
}
|
|
654
3286
|
__name(run5, "run");
|
|
655
|
-
async function getZenStackPackages(projectPath) {
|
|
656
|
-
let pkgJson;
|
|
657
|
-
const resolvedPath = import_node_path5.default.resolve(projectPath);
|
|
658
|
-
try {
|
|
659
|
-
pkgJson = (await import(import_node_path5.default.join(resolvedPath, "package.json"), {
|
|
660
|
-
with: {
|
|
661
|
-
type: "json"
|
|
662
|
-
}
|
|
663
|
-
})).default;
|
|
664
|
-
} catch {
|
|
665
|
-
return [];
|
|
666
|
-
}
|
|
667
|
-
const packages = Array.from(new Set([
|
|
668
|
-
...Object.keys(pkgJson.dependencies ?? {}),
|
|
669
|
-
...Object.keys(pkgJson.devDependencies ?? {})
|
|
670
|
-
].filter((p) => p.startsWith("@zenstackhq/") || p === "zenstack"))).sort();
|
|
671
|
-
const result = await Promise.all(packages.map(async (pkg) => {
|
|
672
|
-
try {
|
|
673
|
-
const depPkgJson = (await import(`${pkg}/package.json`, {
|
|
674
|
-
with: {
|
|
675
|
-
type: "json"
|
|
676
|
-
}
|
|
677
|
-
})).default;
|
|
678
|
-
if (depPkgJson.private) {
|
|
679
|
-
return void 0;
|
|
680
|
-
}
|
|
681
|
-
return {
|
|
682
|
-
pkg,
|
|
683
|
-
version: depPkgJson.version
|
|
684
|
-
};
|
|
685
|
-
} catch {
|
|
686
|
-
return {
|
|
687
|
-
pkg,
|
|
688
|
-
version: void 0
|
|
689
|
-
};
|
|
690
|
-
}
|
|
691
|
-
}));
|
|
692
|
-
return result.filter((p) => !!p);
|
|
693
|
-
}
|
|
694
|
-
__name(getZenStackPackages, "getZenStackPackages");
|
|
695
3287
|
|
|
696
3288
|
// src/actions/init.ts
|
|
697
|
-
var
|
|
3289
|
+
var import_colors8 = __toESM(require("colors"), 1);
|
|
698
3290
|
var import_node_fs7 = __toESM(require("fs"), 1);
|
|
699
3291
|
var import_node_path6 = __toESM(require("path"), 1);
|
|
700
|
-
var
|
|
3292
|
+
var import_ora3 = __toESM(require("ora"), 1);
|
|
701
3293
|
var import_package_manager_detector = require("package-manager-detector");
|
|
702
3294
|
|
|
703
3295
|
// src/actions/templates.ts
|
|
@@ -748,7 +3340,7 @@ async function run6(projectPath) {
|
|
|
748
3340
|
name: "npm"
|
|
749
3341
|
};
|
|
750
3342
|
}
|
|
751
|
-
console.log(
|
|
3343
|
+
console.log(import_colors8.default.gray(`Using package manager: ${pm.agent}`));
|
|
752
3344
|
for (const pkg of packages) {
|
|
753
3345
|
const resolved = (0, import_package_manager_detector.resolveCommand)(pm.agent, "add", [
|
|
754
3346
|
pkg.name,
|
|
@@ -759,7 +3351,7 @@ async function run6(projectPath) {
|
|
|
759
3351
|
if (!resolved) {
|
|
760
3352
|
throw new CliError(`Unable to determine how to install package "${pkg.name}". Please install it manually.`);
|
|
761
3353
|
}
|
|
762
|
-
const spinner = (0,
|
|
3354
|
+
const spinner = (0, import_ora3.default)(`Installing "${pkg.name}"`).start();
|
|
763
3355
|
try {
|
|
764
3356
|
execSync(`${resolved.command} ${resolved.args.join(" ")}`, {
|
|
765
3357
|
cwd: projectPath
|
|
@@ -777,11 +3369,11 @@ async function run6(projectPath) {
|
|
|
777
3369
|
if (!import_node_fs7.default.existsSync(import_node_path6.default.join(projectPath, generationFolder, "schema.zmodel"))) {
|
|
778
3370
|
import_node_fs7.default.writeFileSync(import_node_path6.default.join(projectPath, generationFolder, "schema.zmodel"), STARTER_ZMODEL);
|
|
779
3371
|
} else {
|
|
780
|
-
console.log(
|
|
3372
|
+
console.log(import_colors8.default.yellow("Schema file already exists. Skipping generation of sample."));
|
|
781
3373
|
}
|
|
782
|
-
console.log(
|
|
783
|
-
console.log(
|
|
784
|
-
console.log(
|
|
3374
|
+
console.log(import_colors8.default.green("ZenStack project initialized successfully!"));
|
|
3375
|
+
console.log(import_colors8.default.gray(`See "${generationFolder}/schema.zmodel" for your database schema.`));
|
|
3376
|
+
console.log(import_colors8.default.gray("Run `zenstack generate` to compile the the schema into a TypeScript file."));
|
|
785
3377
|
}
|
|
786
3378
|
__name(run6, "run");
|
|
787
3379
|
|
|
@@ -790,19 +3382,19 @@ var import_node_fs8 = __toESM(require("fs"), 1);
|
|
|
790
3382
|
var import_node_path7 = __toESM(require("path"), 1);
|
|
791
3383
|
|
|
792
3384
|
// src/actions/seed.ts
|
|
793
|
-
var
|
|
3385
|
+
var import_colors9 = __toESM(require("colors"), 1);
|
|
794
3386
|
var import_execa = require("execa");
|
|
795
3387
|
async function run7(options, args) {
|
|
796
3388
|
const pkgJsonConfig = getPkgJsonConfig(process.cwd());
|
|
797
3389
|
if (!pkgJsonConfig.seed) {
|
|
798
3390
|
if (!options.noWarnings) {
|
|
799
|
-
console.warn(
|
|
3391
|
+
console.warn(import_colors9.default.yellow("No seed script defined in package.json. Skipping seeding."));
|
|
800
3392
|
}
|
|
801
3393
|
return;
|
|
802
3394
|
}
|
|
803
3395
|
const command = `${pkgJsonConfig.seed}${args.length > 0 ? " " + args.join(" ") : ""}`;
|
|
804
3396
|
if (options.printStatus) {
|
|
805
|
-
console.log(
|
|
3397
|
+
console.log(import_colors9.default.gray(`Running seed script "${command}"...`));
|
|
806
3398
|
}
|
|
807
3399
|
try {
|
|
808
3400
|
await (0, import_execa.execaCommand)(command, {
|
|
@@ -810,7 +3402,7 @@ async function run7(options, args) {
|
|
|
810
3402
|
stderr: "inherit"
|
|
811
3403
|
});
|
|
812
3404
|
} catch (err) {
|
|
813
|
-
console.error(
|
|
3405
|
+
console.error(import_colors9.default.red(err instanceof Error ? err.message : String(err)));
|
|
814
3406
|
throw new CliError("Failed to seed the database. Please check the error message above for details.");
|
|
815
3407
|
}
|
|
816
3408
|
}
|
|
@@ -931,29 +3523,29 @@ function handleSubProcessError2(err) {
|
|
|
931
3523
|
__name(handleSubProcessError2, "handleSubProcessError");
|
|
932
3524
|
|
|
933
3525
|
// src/actions/proxy.ts
|
|
934
|
-
var
|
|
935
|
-
var
|
|
3526
|
+
var import_ast6 = require("@zenstackhq/language/ast");
|
|
3527
|
+
var import_utils8 = require("@zenstackhq/language/utils");
|
|
936
3528
|
var import_orm = require("@zenstackhq/orm");
|
|
937
|
-
var
|
|
3529
|
+
var import_mysql2 = require("@zenstackhq/orm/dialects/mysql");
|
|
938
3530
|
var import_postgres = require("@zenstackhq/orm/dialects/postgres");
|
|
939
|
-
var
|
|
3531
|
+
var import_sqlite2 = require("@zenstackhq/orm/dialects/sqlite");
|
|
940
3532
|
var import_api = require("@zenstackhq/server/api");
|
|
941
3533
|
var import_express = require("@zenstackhq/server/express");
|
|
942
3534
|
var import_better_sqlite3 = __toESM(require("better-sqlite3"), 1);
|
|
943
|
-
var
|
|
3535
|
+
var import_colors11 = __toESM(require("colors"), 1);
|
|
944
3536
|
var import_cors = __toESM(require("cors"), 1);
|
|
945
3537
|
var import_express2 = __toESM(require("express"), 1);
|
|
946
3538
|
var import_jiti2 = require("jiti");
|
|
947
|
-
var
|
|
3539
|
+
var import_mysql22 = require("mysql2");
|
|
948
3540
|
var import_node_path9 = __toESM(require("path"), 1);
|
|
949
|
-
var
|
|
3541
|
+
var import_pg2 = require("pg");
|
|
950
3542
|
|
|
951
3543
|
// src/utils/version-utils.ts
|
|
952
|
-
var
|
|
3544
|
+
var import_colors10 = __toESM(require("colors"), 1);
|
|
953
3545
|
var import_node_fs9 = __toESM(require("fs"), 1);
|
|
954
3546
|
var import_node_path8 = __toESM(require("path"), 1);
|
|
955
3547
|
var import_node_url2 = require("url");
|
|
956
|
-
var
|
|
3548
|
+
var import_semver2 = __toESM(require("semver"), 1);
|
|
957
3549
|
var import_meta2 = {};
|
|
958
3550
|
var CHECK_VERSION_TIMEOUT = 2e3;
|
|
959
3551
|
var VERSION_CHECK_TAG = "next";
|
|
@@ -974,8 +3566,8 @@ async function checkNewVersion() {
|
|
|
974
3566
|
} catch {
|
|
975
3567
|
return;
|
|
976
3568
|
}
|
|
977
|
-
if (latestVersion && currVersion &&
|
|
978
|
-
console.log(`A newer version ${
|
|
3569
|
+
if (latestVersion && currVersion && import_semver2.default.gt(latestVersion, currVersion)) {
|
|
3570
|
+
console.log(`A newer version ${import_colors10.default.cyan(latestVersion)} is available.`);
|
|
979
3571
|
}
|
|
980
3572
|
}
|
|
981
3573
|
__name(checkNewVersion, "checkNewVersion");
|
|
@@ -989,7 +3581,7 @@ async function getLatestVersion() {
|
|
|
989
3581
|
if (fetchResult.ok) {
|
|
990
3582
|
const data = await fetchResult.json();
|
|
991
3583
|
const latestVersion = data?.version;
|
|
992
|
-
if (typeof latestVersion === "string" &&
|
|
3584
|
+
if (typeof latestVersion === "string" && import_semver2.default.valid(latestVersion)) {
|
|
993
3585
|
return latestVersion;
|
|
994
3586
|
}
|
|
995
3587
|
}
|
|
@@ -1006,13 +3598,13 @@ async function run9(options) {
|
|
|
1006
3598
|
];
|
|
1007
3599
|
const log = options.logLevel?.filter((level) => allowedLogLevels.includes(level));
|
|
1008
3600
|
const schemaFile = getSchemaFile(options.schema);
|
|
1009
|
-
console.log(
|
|
3601
|
+
console.log(import_colors11.default.gray(`Loading ZModel schema from: ${schemaFile}`));
|
|
1010
3602
|
let outputPath = getOutputPath(options, schemaFile);
|
|
1011
3603
|
if (!import_node_path9.default.isAbsolute(outputPath)) {
|
|
1012
3604
|
outputPath = import_node_path9.default.resolve(process.cwd(), outputPath);
|
|
1013
3605
|
}
|
|
1014
3606
|
const model = await loadSchemaDocument(schemaFile);
|
|
1015
|
-
const dataSource = model.declarations.find(
|
|
3607
|
+
const dataSource = model.declarations.find(import_ast6.isDataSource);
|
|
1016
3608
|
let databaseUrl = options.databaseUrl;
|
|
1017
3609
|
if (!databaseUrl) {
|
|
1018
3610
|
const schemaUrl = dataSource?.fields.find((f) => f.name === "url")?.value;
|
|
@@ -1021,7 +3613,7 @@ async function run9(options) {
|
|
|
1021
3613
|
}
|
|
1022
3614
|
databaseUrl = evaluateUrl(schemaUrl);
|
|
1023
3615
|
}
|
|
1024
|
-
const provider = (0,
|
|
3616
|
+
const provider = (0, import_utils8.getStringLiteral)(dataSource?.fields.find((f) => f.name === "provider")?.value);
|
|
1025
3617
|
const dialect = createDialect(provider, databaseUrl, outputPath);
|
|
1026
3618
|
const jiti = (0, import_jiti2.createJiti)(import_meta3.url);
|
|
1027
3619
|
const schemaModule = await jiti.import(import_node_path9.default.join(outputPath, "schema"));
|
|
@@ -1052,11 +3644,11 @@ async function run9(options) {
|
|
|
1052
3644
|
}
|
|
1053
3645
|
__name(run9, "run");
|
|
1054
3646
|
function evaluateUrl(schemaUrl) {
|
|
1055
|
-
if ((0,
|
|
1056
|
-
return (0,
|
|
1057
|
-
} else if ((0,
|
|
3647
|
+
if ((0, import_ast6.isLiteralExpr)(schemaUrl)) {
|
|
3648
|
+
return (0, import_utils8.getStringLiteral)(schemaUrl);
|
|
3649
|
+
} else if ((0, import_ast6.isInvocationExpr)(schemaUrl)) {
|
|
1058
3650
|
const envFunction = schemaUrl;
|
|
1059
|
-
const envName = (0,
|
|
3651
|
+
const envName = (0, import_utils8.getStringLiteral)(envFunction.args[0]?.value);
|
|
1060
3652
|
const envValue = process.env[envName];
|
|
1061
3653
|
if (!envValue) {
|
|
1062
3654
|
throw new CliError(`Environment variable ${envName} is not set`);
|
|
@@ -1092,22 +3684,22 @@ function createDialect(provider, databaseUrl, outputPath) {
|
|
|
1092
3684
|
resolvedUrl = import_node_path9.default.join(outputPath, filePath);
|
|
1093
3685
|
}
|
|
1094
3686
|
}
|
|
1095
|
-
console.log(
|
|
1096
|
-
return new
|
|
3687
|
+
console.log(import_colors11.default.gray(`Connecting to SQLite database at: ${resolvedUrl}`));
|
|
3688
|
+
return new import_sqlite2.SqliteDialect({
|
|
1097
3689
|
database: new import_better_sqlite3.default(resolvedUrl)
|
|
1098
3690
|
});
|
|
1099
3691
|
}
|
|
1100
3692
|
case "postgresql":
|
|
1101
|
-
console.log(
|
|
3693
|
+
console.log(import_colors11.default.gray(`Connecting to PostgreSQL database at: ${redactDatabaseUrl(databaseUrl)}`));
|
|
1102
3694
|
return new import_postgres.PostgresDialect({
|
|
1103
|
-
pool: new
|
|
3695
|
+
pool: new import_pg2.Pool({
|
|
1104
3696
|
connectionString: databaseUrl
|
|
1105
3697
|
})
|
|
1106
3698
|
});
|
|
1107
3699
|
case "mysql":
|
|
1108
|
-
console.log(
|
|
1109
|
-
return new
|
|
1110
|
-
pool: (0,
|
|
3700
|
+
console.log(import_colors11.default.gray(`Connecting to MySQL database at: ${redactDatabaseUrl(databaseUrl)}`));
|
|
3701
|
+
return new import_mysql2.MysqlDialect({
|
|
3702
|
+
pool: (0, import_mysql22.createPool)(databaseUrl)
|
|
1111
3703
|
});
|
|
1112
3704
|
default:
|
|
1113
3705
|
throw new CliError(`Unsupported database provider: ${provider}`);
|
|
@@ -1138,11 +3730,11 @@ function startServer(client, schema, options) {
|
|
|
1138
3730
|
});
|
|
1139
3731
|
const server = app.listen(options.port, () => {
|
|
1140
3732
|
console.log(`ZenStack proxy server is running on port: ${options.port}`);
|
|
1141
|
-
console.log(`You can visit ZenStack Studio at: ${
|
|
3733
|
+
console.log(`You can visit ZenStack Studio at: ${import_colors11.default.blue("https://studio.zenstack.dev")}`);
|
|
1142
3734
|
});
|
|
1143
3735
|
server.on("error", (err) => {
|
|
1144
3736
|
if (err.code === "EADDRINUSE") {
|
|
1145
|
-
console.error(
|
|
3737
|
+
console.error(import_colors11.default.red(`Port ${options.port} is already in use. Please choose a different port using -p option.`));
|
|
1146
3738
|
} else {
|
|
1147
3739
|
throw new CliError(`Failed to start the server: ${err.message}`);
|
|
1148
3740
|
}
|
|
@@ -1172,7 +3764,7 @@ var import_node_fs13 = __toESM(require("fs"), 1);
|
|
|
1172
3764
|
var os2 = __toESM(require("os"), 1);
|
|
1173
3765
|
|
|
1174
3766
|
// src/constants.ts
|
|
1175
|
-
var TELEMETRY_TRACKING_TOKEN = "
|
|
3767
|
+
var TELEMETRY_TRACKING_TOKEN = "<TELEMETRY_TRACKING_TOKEN>";
|
|
1176
3768
|
|
|
1177
3769
|
// src/utils/is-ci.ts
|
|
1178
3770
|
var import_node_process = require("process");
|
|
@@ -1435,8 +4027,8 @@ var proxyAction = /* @__PURE__ */ __name(async (options) => {
|
|
|
1435
4027
|
}, "proxyAction");
|
|
1436
4028
|
function createProgram() {
|
|
1437
4029
|
const program = new import_commander.Command("zen").alias("zenstack").helpOption("-h, --help", "Show this help message").version(getVersion(), "-v --version", "Show CLI version");
|
|
1438
|
-
const schemaExtensions =
|
|
1439
|
-
program.description(`${
|
|
4030
|
+
const schemaExtensions = import_language5.ZModelLanguageMetaData.fileExtensions.join(", ");
|
|
4031
|
+
program.description(`${import_colors12.default.bold.blue("\u03B6")} ZenStack is the modern data layer for TypeScript apps.
|
|
1440
4032
|
|
|
1441
4033
|
Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAfterError();
|
|
1442
4034
|
const schemaOption = new import_commander.Option("--schema <file>", `schema file (with extension ${schemaExtensions}). Defaults to "zenstack/schema.zmodel" unless specified in package.json.`);
|
|
@@ -1451,6 +4043,7 @@ Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAf
|
|
|
1451
4043
|
migrateCommand.command("resolve").addOption(schemaOption).addOption(noVersionCheckOption).addOption(migrationsOption).addOption(new import_commander.Option("--applied <migration>", "record a specific migration as applied")).addOption(new import_commander.Option("--rolled-back <migration>", "record a specific migration as rolled back")).description("Resolve issues with database migrations in deployment databases").action((options) => migrateAction("resolve", options));
|
|
1452
4044
|
const dbCommand = program.command("db").description("Manage your database schema during development");
|
|
1453
4045
|
dbCommand.command("push").description("Push the state from your schema to your database").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new import_commander.Option("--accept-data-loss", "ignore data loss warnings")).addOption(new import_commander.Option("--force-reset", "force a reset of the database before push")).action((options) => dbAction("push", options));
|
|
4046
|
+
dbCommand.command("pull").description("Introspect your database.").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new import_commander.Option("-o, --output <path>", "set custom output path for the introspected schema. If a file path is provided, all schemas are merged into that single file. If a directory path is provided, files are written to the directory and imports are kept.")).addOption(new import_commander.Option("--model-casing <pascal|camel|snake|none>", "set the casing of generated models").default("pascal")).addOption(new import_commander.Option("--field-casing <pascal|camel|snake|none>", "set the casing of generated fields").default("camel")).addOption(new import_commander.Option("--always-map", "always add @map and @@map attributes to models and fields").default(false)).addOption(new import_commander.Option("--quote <double|single>", "set the quote style of generated schema files").default("single")).addOption(new import_commander.Option("--indent <number>", "set the indentation of the generated schema files").default(4)).action((options) => dbAction("pull", options));
|
|
1454
4047
|
dbCommand.command("seed").description("Seed the database").allowExcessArguments(true).addHelpText("after", `
|
|
1455
4048
|
Seed script is configured under the "zenstack.seed" field in package.json.
|
|
1456
4049
|
E.g.:
|
|
@@ -1487,10 +4080,10 @@ async function main() {
|
|
|
1487
4080
|
if (e instanceof import_commander.CommanderError) {
|
|
1488
4081
|
exitCode = e.exitCode;
|
|
1489
4082
|
} else if (e instanceof CliError) {
|
|
1490
|
-
console.error(
|
|
4083
|
+
console.error(import_colors12.default.red(e.message));
|
|
1491
4084
|
exitCode = 1;
|
|
1492
4085
|
} else {
|
|
1493
|
-
console.error(
|
|
4086
|
+
console.error(import_colors12.default.red(`Unhandled error: ${e}`));
|
|
1494
4087
|
exitCode = 1;
|
|
1495
4088
|
}
|
|
1496
4089
|
}
|