@zenstackhq/cli 3.3.3 → 3.4.0-beta.1
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/index.cjs +2554 -101
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +2534 -81
- package/dist/index.js.map +1 -1
- package/package.json +11 -10
package/dist/index.js
CHANGED
@@ -14,7 +14,7 @@ var __export = (target, all) => {
 // src/index.ts
 import "dotenv/config";
 import { ZModelLanguageMetaData as ZModelLanguageMetaData2 } from "@zenstackhq/language";
-import
+import colors12 from "colors";
 import { Command, CommanderError, Option } from "commander";
 
 // src/actions/check.ts
@@ -67,8 +67,10 @@ function getSchemaFile(file) {
 }
 }
 __name(getSchemaFile, "getSchemaFile");
-async function loadSchemaDocument(schemaFile) {
-const
+async function loadSchemaDocument(schemaFile, opts = {}) {
+const returnServices = opts.returnServices ?? false;
+const mergeImports = opts.mergeImports ?? true;
+const loadResult = await loadDocument(schemaFile, [], mergeImports);
 if (!loadResult.success) {
 loadResult.errors.forEach((err) => {
 console.error(colors.red(err));
@@ -78,6 +80,10 @@ async function loadSchemaDocument(schemaFile) {
 loadResult.warnings.forEach((warn) => {
 console.warn(colors.yellow(warn));
 });
+if (returnServices) return {
+model: loadResult.model,
+services: loadResult.services
+};
 return loadResult.model;
 }
 __name(loadSchemaDocument, "loadSchemaDocument");
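
A minimal sketch of how the widened loadSchemaDocument signature above could be called (illustration only; the file path and call sites are assumptions, not taken from this diff):

// Default behavior is unchanged: imports are merged and only the model is returned.
const model = await loadSchemaDocument("./zenstack/schema.zmodel");

// The new options let a caller skip import merging and also receive the language services.
const { model: ast, services } = await loadSchemaDocument("./zenstack/schema.zmodel", {
    returnServices: true, // on success, return { model, services } instead of just the model
    mergeImports: false   // forwarded as the third argument to loadDocument()
});
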
@@ -182,7 +188,12 @@ async function run(options) {
 __name(run, "run");
 
 // src/actions/db.ts
+import { formatDocument, ZModelCodeGenerator } from "@zenstackhq/language";
+import { DataModel, Enum } from "@zenstackhq/language/ast";
+import colors4 from "colors";
 import fs2 from "fs";
+import path2 from "path";
+import ora from "ora";
 
 // src/utils/exec-utils.ts
 import { execSync as _exec } from "child_process";
@@ -231,12 +242,2048 @@ function execPrisma(args, options) {
|
|
|
231
242
|
}
|
|
232
243
|
__name(execPrisma, "execPrisma");
|
|
233
244
|
|
|
245
|
+
// src/actions/pull/index.ts
|
|
246
|
+
import colors3 from "colors";
|
|
247
|
+
import { isEnum } from "@zenstackhq/language/ast";
|
|
248
|
+
import { DataFieldAttributeFactory, DataFieldFactory, DataModelFactory, EnumFactory } from "@zenstackhq/language/factory";
|
|
249
|
+
import { AstUtils } from "langium";
|
|
250
|
+
import { lowerCaseFirst } from "@zenstackhq/common-helpers";
|
|
251
|
+
|
|
252
|
+
// src/actions/pull/utils.ts
|
|
253
|
+
import { isInvocationExpr } from "@zenstackhq/language/ast";
|
|
254
|
+
import { getLiteralArray, getStringLiteral } from "@zenstackhq/language/utils";
|
|
255
|
+
function getDatasource(model) {
|
|
256
|
+
const datasource = model.declarations.find((d) => d.$type === "DataSource");
|
|
257
|
+
if (!datasource) {
|
|
258
|
+
throw new CliError("No datasource declaration found in the schema.");
|
|
259
|
+
}
|
|
260
|
+
const urlField = datasource.fields.find((f) => f.name === "url");
|
|
261
|
+
if (!urlField) throw new CliError(`No url field found in the datasource declaration.`);
|
|
262
|
+
let url = getStringLiteral(urlField.value);
|
|
263
|
+
if (!url && isInvocationExpr(urlField.value)) {
|
|
264
|
+
const envName = getStringLiteral(urlField.value.args[0]?.value);
|
|
265
|
+
if (!envName) {
|
|
266
|
+
throw new CliError("The url field must be a string literal or an env().");
|
|
267
|
+
}
|
|
268
|
+
if (!process.env[envName]) {
|
|
269
|
+
throw new CliError(`Environment variable ${envName} is not set, please set it to the database connection string.`);
|
|
270
|
+
}
|
|
271
|
+
url = process.env[envName];
|
|
272
|
+
}
|
|
273
|
+
if (!url) {
|
|
274
|
+
throw new CliError("The url field must be a string literal or an env().");
|
|
275
|
+
}
|
|
276
|
+
if (url.startsWith("file:")) {
|
|
277
|
+
url = new URL(url, `file:${model.$document.uri.path}`).pathname;
|
|
278
|
+
if (process.platform === "win32" && url[0] === "/") url = url.slice(1);
|
|
279
|
+
}
|
|
280
|
+
const defaultSchemaField = datasource.fields.find((f) => f.name === "defaultSchema");
|
|
281
|
+
const defaultSchema = defaultSchemaField && getStringLiteral(defaultSchemaField.value) || "public";
|
|
282
|
+
const schemasField = datasource.fields.find((f) => f.name === "schemas");
|
|
283
|
+
const schemas = schemasField && getLiteralArray(schemasField.value)?.filter((s) => s !== void 0) || [];
|
|
284
|
+
const provider = getStringLiteral(datasource.fields.find((f) => f.name === "provider")?.value);
|
|
285
|
+
if (!provider) {
|
|
286
|
+
throw new CliError(`Datasource "${datasource.name}" is missing a "provider" field.`);
|
|
287
|
+
}
|
|
288
|
+
return {
|
|
289
|
+
name: datasource.name,
|
|
290
|
+
provider,
|
|
291
|
+
url,
|
|
292
|
+
defaultSchema,
|
|
293
|
+
schemas,
|
|
294
|
+
allSchemas: [
|
|
295
|
+
defaultSchema,
|
|
296
|
+
...schemas
|
|
297
|
+
]
|
|
298
|
+
};
|
|
299
|
+
}
|
|
300
|
+
__name(getDatasource, "getDatasource");
|
|
301
|
+
function getDbName(decl, includeSchema = false) {
|
|
302
|
+
if (!("attributes" in decl)) return decl.name;
|
|
303
|
+
const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@schema");
|
|
304
|
+
let schema = "public";
|
|
305
|
+
if (schemaAttr) {
|
|
306
|
+
const schemaAttrValue = schemaAttr.args[0]?.value;
|
|
307
|
+
if (schemaAttrValue?.$type === "StringLiteral") {
|
|
308
|
+
schema = schemaAttrValue.value;
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
const formatName = /* @__PURE__ */ __name((name) => `${schema && includeSchema ? `${schema}.` : ""}${name}`, "formatName");
|
|
312
|
+
const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@map" || a.decl.ref?.name === "@map");
|
|
313
|
+
if (!nameAttr) return formatName(decl.name);
|
|
314
|
+
const attrValue = nameAttr.args[0]?.value;
|
|
315
|
+
if (attrValue?.$type !== "StringLiteral") return formatName(decl.name);
|
|
316
|
+
return formatName(attrValue.value);
|
|
317
|
+
}
|
|
318
|
+
__name(getDbName, "getDbName");
|
|
319
|
+
function getRelationFkName(decl) {
|
|
320
|
+
const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
|
|
321
|
+
const schemaAttrValue = relationAttr?.args.find((a) => a.name === "map")?.value;
|
|
322
|
+
return schemaAttrValue?.value;
|
|
323
|
+
}
|
|
324
|
+
__name(getRelationFkName, "getRelationFkName");
|
|
325
|
+
function getRelationFieldsKey(decl) {
|
|
326
|
+
const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
|
|
327
|
+
if (!relationAttr) return void 0;
|
|
328
|
+
const fieldsArg = relationAttr.args.find((a) => a.name === "fields")?.value;
|
|
329
|
+
if (!fieldsArg || fieldsArg.$type !== "ArrayExpr") return void 0;
|
|
330
|
+
const fieldNames = fieldsArg.items.filter((item) => item.$type === "ReferenceExpr").map((item) => item.target?.$refText || item.target?.ref?.name).filter((name) => !!name).sort();
|
|
331
|
+
return fieldNames.length > 0 ? fieldNames.join(",") : void 0;
|
|
332
|
+
}
|
|
333
|
+
__name(getRelationFieldsKey, "getRelationFieldsKey");
|
|
334
|
+
function getDeclarationRef(type2, name, services) {
|
|
335
|
+
const node = services.shared.workspace.IndexManager.allElements(type2).find((m) => m.node && getDbName(m.node) === name)?.node;
|
|
336
|
+
if (!node) throw new CliError(`Declaration not found: ${name}`);
|
|
337
|
+
return node;
|
|
338
|
+
}
|
|
339
|
+
__name(getDeclarationRef, "getDeclarationRef");
|
|
340
|
+
function getEnumRef(name, services) {
|
|
341
|
+
return getDeclarationRef("Enum", name, services);
|
|
342
|
+
}
|
|
343
|
+
__name(getEnumRef, "getEnumRef");
|
|
344
|
+
function getAttributeRef(name, services) {
|
|
345
|
+
return getDeclarationRef("Attribute", name, services);
|
|
346
|
+
}
|
|
347
|
+
__name(getAttributeRef, "getAttributeRef");
|
|
348
|
+
function getFunctionRef(name, services) {
|
|
349
|
+
return getDeclarationRef("FunctionDecl", name, services);
|
|
350
|
+
}
|
|
351
|
+
__name(getFunctionRef, "getFunctionRef");
|
|
352
|
+
function normalizeFloatDefault(val) {
|
|
353
|
+
if (/^-?\d+$/.test(val)) {
|
|
354
|
+
return (ab) => ab.NumberLiteral.setValue(val + ".0");
|
|
355
|
+
}
|
|
356
|
+
if (/^-?\d+\.\d+$/.test(val)) {
|
|
357
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
358
|
+
}
|
|
359
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
360
|
+
}
|
|
361
|
+
__name(normalizeFloatDefault, "normalizeFloatDefault");
|
|
362
|
+
function normalizeDecimalDefault(val) {
|
|
363
|
+
if (/^-?\d+$/.test(val)) {
|
|
364
|
+
return (ab) => ab.NumberLiteral.setValue(val + ".00");
|
|
365
|
+
}
|
|
366
|
+
if (/^-?\d+\.\d+$/.test(val)) {
|
|
367
|
+
const [integerPart, fractionalPart] = val.split(".");
|
|
368
|
+
let normalized = fractionalPart.replace(/0+$/, "");
|
|
369
|
+
if (normalized.length < 2) {
|
|
370
|
+
normalized = normalized.padEnd(2, "0");
|
|
371
|
+
}
|
|
372
|
+
return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`);
|
|
373
|
+
}
|
|
374
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
375
|
+
}
|
|
376
|
+
__name(normalizeDecimalDefault, "normalizeDecimalDefault");
|
|
377
|
+
|
|
378
|
+
// src/actions/pull/casing.ts
|
|
379
|
+
function resolveNameCasing(casing, originalName) {
|
|
380
|
+
let name = originalName;
|
|
381
|
+
const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? "_" : "";
|
|
382
|
+
switch (casing) {
|
|
383
|
+
case "pascal":
|
|
384
|
+
name = toPascalCase(originalName);
|
|
385
|
+
break;
|
|
386
|
+
case "camel":
|
|
387
|
+
name = toCamelCase(originalName);
|
|
388
|
+
break;
|
|
389
|
+
case "snake":
|
|
390
|
+
name = toSnakeCase(originalName);
|
|
391
|
+
break;
|
|
392
|
+
}
|
|
393
|
+
return {
|
|
394
|
+
modified: name !== originalName || fieldPrefix !== "",
|
|
395
|
+
name: `${fieldPrefix}${name}`
|
|
396
|
+
};
|
|
397
|
+
}
|
|
398
|
+
__name(resolveNameCasing, "resolveNameCasing");
|
|
399
|
+
function isAllUpperCase(str) {
|
|
400
|
+
return str === str.toUpperCase();
|
|
401
|
+
}
|
|
402
|
+
__name(isAllUpperCase, "isAllUpperCase");
|
|
403
|
+
function toPascalCase(str) {
|
|
404
|
+
if (isAllUpperCase(str)) return str;
|
|
405
|
+
return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase());
|
|
406
|
+
}
|
|
407
|
+
__name(toPascalCase, "toPascalCase");
|
|
408
|
+
function toCamelCase(str) {
|
|
409
|
+
if (isAllUpperCase(str)) return str;
|
|
410
|
+
return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase());
|
|
411
|
+
}
|
|
412
|
+
__name(toCamelCase, "toCamelCase");
|
|
413
|
+
function toSnakeCase(str) {
|
|
414
|
+
if (isAllUpperCase(str)) return str;
|
|
415
|
+
return str.replace(/[- ]+/g, "_").replace(/([a-z0-9])([A-Z])/g, "$1_$2").toLowerCase();
|
|
416
|
+
}
|
|
417
|
+
__name(toSnakeCase, "toSnakeCase");
|
|
418
|
+
|
|
419
|
+
// src/actions/pull/index.ts
|
|
420
|
+
function syncEnums({ dbEnums, model, oldModel, provider, options, services, defaultSchema }) {
|
|
421
|
+
if (provider.isSupportedFeature("NativeEnum")) {
|
|
422
|
+
for (const dbEnum of dbEnums) {
|
|
423
|
+
const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type);
|
|
424
|
+
if (modified) console.log(colors3.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`));
|
|
425
|
+
const factory = new EnumFactory().setName(name);
|
|
426
|
+
if (modified || options.alwaysMap) factory.addAttribute((builder) => builder.setDecl(getAttributeRef("@@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)));
|
|
427
|
+
dbEnum.values.forEach((v) => {
|
|
428
|
+
const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, v);
|
|
429
|
+
factory.addField((builder) => {
|
|
430
|
+
builder.setName(name2);
|
|
431
|
+
if (modified2 || options.alwaysMap) builder.addAttribute((builder2) => builder2.setDecl(getAttributeRef("@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)));
|
|
432
|
+
return builder;
|
|
433
|
+
});
|
|
434
|
+
});
|
|
435
|
+
if (dbEnum.schema_name && dbEnum.schema_name !== "" && dbEnum.schema_name !== defaultSchema) {
|
|
436
|
+
factory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)));
|
|
437
|
+
}
|
|
438
|
+
model.declarations.push(factory.get({
|
|
439
|
+
$container: model
|
|
440
|
+
}));
|
|
441
|
+
}
|
|
442
|
+
} else {
|
|
443
|
+
const dummyBuildReference = /* @__PURE__ */ __name((_node, _property, _refNode, refText) => ({
|
|
444
|
+
$refText: refText
|
|
445
|
+
}), "dummyBuildReference");
|
|
446
|
+
oldModel.declarations.filter((d) => isEnum(d)).forEach((d) => {
|
|
447
|
+
const copy = AstUtils.copyAstNode(d, dummyBuildReference);
|
|
448
|
+
copy.$container = model;
|
|
449
|
+
model.declarations.push(copy);
|
|
450
|
+
});
|
|
451
|
+
}
|
|
452
|
+
}
|
|
453
|
+
__name(syncEnums, "syncEnums");
|
|
454
|
+
function syncTable({ model, provider, table, services, options, defaultSchema }) {
|
|
455
|
+
const idAttribute = getAttributeRef("@id", services);
|
|
456
|
+
const modelIdAttribute = getAttributeRef("@@id", services);
|
|
457
|
+
const uniqueAttribute = getAttributeRef("@unique", services);
|
|
458
|
+
const modelUniqueAttribute = getAttributeRef("@@unique", services);
|
|
459
|
+
const fieldMapAttribute = getAttributeRef("@map", services);
|
|
460
|
+
const tableMapAttribute = getAttributeRef("@@map", services);
|
|
461
|
+
const modelindexAttribute = getAttributeRef("@@index", services);
|
|
462
|
+
const relations = [];
|
|
463
|
+
const { name, modified } = resolveNameCasing(options.modelCasing, table.name);
|
|
464
|
+
const multiPk = table.columns.filter((c) => c.pk).length > 1;
|
|
465
|
+
const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === "view");
|
|
466
|
+
modelFactory.setContainer(model);
|
|
467
|
+
if (modified || options.alwaysMap) {
|
|
468
|
+
modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)));
|
|
469
|
+
}
|
|
470
|
+
const fkGroups = /* @__PURE__ */ new Map();
|
|
471
|
+
table.columns.forEach((column) => {
|
|
472
|
+
if (column.foreign_key_table && column.foreign_key_name) {
|
|
473
|
+
const group = fkGroups.get(column.foreign_key_name) ?? [];
|
|
474
|
+
group.push(column);
|
|
475
|
+
fkGroups.set(column.foreign_key_name, group);
|
|
476
|
+
}
|
|
477
|
+
});
|
|
478
|
+
for (const [fkName, fkColumns] of fkGroups) {
|
|
479
|
+
const firstCol = fkColumns[0];
|
|
480
|
+
const isSingleColumnPk = fkColumns.length === 1 && !multiPk && firstCol.pk;
|
|
481
|
+
const isUniqueRelation = fkColumns.length === 1 && firstCol.unique || isSingleColumnPk;
|
|
482
|
+
relations.push({
|
|
483
|
+
schema: table.schema,
|
|
484
|
+
table: table.name,
|
|
485
|
+
columns: fkColumns.map((c) => c.name),
|
|
486
|
+
type: "one",
|
|
487
|
+
fk_name: fkName,
|
|
488
|
+
foreign_key_on_delete: firstCol.foreign_key_on_delete,
|
|
489
|
+
foreign_key_on_update: firstCol.foreign_key_on_update,
|
|
490
|
+
nullable: firstCol.nullable,
|
|
491
|
+
references: {
|
|
492
|
+
schema: firstCol.foreign_key_schema,
|
|
493
|
+
table: firstCol.foreign_key_table,
|
|
494
|
+
columns: fkColumns.map((c) => c.foreign_key_column),
|
|
495
|
+
type: isUniqueRelation ? "one" : "many"
|
|
496
|
+
}
|
|
497
|
+
});
|
|
498
|
+
}
|
|
499
|
+
table.columns.forEach((column) => {
|
|
500
|
+
const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, column.name);
|
|
501
|
+
const builtinType = provider.getBuiltinType(column.datatype);
|
|
502
|
+
modelFactory.addField((builder) => {
|
|
503
|
+
builder.setName(name2);
|
|
504
|
+
builder.setType((typeBuilder) => {
|
|
505
|
+
typeBuilder.setArray(builtinType.isArray);
|
|
506
|
+
typeBuilder.setOptional(builtinType.isArray ? false : column.nullable);
|
|
507
|
+
if (column.computed) {
|
|
508
|
+
typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
|
|
509
|
+
} else if (column.datatype === "enum") {
|
|
510
|
+
const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype_name);
|
|
511
|
+
if (!ref) {
|
|
512
|
+
throw new CliError(`Enum ${column.datatype_name} not found`);
|
|
513
|
+
}
|
|
514
|
+
typeBuilder.setReference(ref);
|
|
515
|
+
} else {
|
|
516
|
+
if (builtinType.type !== "Unsupported") {
|
|
517
|
+
typeBuilder.setType(builtinType.type);
|
|
518
|
+
} else {
|
|
519
|
+
typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
|
|
520
|
+
}
|
|
521
|
+
}
|
|
522
|
+
return typeBuilder;
|
|
523
|
+
});
|
|
524
|
+
if (column.pk && !multiPk) {
|
|
525
|
+
builder.addAttribute((b) => b.setDecl(idAttribute));
|
|
526
|
+
}
|
|
527
|
+
const fieldAttrs = provider.getFieldAttributes({
|
|
528
|
+
fieldName: column.name,
|
|
529
|
+
fieldType: builtinType.type,
|
|
530
|
+
datatype: column.datatype,
|
|
531
|
+
length: column.length,
|
|
532
|
+
precision: column.precision,
|
|
533
|
+
services
|
|
534
|
+
});
|
|
535
|
+
fieldAttrs.forEach(builder.addAttribute.bind(builder));
|
|
536
|
+
if (column.default && !column.computed) {
|
|
537
|
+
const defaultExprBuilder = provider.getDefaultValue({
|
|
538
|
+
fieldType: builtinType.type,
|
|
539
|
+
datatype: column.datatype,
|
|
540
|
+
datatype_name: column.datatype_name,
|
|
541
|
+
defaultValue: column.default,
|
|
542
|
+
services,
|
|
543
|
+
enums: model.declarations.filter((d) => d.$type === "Enum")
|
|
544
|
+
});
|
|
545
|
+
if (defaultExprBuilder) {
|
|
546
|
+
const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef("@default", services)).addArg(defaultExprBuilder);
|
|
547
|
+
builder.addAttribute(defaultAttr);
|
|
548
|
+
}
|
|
549
|
+
}
|
|
550
|
+
if (column.unique && !column.pk) {
|
|
551
|
+
builder.addAttribute((b) => {
|
|
552
|
+
b.setDecl(uniqueAttribute);
|
|
553
|
+
const isDefaultName = !column.unique_name || column.unique_name === `${table.name}_${column.name}_key` || column.unique_name === column.name;
|
|
554
|
+
if (!isDefaultName) {
|
|
555
|
+
b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name), "map");
|
|
556
|
+
}
|
|
557
|
+
return b;
|
|
558
|
+
});
|
|
559
|
+
}
|
|
560
|
+
if (modified2 || options.alwaysMap) {
|
|
561
|
+
builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab2) => ab2.StringLiteral.setValue(column.name)));
|
|
562
|
+
}
|
|
563
|
+
return builder;
|
|
564
|
+
});
|
|
565
|
+
});
|
|
566
|
+
const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name);
|
|
567
|
+
if (multiPk) {
|
|
568
|
+
modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => {
|
|
569
|
+
const arrayExpr = argBuilder.ArrayExpr;
|
|
570
|
+
pkColumns.forEach((c) => {
|
|
571
|
+
const ref = modelFactory.node.fields.find((f) => getDbName(f) === c);
|
|
572
|
+
if (!ref) {
|
|
573
|
+
throw new CliError(`Field ${c} not found`);
|
|
574
|
+
}
|
|
575
|
+
arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref));
|
|
576
|
+
});
|
|
577
|
+
return arrayExpr;
|
|
578
|
+
}));
|
|
579
|
+
}
|
|
580
|
+
const hasUniqueConstraint = table.columns.some((c) => c.unique || c.pk) || table.indexes.some((i) => i.unique);
|
|
581
|
+
if (!hasUniqueConstraint) {
|
|
582
|
+
modelFactory.addAttribute((a) => a.setDecl(getAttributeRef("@@ignore", services)));
|
|
583
|
+
modelFactory.addComment("/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.");
|
|
584
|
+
}
|
|
585
|
+
const sortedIndexes = table.indexes.reverse().sort((a, b) => {
|
|
586
|
+
if (a.unique && !b.unique) return -1;
|
|
587
|
+
if (!a.unique && b.unique) return 1;
|
|
588
|
+
return 0;
|
|
589
|
+
});
|
|
590
|
+
sortedIndexes.forEach((index) => {
|
|
591
|
+
if (index.predicate) {
|
|
592
|
+
console.warn(colors3.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
|
|
593
|
+
- Model: "${table.name}", constraint: "${index.name}"`));
|
|
594
|
+
return;
|
|
595
|
+
}
|
|
596
|
+
if (index.columns.find((c) => c.expression)) {
|
|
597
|
+
console.warn(colors3.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
|
|
598
|
+
- Model: "${table.name}", constraint: "${index.name}"`));
|
|
599
|
+
return;
|
|
600
|
+
}
|
|
601
|
+
if (index.primary) {
|
|
602
|
+
return;
|
|
603
|
+
}
|
|
604
|
+
if (index.columns.length === 1 && (index.columns.find((c) => pkColumns.includes(c.name)) || index.unique)) {
|
|
605
|
+
return;
|
|
606
|
+
}
|
|
607
|
+
modelFactory.addAttribute((builder) => {
|
|
608
|
+
const attr = builder.setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute).addArg((argBuilder) => {
|
|
609
|
+
const arrayExpr = argBuilder.ArrayExpr;
|
|
610
|
+
index.columns.forEach((c) => {
|
|
611
|
+
const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name);
|
|
612
|
+
if (!ref) {
|
|
613
|
+
throw new CliError(`Column ${c.name} not found in model ${table.name}`);
|
|
614
|
+
}
|
|
615
|
+
arrayExpr.addItem((itemBuilder) => {
|
|
616
|
+
const refExpr = itemBuilder.ReferenceExpr.setTarget(ref);
|
|
617
|
+
if (c.order && c.order !== "ASC") refExpr.addArg((ab) => ab.StringLiteral.setValue("DESC"), "sort");
|
|
618
|
+
return refExpr;
|
|
619
|
+
});
|
|
620
|
+
});
|
|
621
|
+
return arrayExpr;
|
|
622
|
+
});
|
|
623
|
+
const suffix = index.unique ? "_key" : "_idx";
|
|
624
|
+
if (index.name !== `${table.name}_${index.columns.map((c) => c.name).join("_")}${suffix}`) {
|
|
625
|
+
attr.addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), "map");
|
|
626
|
+
}
|
|
627
|
+
return attr;
|
|
628
|
+
});
|
|
629
|
+
});
|
|
630
|
+
if (table.schema && table.schema !== "" && table.schema !== defaultSchema) {
|
|
631
|
+
modelFactory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(table.schema)));
|
|
632
|
+
}
|
|
633
|
+
model.declarations.push(modelFactory.node);
|
|
634
|
+
return relations;
|
|
635
|
+
}
|
|
636
|
+
__name(syncTable, "syncTable");
|
|
637
|
+
function syncRelation({ model, relation, services, options, selfRelation, similarRelations }) {
|
|
638
|
+
const idAttribute = getAttributeRef("@id", services);
|
|
639
|
+
const uniqueAttribute = getAttributeRef("@unique", services);
|
|
640
|
+
const relationAttribute = getAttributeRef("@relation", services);
|
|
641
|
+
const fieldMapAttribute = getAttributeRef("@map", services);
|
|
642
|
+
const tableMapAttribute = getAttributeRef("@@map", services);
|
|
643
|
+
const includeRelationName = selfRelation || similarRelations > 0;
|
|
644
|
+
if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) {
|
|
645
|
+
throw new CliError("Cannot find required attributes in the model.");
|
|
646
|
+
}
|
|
647
|
+
const sourceModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.table);
|
|
648
|
+
if (!sourceModel) return;
|
|
649
|
+
const sourceFields = [];
|
|
650
|
+
for (const colName of relation.columns) {
|
|
651
|
+
const idx = sourceModel.fields.findIndex((f) => getDbName(f) === colName);
|
|
652
|
+
const field = sourceModel.fields[idx];
|
|
653
|
+
if (!field) return;
|
|
654
|
+
sourceFields.push({
|
|
655
|
+
field,
|
|
656
|
+
index: idx
|
|
657
|
+
});
|
|
658
|
+
}
|
|
659
|
+
const targetModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.references.table);
|
|
660
|
+
if (!targetModel) return;
|
|
661
|
+
const targetFields = [];
|
|
662
|
+
for (const colName of relation.references.columns) {
|
|
663
|
+
const field = targetModel.fields.find((f) => getDbName(f) === colName);
|
|
664
|
+
if (!field) return;
|
|
665
|
+
targetFields.push(field);
|
|
666
|
+
}
|
|
667
|
+
const firstSourceField = sourceFields[0].field;
|
|
668
|
+
const firstSourceFieldId = sourceFields[0].index;
|
|
669
|
+
const firstColumn = relation.columns[0];
|
|
670
|
+
const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? "_" : "";
|
|
671
|
+
const relationName = `${relation.table}${similarRelations > 0 ? `_${firstColumn}` : ""}To${relation.references.table}`;
|
|
672
|
+
const sourceNameFromReference = firstSourceField.name.toLowerCase().endsWith("id") ? `${resolveNameCasing(options.fieldCasing, firstSourceField.name.slice(0, -2)).name}${relation.type === "many" ? "s" : ""}` : void 0;
|
|
673
|
+
const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference);
|
|
674
|
+
let { name: sourceFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${fieldPrefix}${lowerCaseFirst(sourceModel.name)}_${firstColumn}` : `${(!sourceFieldFromReference ? sourceNameFromReference : void 0) || lowerCaseFirst(resolveNameCasing(options.fieldCasing, targetModel.name).name)}${relation.type === "many" ? "s" : ""}`);
|
|
675
|
+
if (sourceModel.fields.find((f) => f.name === sourceFieldName)) {
|
|
676
|
+
sourceFieldName = `${sourceFieldName}To${lowerCaseFirst(targetModel.name)}_${relation.references.columns[0]}`;
|
|
677
|
+
}
|
|
678
|
+
const sourceFieldFactory = new DataFieldFactory().setContainer(sourceModel).setName(sourceFieldName).setType((tb) => tb.setOptional(relation.nullable).setArray(relation.type === "many").setReference(targetModel));
|
|
679
|
+
sourceFieldFactory.addAttribute((ab) => {
|
|
680
|
+
ab.setDecl(relationAttribute);
|
|
681
|
+
if (includeRelationName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relationName));
|
|
682
|
+
ab.addArg((ab2) => {
|
|
683
|
+
const arrayExpr = ab2.ArrayExpr;
|
|
684
|
+
for (const { field } of sourceFields) {
|
|
685
|
+
arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
|
|
686
|
+
}
|
|
687
|
+
return arrayExpr;
|
|
688
|
+
}, "fields");
|
|
689
|
+
ab.addArg((ab2) => {
|
|
690
|
+
const arrayExpr = ab2.ArrayExpr;
|
|
691
|
+
for (const field of targetFields) {
|
|
692
|
+
arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
|
|
693
|
+
}
|
|
694
|
+
return arrayExpr;
|
|
695
|
+
}, "references");
|
|
696
|
+
const onDeleteDefault = relation.nullable ? "SET NULL" : "RESTRICT";
|
|
697
|
+
if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) {
|
|
698
|
+
const enumRef = getEnumRef("ReferentialAction", services);
|
|
699
|
+
if (!enumRef) throw new CliError("ReferentialAction enum not found");
|
|
700
|
+
const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_delete.replace(/ /g, "").toLowerCase());
|
|
701
|
+
if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_delete} not found`);
|
|
702
|
+
ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onDelete");
|
|
703
|
+
}
|
|
704
|
+
if (relation.foreign_key_on_update && relation.foreign_key_on_update !== "CASCADE") {
|
|
705
|
+
const enumRef = getEnumRef("ReferentialAction", services);
|
|
706
|
+
if (!enumRef) throw new CliError("ReferentialAction enum not found");
|
|
707
|
+
const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_update.replace(/ /g, "").toLowerCase());
|
|
708
|
+
if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_update} not found`);
|
|
709
|
+
ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onUpdate");
|
|
710
|
+
}
|
|
711
|
+
const defaultFkName = `${relation.table}_${relation.columns.join("_")}_fkey`;
|
|
712
|
+
if (relation.fk_name && relation.fk_name !== defaultFkName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relation.fk_name), "map");
|
|
713
|
+
return ab;
|
|
714
|
+
});
|
|
715
|
+
sourceModel.fields.splice(firstSourceFieldId, 0, sourceFieldFactory.node);
|
|
716
|
+
const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? "_" : "";
|
|
717
|
+
const { name: oppositeFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${oppositeFieldPrefix}${lowerCaseFirst(sourceModel.name)}_${firstColumn}` : `${lowerCaseFirst(resolveNameCasing(options.fieldCasing, sourceModel.name).name)}${relation.references.type === "many" ? "s" : ""}`);
|
|
718
|
+
const targetFieldFactory = new DataFieldFactory().setContainer(targetModel).setName(oppositeFieldName).setType((tb) => tb.setOptional(relation.references.type === "one").setArray(relation.references.type === "many").setReference(sourceModel));
|
|
719
|
+
if (includeRelationName) targetFieldFactory.addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab2) => ab2.StringLiteral.setValue(relationName)));
|
|
720
|
+
targetModel.fields.push(targetFieldFactory.node);
|
|
721
|
+
}
|
|
722
|
+
__name(syncRelation, "syncRelation");
|
|
723
|
+
|
|
724
|
+
// src/actions/pull/provider/mysql.ts
|
|
725
|
+
import { DataFieldAttributeFactory as DataFieldAttributeFactory2 } from "@zenstackhq/language/factory";
|
|
726
|
+
function normalizeGenerationExpression(typeDef) {
|
|
727
|
+
return typeDef.replace(/_([0-9A-Za-z_]+)\\?'/g, "'").replace(/\\'/g, "'");
|
|
728
|
+
}
|
|
729
|
+
__name(normalizeGenerationExpression, "normalizeGenerationExpression");
|
|
730
|
+
var mysql = {
|
|
731
|
+
isSupportedFeature(feature) {
|
|
732
|
+
switch (feature) {
|
|
733
|
+
case "NativeEnum":
|
|
734
|
+
return true;
|
|
735
|
+
case "Schema":
|
|
736
|
+
default:
|
|
737
|
+
return false;
|
|
738
|
+
}
|
|
739
|
+
},
|
|
740
|
+
getBuiltinType(type2) {
|
|
741
|
+
const t = (type2 || "").toLowerCase().trim();
|
|
742
|
+
const isArray = false;
|
|
743
|
+
switch (t) {
|
|
744
|
+
// integers
|
|
745
|
+
case "tinyint":
|
|
746
|
+
case "smallint":
|
|
747
|
+
case "mediumint":
|
|
748
|
+
case "int":
|
|
749
|
+
case "integer":
|
|
750
|
+
return {
|
|
751
|
+
type: "Int",
|
|
752
|
+
isArray
|
|
753
|
+
};
|
|
754
|
+
case "bigint":
|
|
755
|
+
return {
|
|
756
|
+
type: "BigInt",
|
|
757
|
+
isArray
|
|
758
|
+
};
|
|
759
|
+
// decimals and floats
|
|
760
|
+
case "decimal":
|
|
761
|
+
case "numeric":
|
|
762
|
+
return {
|
|
763
|
+
type: "Decimal",
|
|
764
|
+
isArray
|
|
765
|
+
};
|
|
766
|
+
case "float":
|
|
767
|
+
case "double":
|
|
768
|
+
case "real":
|
|
769
|
+
return {
|
|
770
|
+
type: "Float",
|
|
771
|
+
isArray
|
|
772
|
+
};
|
|
773
|
+
// boolean (MySQL uses TINYINT(1) for boolean)
|
|
774
|
+
case "boolean":
|
|
775
|
+
case "bool":
|
|
776
|
+
return {
|
|
777
|
+
type: "Boolean",
|
|
778
|
+
isArray
|
|
779
|
+
};
|
|
780
|
+
// strings
|
|
781
|
+
case "char":
|
|
782
|
+
case "varchar":
|
|
783
|
+
case "tinytext":
|
|
784
|
+
case "text":
|
|
785
|
+
case "mediumtext":
|
|
786
|
+
case "longtext":
|
|
787
|
+
return {
|
|
788
|
+
type: "String",
|
|
789
|
+
isArray
|
|
790
|
+
};
|
|
791
|
+
// dates/times
|
|
792
|
+
case "date":
|
|
793
|
+
case "time":
|
|
794
|
+
case "datetime":
|
|
795
|
+
case "timestamp":
|
|
796
|
+
case "year":
|
|
797
|
+
return {
|
|
798
|
+
type: "DateTime",
|
|
799
|
+
isArray
|
|
800
|
+
};
|
|
801
|
+
// binary
|
|
802
|
+
case "binary":
|
|
803
|
+
case "varbinary":
|
|
804
|
+
case "tinyblob":
|
|
805
|
+
case "blob":
|
|
806
|
+
case "mediumblob":
|
|
807
|
+
case "longblob":
|
|
808
|
+
return {
|
|
809
|
+
type: "Bytes",
|
|
810
|
+
isArray
|
|
811
|
+
};
|
|
812
|
+
// json
|
|
813
|
+
case "json":
|
|
814
|
+
return {
|
|
815
|
+
type: "Json",
|
|
816
|
+
isArray
|
|
817
|
+
};
|
|
818
|
+
default:
|
|
819
|
+
if (t.startsWith("enum(")) {
|
|
820
|
+
return {
|
|
821
|
+
type: "String",
|
|
822
|
+
isArray
|
|
823
|
+
};
|
|
824
|
+
}
|
|
825
|
+
if (t.startsWith("set(")) {
|
|
826
|
+
return {
|
|
827
|
+
type: "String",
|
|
828
|
+
isArray
|
|
829
|
+
};
|
|
830
|
+
}
|
|
831
|
+
return {
|
|
832
|
+
type: "Unsupported",
|
|
833
|
+
isArray
|
|
834
|
+
};
|
|
835
|
+
}
|
|
836
|
+
},
|
|
837
|
+
getDefaultDatabaseType(type2) {
|
|
838
|
+
switch (type2) {
|
|
839
|
+
case "String":
|
|
840
|
+
return {
|
|
841
|
+
type: "varchar",
|
|
842
|
+
precision: 191
|
|
843
|
+
};
|
|
844
|
+
case "Boolean":
|
|
845
|
+
return {
|
|
846
|
+
type: "boolean"
|
|
847
|
+
};
|
|
848
|
+
case "Int":
|
|
849
|
+
return {
|
|
850
|
+
type: "int"
|
|
851
|
+
};
|
|
852
|
+
case "BigInt":
|
|
853
|
+
return {
|
|
854
|
+
type: "bigint"
|
|
855
|
+
};
|
|
856
|
+
case "Float":
|
|
857
|
+
return {
|
|
858
|
+
type: "double"
|
|
859
|
+
};
|
|
860
|
+
case "Decimal":
|
|
861
|
+
return {
|
|
862
|
+
type: "decimal",
|
|
863
|
+
precision: 65
|
|
864
|
+
};
|
|
865
|
+
case "DateTime":
|
|
866
|
+
return {
|
|
867
|
+
type: "datetime",
|
|
868
|
+
precision: 3
|
|
869
|
+
};
|
|
870
|
+
case "Json":
|
|
871
|
+
return {
|
|
872
|
+
type: "json"
|
|
873
|
+
};
|
|
874
|
+
case "Bytes":
|
|
875
|
+
return {
|
|
876
|
+
type: "longblob"
|
|
877
|
+
};
|
|
878
|
+
}
|
|
879
|
+
},
|
|
880
|
+
async introspect(connectionString, options) {
|
|
881
|
+
const mysql2 = await import("mysql2/promise");
|
|
882
|
+
const connection = await mysql2.createConnection(connectionString);
|
|
883
|
+
try {
|
|
884
|
+
const url = new URL(connectionString);
|
|
885
|
+
const databaseName = url.pathname.replace("/", "");
|
|
886
|
+
if (!databaseName) {
|
|
887
|
+
throw new CliError("Database name not found in connection string");
|
|
888
|
+
}
|
|
889
|
+
const [tableRows] = await connection.execute(getTableIntrospectionQuery(), [
|
|
890
|
+
databaseName
|
|
891
|
+
]);
|
|
892
|
+
const tables = [];
|
|
893
|
+
for (const row of tableRows) {
|
|
894
|
+
const columns = typeof row.columns === "string" ? JSON.parse(row.columns) : row.columns;
|
|
895
|
+
const indexes = typeof row.indexes === "string" ? JSON.parse(row.indexes) : row.indexes;
|
|
896
|
+
const sortedColumns = (columns || []).sort((a, b) => (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0)).map((col) => {
|
|
897
|
+
if (col.datatype === "enum" && col.datatype_name) {
|
|
898
|
+
return {
|
|
899
|
+
...col,
|
|
900
|
+
datatype_name: resolveNameCasing(options.modelCasing, col.datatype_name).name
|
|
901
|
+
};
|
|
902
|
+
}
|
|
903
|
+
if (col.computed && typeof col.datatype === "string") {
|
|
904
|
+
return {
|
|
905
|
+
...col,
|
|
906
|
+
datatype: normalizeGenerationExpression(col.datatype)
|
|
907
|
+
};
|
|
908
|
+
}
|
|
909
|
+
return col;
|
|
910
|
+
});
|
|
911
|
+
const filteredIndexes = (indexes || []).filter((idx) => !(idx.columns.length === 1 && idx.name === `${row.name}_${idx.columns[0]?.name}_fkey`));
|
|
912
|
+
tables.push({
|
|
913
|
+
schema: "",
|
|
914
|
+
name: row.name,
|
|
915
|
+
type: row.type,
|
|
916
|
+
definition: row.definition,
|
|
917
|
+
columns: sortedColumns,
|
|
918
|
+
indexes: filteredIndexes
|
|
919
|
+
});
|
|
920
|
+
}
|
|
921
|
+
const [enumRows] = await connection.execute(getEnumIntrospectionQuery(), [
|
|
922
|
+
databaseName
|
|
923
|
+
]);
|
|
924
|
+
const enums = enumRows.map((row) => {
|
|
925
|
+
const values = parseEnumValues(row.column_type);
|
|
926
|
+
const syntheticName = `${row.table_name}_${row.column_name}`;
|
|
927
|
+
const { name } = resolveNameCasing(options.modelCasing, syntheticName);
|
|
928
|
+
return {
|
|
929
|
+
schema_name: "",
|
|
930
|
+
enum_type: name,
|
|
931
|
+
values
|
|
932
|
+
};
|
|
933
|
+
});
|
|
934
|
+
return {
|
|
935
|
+
tables,
|
|
936
|
+
enums
|
|
937
|
+
};
|
|
938
|
+
} finally {
|
|
939
|
+
await connection.end();
|
|
940
|
+
}
|
|
941
|
+
},
|
|
942
|
+
getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
|
|
943
|
+
const val = defaultValue.trim();
|
|
944
|
+
if (val.toUpperCase() === "NULL") {
|
|
945
|
+
return null;
|
|
946
|
+
}
|
|
947
|
+
if (datatype === "enum" && datatype_name) {
|
|
948
|
+
const enumDef = enums.find((e) => getDbName(e) === datatype_name);
|
|
949
|
+
if (enumDef) {
|
|
950
|
+
const enumValue = val.startsWith("'") && val.endsWith("'") ? val.slice(1, -1) : val;
|
|
951
|
+
const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
|
|
952
|
+
if (enumField) {
|
|
953
|
+
return (ab) => ab.ReferenceExpr.setTarget(enumField);
|
|
954
|
+
}
|
|
955
|
+
}
|
|
956
|
+
}
|
|
957
|
+
switch (fieldType) {
|
|
958
|
+
case "DateTime":
|
|
959
|
+
if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === "current_timestamp()" || val.toLowerCase() === "now()") {
|
|
960
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
|
|
961
|
+
}
|
|
962
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
963
|
+
case "Int":
|
|
964
|
+
case "BigInt":
|
|
965
|
+
if (val.toLowerCase() === "auto_increment") {
|
|
966
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
|
|
967
|
+
}
|
|
968
|
+
return (ab) => ab.NumberLiteral.setValue(val);
|
|
969
|
+
case "Float":
|
|
970
|
+
return normalizeFloatDefault(val);
|
|
971
|
+
case "Decimal":
|
|
972
|
+
return normalizeDecimalDefault(val);
|
|
973
|
+
case "Boolean":
|
|
974
|
+
return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === "true" || val === "1" || val === "b'1'");
|
|
975
|
+
case "String":
|
|
976
|
+
if (val.toLowerCase() === "uuid()") {
|
|
977
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("uuid", services));
|
|
978
|
+
}
|
|
979
|
+
return (ab) => ab.StringLiteral.setValue(val);
|
|
980
|
+
}
|
|
981
|
+
if (val.includes("(") && val.includes(")")) {
|
|
982
|
+
return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
|
|
983
|
+
}
|
|
984
|
+
console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
|
|
985
|
+
return null;
|
|
986
|
+
},
|
|
987
|
+
getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
|
|
988
|
+
const factories = [];
|
|
989
|
+
if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
|
|
990
|
+
factories.push(new DataFieldAttributeFactory2().setDecl(getAttributeRef("@updatedAt", services)));
|
|
991
|
+
}
|
|
992
|
+
const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`)?.node;
|
|
993
|
+
const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
|
|
994
|
+
if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
|
|
995
|
+
const dbAttrFactory = new DataFieldAttributeFactory2().setDecl(dbAttr);
|
|
996
|
+
const sizeValue = length ?? precision;
|
|
997
|
+
if (sizeValue !== void 0 && sizeValue !== null) {
|
|
998
|
+
dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(sizeValue));
|
|
999
|
+
}
|
|
1000
|
+
factories.push(dbAttrFactory);
|
|
1001
|
+
}
|
|
1002
|
+
return factories;
|
|
1003
|
+
}
|
|
1004
|
+
};
|
|
1005
|
+
function getTableIntrospectionQuery() {
|
|
1006
|
+
return `
|
|
1007
|
+
-- Main query: one row per table/view with columns and indexes as nested JSON arrays.
|
|
1008
|
+
-- Uses INFORMATION_SCHEMA which is MySQL's standard metadata catalog.
|
|
1009
|
+
SELECT
|
|
1010
|
+
t.TABLE_NAME AS \`name\`, -- table or view name
|
|
1011
|
+
CASE t.TABLE_TYPE -- map MySQL table type strings to our internal types
|
|
1012
|
+
WHEN 'BASE TABLE' THEN 'table'
|
|
1013
|
+
WHEN 'VIEW' THEN 'view'
|
|
1014
|
+
ELSE NULL
|
|
1015
|
+
END AS \`type\`,
|
|
1016
|
+
CASE -- for views, retrieve the SQL definition
|
|
1017
|
+
WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION
|
|
1018
|
+
ELSE NULL
|
|
1019
|
+
END AS \`definition\`,
|
|
1020
|
+
|
|
1021
|
+
-- ===== COLUMNS subquery =====
|
|
1022
|
+
-- Wraps an ordered subquery in JSON_ARRAYAGG to produce a JSON array of column objects.
|
|
1023
|
+
(
|
|
1024
|
+
SELECT JSON_ARRAYAGG(col_json)
|
|
1025
|
+
FROM (
|
|
1026
|
+
SELECT JSON_OBJECT(
|
|
1027
|
+
'ordinal_position', c.ORDINAL_POSITION, -- column position (used for sorting)
|
|
1028
|
+
'name', c.COLUMN_NAME, -- column name
|
|
1029
|
+
|
|
1030
|
+
-- datatype: for generated/computed columns, construct the full DDL-like type definition
|
|
1031
|
+
-- (e.g., "int GENERATED ALWAYS AS (col1 + col2) STORED") so it can be rendered as
|
|
1032
|
+
-- Unsupported("..."); special-case tinyint(1) as 'boolean' (MySQL's boolean convention);
|
|
1033
|
+
-- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime').
|
|
1034
|
+
'datatype', CASE
|
|
1035
|
+
WHEN c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '' THEN
|
|
1036
|
+
CONCAT(
|
|
1037
|
+
c.COLUMN_TYPE,
|
|
1038
|
+
' GENERATED ALWAYS AS (',
|
|
1039
|
+
c.GENERATION_EXPRESSION,
|
|
1040
|
+
') ',
|
|
1041
|
+
CASE
|
|
1042
|
+
WHEN c.EXTRA LIKE '%STORED GENERATED%' THEN 'STORED'
|
|
1043
|
+
ELSE 'VIRTUAL'
|
|
1044
|
+
END
|
|
1045
|
+
)
|
|
1046
|
+
WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean'
|
|
1047
|
+
ELSE c.DATA_TYPE
|
|
1048
|
+
END,
|
|
1049
|
+
|
|
1050
|
+
-- datatype_name: for enum columns, generate a synthetic name "TableName_ColumnName"
|
|
1051
|
+
-- (MySQL doesn't have named enum types like PostgreSQL)
|
|
1052
|
+
'datatype_name', CASE
|
|
1053
|
+
WHEN c.DATA_TYPE = 'enum' THEN CONCAT(t.TABLE_NAME, '_', c.COLUMN_NAME)
|
|
1054
|
+
ELSE NULL
|
|
1055
|
+
END,
|
|
1056
|
+
|
|
1057
|
+
'datatype_schema', '', -- MySQL doesn't support multi-schema
|
|
1058
|
+
'length', c.CHARACTER_MAXIMUM_LENGTH, -- max length for string types (e.g., VARCHAR(255) -> 255)
|
|
1059
|
+
'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), -- numeric or datetime precision
|
|
1060
|
+
|
|
1061
|
+
'nullable', c.IS_NULLABLE = 'YES', -- true if column allows NULL
|
|
1062
|
+
|
|
1063
|
+
-- default: for auto_increment columns, report 'auto_increment' instead of NULL;
|
|
1064
|
+
-- otherwise use the COLUMN_DEFAULT value
|
|
1065
|
+
'default', CASE
|
|
1066
|
+
WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment'
|
|
1067
|
+
ELSE c.COLUMN_DEFAULT
|
|
1068
|
+
END,
|
|
1069
|
+
|
|
1070
|
+
'pk', c.COLUMN_KEY = 'PRI', -- true if column is part of the primary key
|
|
1071
|
+
|
|
1072
|
+
-- unique: true if the column has a single-column unique index.
|
|
1073
|
+
-- COLUMN_KEY = 'UNI' covers most cases, but may not be set when the column
|
|
1074
|
+
-- also participates in other indexes (showing 'MUL' instead on some MySQL versions).
|
|
1075
|
+
-- Also check INFORMATION_SCHEMA.STATISTICS for single-column unique indexes
|
|
1076
|
+
-- (NON_UNIQUE = 0) to match the PostgreSQL introspection behavior.
|
|
1077
|
+
'unique', (
|
|
1078
|
+
c.COLUMN_KEY = 'UNI'
|
|
1079
|
+
OR EXISTS (
|
|
1080
|
+
SELECT 1
|
|
1081
|
+
FROM INFORMATION_SCHEMA.STATISTICS s_uni
|
|
1082
|
+
WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
|
|
1083
|
+
AND s_uni.TABLE_NAME = c.TABLE_NAME
|
|
1084
|
+
AND s_uni.COLUMN_NAME = c.COLUMN_NAME
|
|
1085
|
+
AND s_uni.NON_UNIQUE = 0
|
|
1086
|
+
AND s_uni.INDEX_NAME != 'PRIMARY'
|
|
1087
|
+
AND (
|
|
1088
|
+
SELECT COUNT(*)
|
|
1089
|
+
FROM INFORMATION_SCHEMA.STATISTICS s_cnt
|
|
1090
|
+
WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
|
|
1091
|
+
AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
|
|
1092
|
+
AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
|
|
1093
|
+
) = 1
|
|
1094
|
+
)
|
|
1095
|
+
),
|
|
1096
|
+
'unique_name', (
|
|
1097
|
+
SELECT COALESCE(
|
|
1098
|
+
CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END,
|
|
1099
|
+
(
|
|
1100
|
+
SELECT s_uni.INDEX_NAME
|
|
1101
|
+
FROM INFORMATION_SCHEMA.STATISTICS s_uni
|
|
1102
|
+
WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
|
|
1103
|
+
AND s_uni.TABLE_NAME = c.TABLE_NAME
|
|
1104
|
+
AND s_uni.COLUMN_NAME = c.COLUMN_NAME
|
|
1105
|
+
AND s_uni.NON_UNIQUE = 0
|
|
1106
|
+
AND s_uni.INDEX_NAME != 'PRIMARY'
|
|
1107
|
+
AND (
|
|
1108
|
+
SELECT COUNT(*)
|
|
1109
|
+
FROM INFORMATION_SCHEMA.STATISTICS s_cnt
|
|
1110
|
+
WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
|
|
1111
|
+
AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
|
|
1112
|
+
AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
|
|
1113
|
+
) = 1
|
|
1114
|
+
LIMIT 1
|
|
1115
|
+
)
|
|
1116
|
+
)
|
|
1117
|
+
),
|
|
1118
|
+
|
|
1119
|
+
-- computed: true if column has a generation expression (virtual or stored)
|
|
1120
|
+
'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '',
|
|
1121
|
+
|
|
1122
|
+
-- options: for enum columns, the full COLUMN_TYPE string (e.g., "enum('a','b','c')")
|
|
1123
|
+
-- which gets parsed into individual values later
|
|
1124
|
+
'options', CASE
|
|
1125
|
+
WHEN c.DATA_TYPE = 'enum' THEN c.COLUMN_TYPE
|
|
1126
|
+
ELSE NULL
|
|
1127
|
+
END,
|
|
1128
|
+
|
|
1129
|
+
-- Foreign key info (NULL if column is not part of a FK)
|
|
1130
|
+
'foreign_key_schema', NULL, -- MySQL doesn't support cross-schema FKs here
|
|
1131
|
+
'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, -- referenced table
|
|
1132
|
+
'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, -- referenced column
|
|
1133
|
+
'foreign_key_name', kcu_fk.CONSTRAINT_NAME, -- FK constraint name
|
|
1134
|
+
'foreign_key_on_update', rc.UPDATE_RULE, -- referential action on update (CASCADE, SET NULL, etc.)
|
|
1135
|
+
'foreign_key_on_delete', rc.DELETE_RULE -- referential action on delete
|
|
1136
|
+
) AS col_json
|
|
1137
|
+
|
|
1138
|
+
FROM INFORMATION_SCHEMA.COLUMNS c -- one row per column in the database
|
|
1139
|
+
|
|
1140
|
+
-- Join KEY_COLUMN_USAGE to find foreign key references for this column.
|
|
1141
|
+
-- Filter to only FK entries (REFERENCED_TABLE_NAME IS NOT NULL).
|
|
1142
|
+
LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk
|
|
1143
|
+
ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA
|
|
1144
|
+
AND c.TABLE_NAME = kcu_fk.TABLE_NAME
|
|
1145
|
+
AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME
|
|
1146
|
+
AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL
|
|
1147
|
+
|
|
1148
|
+
-- Join REFERENTIAL_CONSTRAINTS to get ON UPDATE / ON DELETE rules for the FK.
|
|
1149
|
+
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc
|
|
1150
|
+
ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA
|
|
1151
|
+
AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME
|
|
1152
|
+
|
|
1153
|
+
WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA
|
|
1154
|
+
AND c.TABLE_NAME = t.TABLE_NAME
|
|
1155
|
+
ORDER BY c.ORDINAL_POSITION -- preserve original column order
|
|
1156
|
+
) AS cols_ordered
|
|
1157
|
+
) AS \`columns\`,
|
|
1158
|
+
|
|
1159
|
+
-- ===== INDEXES subquery =====
|
|
1160
|
+
-- Aggregates all indexes for this table into a JSON array.
|
|
1161
|
+
(
|
|
1162
|
+
SELECT JSON_ARRAYAGG(idx_json)
|
|
1163
|
+
FROM (
|
|
1164
|
+
SELECT JSON_OBJECT(
|
|
1165
|
+
'name', s.INDEX_NAME, -- index name (e.g., 'PRIMARY', 'idx_email')
|
|
1166
|
+
'method', s.INDEX_TYPE, -- index type (e.g., 'BTREE', 'HASH', 'FULLTEXT')
|
|
1167
|
+
'unique', s.NON_UNIQUE = 0, -- NON_UNIQUE=0 means it IS unique
|
|
1168
|
+
'primary', s.INDEX_NAME = 'PRIMARY', -- MySQL names the PK index 'PRIMARY'
|
|
1169
|
+
'valid', TRUE, -- MySQL doesn't expose index validity status
|
|
1170
|
+
'ready', TRUE, -- MySQL doesn't expose index readiness status
|
|
1171
|
+
'partial', FALSE, -- MySQL doesn't support partial indexes
|
|
1172
|
+
'predicate', NULL, -- no WHERE clause on indexes in MySQL
|
|
1173
|
+
|
|
1174
|
+
-- Index columns: nested subquery for columns in this index
|
|
1175
|
+
'columns', (
|
|
1176
|
+
SELECT JSON_ARRAYAGG(idx_col_json)
|
|
1177
|
+
FROM (
|
|
1178
|
+
SELECT JSON_OBJECT(
|
|
1179
|
+
'name', s2.COLUMN_NAME, -- column name in the index
|
|
1180
|
+
'expression', NULL, -- MySQL doesn't expose expression indexes via STATISTICS
|
|
1181
|
+
-- COLLATION: 'A' = ascending, 'D' = descending, NULL = not sorted
|
|
1182
|
+
'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END,
|
|
1183
|
+
'nulls', NULL -- MySQL doesn't expose NULLS FIRST/LAST
|
|
1184
|
+
) AS idx_col_json
|
|
1185
|
+
FROM INFORMATION_SCHEMA.STATISTICS s2 -- one row per column per index
|
|
1186
|
+
WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA
|
|
1187
|
+
AND s2.TABLE_NAME = s.TABLE_NAME
|
|
1188
|
+
AND s2.INDEX_NAME = s.INDEX_NAME
|
|
1189
|
+
ORDER BY s2.SEQ_IN_INDEX -- preserve column order within the index
|
|
1190
|
+
) AS idx_cols_ordered
|
|
1191
|
+
)
|
|
1192
|
+
) AS idx_json
|
|
1193
|
+
FROM (
|
|
1194
|
+
-- Deduplicate: STATISTICS has one row per (index, column), but we need one row per index.
|
|
1195
|
+
-- DISTINCT on INDEX_NAME gives us one entry per index with its metadata.
|
|
1196
|
+
SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME
|
|
1197
|
+
FROM INFORMATION_SCHEMA.STATISTICS
|
|
1198
|
+
WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME
|
|
1199
|
+
) s
|
|
1200
|
+
) AS idxs_ordered
|
|
1201
|
+
) AS \`indexes\`
|
|
1202
|
+
|
|
1203
|
+
-- === Main FROM: INFORMATION_SCHEMA.TABLES lists all tables and views ===
|
|
1204
|
+
FROM INFORMATION_SCHEMA.TABLES t
|
|
1205
|
+
-- Join VIEWS to get VIEW_DEFINITION for view tables
|
|
1206
|
+
LEFT JOIN INFORMATION_SCHEMA.VIEWS v
|
|
1207
|
+
ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME
|
|
1208
|
+
WHERE t.TABLE_SCHEMA = ? -- only the target database
|
|
1209
|
+
AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') -- exclude system tables like SYSTEM VIEW
|
|
1210
|
+
AND t.TABLE_NAME <> '_prisma_migrations' -- exclude Prisma migration tracking table
|
|
1211
|
+
ORDER BY t.TABLE_NAME;
|
|
1212
|
+
`;
|
|
1213
|
+
}
|
|
1214
|
+
__name(getTableIntrospectionQuery, "getTableIntrospectionQuery");
|
|
1215
|
+
function getEnumIntrospectionQuery() {
|
|
1216
|
+
return `
|
|
1217
|
+
SELECT
|
|
1218
|
+
c.TABLE_NAME AS table_name, -- table containing the enum column
|
|
1219
|
+
c.COLUMN_NAME AS column_name, -- column name
|
|
1220
|
+
c.COLUMN_TYPE AS column_type -- full type string including values (e.g., "enum('val1','val2')")
|
|
1221
|
+
FROM INFORMATION_SCHEMA.COLUMNS c
|
|
1222
|
+
WHERE c.TABLE_SCHEMA = ? -- only the target database
|
|
1223
|
+
AND c.DATA_TYPE = 'enum' -- only enum columns
|
|
1224
|
+
ORDER BY c.TABLE_NAME, c.COLUMN_NAME;
|
|
1225
|
+
`;
|
|
1226
|
+
}
|
|
1227
|
+
__name(getEnumIntrospectionQuery, "getEnumIntrospectionQuery");
|
|
1228
|
+
function parseEnumValues(columnType) {
|
|
1229
|
+
const match = columnType.match(/^enum\((.+)\)$/i);
|
|
1230
|
+
if (!match || !match[1]) return [];
|
|
1231
|
+
const valuesString = match[1];
|
|
1232
|
+
const values = [];
|
|
1233
|
+
let current = "";
|
|
1234
|
+
let inQuote = false;
|
|
1235
|
+
let i = 0;
|
|
1236
|
+
while (i < valuesString.length) {
|
|
1237
|
+
const char = valuesString[i];
|
|
1238
|
+
if (char === "'" && !inQuote) {
|
|
1239
|
+
inQuote = true;
|
|
1240
|
+
i++;
|
|
1241
|
+
continue;
|
|
1242
|
+
}
|
|
1243
|
+
if (char === "'" && inQuote) {
|
|
1244
|
+
if (valuesString[i + 1] === "'") {
|
|
1245
|
+
current += "'";
|
|
1246
|
+
i += 2;
|
|
1247
|
+
continue;
|
|
1248
|
+
}
|
|
1249
|
+
values.push(current);
|
|
1250
|
+
current = "";
|
|
1251
|
+
inQuote = false;
|
|
1252
|
+
i++;
|
|
1253
|
+
while (i < valuesString.length && (valuesString[i] === "," || valuesString[i] === " ")) {
|
|
1254
|
+
i++;
|
|
1255
|
+
}
|
|
1256
|
+
continue;
|
|
1257
|
+
}
|
|
1258
|
+
if (inQuote) {
|
|
1259
|
+
current += char;
|
|
1260
|
+
}
|
|
1261
|
+
i++;
|
|
1262
|
+
}
|
|
1263
|
+
return values;
|
|
1264
|
+
}
|
|
1265
|
+
__name(parseEnumValues, "parseEnumValues");
|
|
1266
|
+
|
|
1267
|
+
// src/actions/pull/provider/postgresql.ts
|
|
1268
|
+
import { DataFieldAttributeFactory as DataFieldAttributeFactory3 } from "@zenstackhq/language/factory";
|
|
1269
|
+
import { Client } from "pg";
|
|
1270
|
+
var pgTypnameToStandard = {
|
|
1271
|
+
int2: "smallint",
|
|
1272
|
+
int4: "integer",
|
|
1273
|
+
int8: "bigint",
|
|
1274
|
+
float4: "real",
|
|
1275
|
+
float8: "double precision",
|
|
1276
|
+
bool: "boolean",
|
|
1277
|
+
bpchar: "character",
|
|
1278
|
+
numeric: "decimal"
|
|
1279
|
+
};
|
|
1280
|
+
var standardTypePrecisions = {
|
|
1281
|
+
int2: 16,
|
|
1282
|
+
smallint: 16,
|
|
1283
|
+
int4: 32,
|
|
1284
|
+
integer: 32,
|
|
1285
|
+
int8: 64,
|
|
1286
|
+
bigint: 64,
|
|
1287
|
+
float4: 24,
|
|
1288
|
+
real: 24,
|
|
1289
|
+
float8: 53,
|
|
1290
|
+
"double precision": 53
|
|
1291
|
+
};
|
|
1292
|
+
var pgTypnameToZenStackNativeType = {
|
|
1293
|
+
// integers
|
|
1294
|
+
int2: "SmallInt",
|
|
1295
|
+
smallint: "SmallInt",
|
|
1296
|
+
int4: "Integer",
|
|
1297
|
+
integer: "Integer",
|
|
1298
|
+
int8: "BigInt",
|
|
1299
|
+
bigint: "BigInt",
|
|
1300
|
+
// decimals and floats
|
|
1301
|
+
numeric: "Decimal",
|
|
1302
|
+
decimal: "Decimal",
|
|
1303
|
+
float4: "Real",
|
|
1304
|
+
real: "Real",
|
|
1305
|
+
float8: "DoublePrecision",
|
|
1306
|
+
"double precision": "DoublePrecision",
|
|
1307
|
+
// boolean
|
|
1308
|
+
bool: "Boolean",
|
|
1309
|
+
boolean: "Boolean",
|
|
1310
|
+
// strings
|
|
1311
|
+
text: "Text",
|
|
1312
|
+
varchar: "VarChar",
|
|
1313
|
+
"character varying": "VarChar",
|
|
1314
|
+
bpchar: "Char",
|
|
1315
|
+
character: "Char",
|
|
1316
|
+
// uuid
|
|
1317
|
+
uuid: "Uuid",
|
|
1318
|
+
// dates/times
|
|
1319
|
+
date: "Date",
|
|
1320
|
+
time: "Time",
|
|
1321
|
+
timetz: "Timetz",
|
|
1322
|
+
timestamp: "Timestamp",
|
|
1323
|
+
timestamptz: "Timestamptz",
|
|
1324
|
+
// binary
|
|
1325
|
+
bytea: "ByteA",
|
|
1326
|
+
// json
|
|
1327
|
+
json: "Json",
|
|
1328
|
+
jsonb: "JsonB",
|
|
1329
|
+
// xml
|
|
1330
|
+
xml: "Xml",
|
|
1331
|
+
// network types
|
|
1332
|
+
inet: "Inet",
|
|
1333
|
+
// bit strings
|
|
1334
|
+
bit: "Bit",
|
|
1335
|
+
varbit: "VarBit",
|
|
1336
|
+
// oid
|
|
1337
|
+
oid: "Oid",
|
|
1338
|
+
// money
|
|
1339
|
+
money: "Money",
|
|
1340
|
+
// citext extension
|
|
1341
|
+
citext: "Citext"
|
|
1342
|
+
};
+var postgresql = {
+  isSupportedFeature(feature) {
+    const supportedFeatures = [
+      "Schema",
+      "NativeEnum"
+    ];
+    return supportedFeatures.includes(feature);
+  },
+  getBuiltinType(type2) {
+    const t = (type2 || "").toLowerCase();
+    const isArray = t.startsWith("_");
+    switch (t.replace(/^_/, "")) {
+      // integers
+      case "int2":
+      case "smallint":
+      case "int4":
+      case "integer":
+        return {
+          type: "Int",
+          isArray
+        };
+      case "int8":
+      case "bigint":
+        return {
+          type: "BigInt",
+          isArray
+        };
+      // decimals and floats
+      case "numeric":
+      case "decimal":
+        return {
+          type: "Decimal",
+          isArray
+        };
+      case "float4":
+      case "real":
+      case "float8":
+      case "double precision":
+        return {
+          type: "Float",
+          isArray
+        };
+      // boolean
+      case "bool":
+      case "boolean":
+        return {
+          type: "Boolean",
+          isArray
+        };
+      // strings
+      case "text":
+      case "varchar":
+      case "bpchar":
+      case "character varying":
+      case "character":
+        return {
+          type: "String",
+          isArray
+        };
+      // uuid
+      case "uuid":
+        return {
+          type: "String",
+          isArray
+        };
+      // dates/times
+      case "date":
+      case "time":
+      case "timetz":
+      case "timestamp":
+      case "timestamptz":
+        return {
+          type: "DateTime",
+          isArray
+        };
+      // binary
+      case "bytea":
+        return {
+          type: "Bytes",
+          isArray
+        };
+      // json
+      case "json":
+      case "jsonb":
+        return {
+          type: "Json",
+          isArray
+        };
+      default:
+        return {
+          type: "Unsupported",
+          isArray
+        };
+    }
+  },
+  async introspect(connectionString, options) {
+    const client = new Client({
+      connectionString
+    });
+    await client.connect();
+    try {
+      const { rows: tables } = await client.query(tableIntrospectionQuery);
+      const { rows: enums } = await client.query(enumIntrospectionQuery);
+      const filteredTables = tables.filter((t) => options.schemas.includes(t.schema));
+      const filteredEnums = enums.filter((e) => options.schemas.includes(e.schema_name));
+      return {
+        enums: filteredEnums,
+        tables: filteredTables
+      };
+    } finally {
+      await client.end();
+    }
+  },
+  getDefaultDatabaseType(type2) {
+    switch (type2) {
+      case "String":
+        return {
+          type: "text"
+        };
+      case "Boolean":
+        return {
+          type: "boolean"
+        };
+      case "Int":
+        return {
+          type: "integer"
+        };
+      case "BigInt":
+        return {
+          type: "bigint"
+        };
+      case "Float":
+        return {
+          type: "double precision"
+        };
+      case "Decimal":
+        return {
+          type: "decimal"
+        };
+      case "DateTime":
+        return {
+          type: "timestamp",
+          precision: 3
+        };
+      case "Json":
+        return {
+          type: "jsonb"
+        };
+      case "Bytes":
+        return {
+          type: "bytea"
+        };
+    }
+  },
+  getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
+    const val = defaultValue.trim();
+    if (datatype === "enum" && datatype_name) {
+      const enumDef = enums.find((e) => getDbName(e) === datatype_name);
+      if (enumDef) {
+        const enumValue = val.replace(/'/g, "").split("::")[0]?.trim();
+        const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
+        if (enumField) {
+          return (ab) => ab.ReferenceExpr.setTarget(enumField);
+        }
+      }
+      return typeCastingConvert({
+        defaultValue,
+        enums,
+        val,
+        services
+      });
+    }
+    switch (fieldType) {
+      case "DateTime":
+        if (val === "CURRENT_TIMESTAMP" || val === "now()") {
+          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
+        }
+        if (val.includes("::")) {
+          return typeCastingConvert({
+            defaultValue,
+            enums,
+            val,
+            services
+          });
+        }
+        return (ab) => ab.StringLiteral.setValue(val);
+      case "Int":
+      case "BigInt":
+        if (val.startsWith("nextval(")) {
+          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
+        }
+        if (val.includes("::")) {
+          return typeCastingConvert({
+            defaultValue,
+            enums,
+            val,
+            services
+          });
+        }
+        return (ab) => ab.NumberLiteral.setValue(val);
+      case "Float":
+        if (val.includes("::")) {
+          return typeCastingConvert({
+            defaultValue,
+            enums,
+            val,
+            services
+          });
+        }
+        return normalizeFloatDefault(val);
+      case "Decimal":
+        if (val.includes("::")) {
+          return typeCastingConvert({
+            defaultValue,
+            enums,
+            val,
+            services
+          });
+        }
+        return normalizeDecimalDefault(val);
+      case "Boolean":
+        return (ab) => ab.BooleanLiteral.setValue(val === "true");
+      case "String":
+        if (val.includes("::")) {
+          return typeCastingConvert({
+            defaultValue,
+            enums,
+            val,
+            services
+          });
+        }
+        if (val.startsWith("'") && val.endsWith("'")) {
+          return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'"));
+        }
+        return (ab) => ab.StringLiteral.setValue(val);
+    }
+    if (val.includes("(") && val.includes(")")) {
+      return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
+    }
+    console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
+    return null;
+  },
+  getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
+    const factories = [];
+    if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
+      factories.push(new DataFieldAttributeFactory3().setDecl(getAttributeRef("@updatedAt", services)));
+    }
+    const nativeTypeName = pgTypnameToZenStackNativeType[datatype.toLowerCase()] ?? datatype;
+    const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${nativeTypeName.toLowerCase()}`)?.node;
+    const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
+    const normalizedDatatype = pgTypnameToStandard[datatype.toLowerCase()] ?? datatype.toLowerCase();
+    const standardPrecision = standardTypePrecisions[datatype.toLowerCase()];
+    const isStandardPrecision = standardPrecision !== void 0 && precision === standardPrecision;
+    if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== normalizedDatatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
+      const dbAttrFactory = new DataFieldAttributeFactory3().setDecl(dbAttr);
+      if ((length || precision) && !isStandardPrecision) {
+        dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length || precision));
+      }
+      factories.push(dbAttrFactory);
+    }
+    return factories;
+  }
+};
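A compact re-sketch of the array-aware Postgres type mapping above (standalone and illustrative only; the real implementation covers many more types):

  function mapPgType(typname: string): { type: string; isArray: boolean } {
    const t = typname.toLowerCase();
    const isArray = t.startsWith("_"); // pg prefixes array element types with "_"
    const base = t.replace(/^_/, "");
    if (["int2", "smallint", "int4", "integer"].includes(base)) return { type: "Int", isArray };
    if (["text", "varchar", "bpchar", "character varying", "character", "uuid"].includes(base)) return { type: "String", isArray };
    return { type: "Unsupported", isArray };
  }
  // mapPgType("_int4") -> { type: "Int", isArray: true }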
+var enumIntrospectionQuery = `
+SELECT
+  n.nspname AS schema_name,  -- schema the enum belongs to (e.g., 'public')
+  t.typname AS enum_type,    -- enum type name as defined in CREATE TYPE
+  coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values  -- ordered list of enum labels as JSON array
+FROM pg_type t                                 -- pg_type: catalog of all data types
+JOIN pg_enum e ON t.oid = e.enumtypid          -- pg_enum: one row per enum label; join to get labels for this enum type
+JOIN pg_namespace n ON n.oid = t.typnamespace  -- pg_namespace: schema info; join to get the schema name
+GROUP BY schema_name, enum_type                -- one row per enum type, with all labels aggregated
+ORDER BY schema_name, enum_type;`;
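The row shape this query produces, written out for readability (the field names follow the aliases above; the sample values are invented):

  type PgEnumRow = { schema_name: string; enum_type: string; values: string[] };
  const example: PgEnumRow = { schema_name: "public", enum_type: "user_role", values: ["ADMIN", "MEMBER"] };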
+var tableIntrospectionQuery = `
+-- Main query: one row per table/view with columns and indexes as nested JSON arrays.
+-- Joins pg_class (tables/views) with pg_namespace (schemas).
+SELECT
+  "ns"."nspname" AS "schema",  -- schema name (e.g., 'public')
+  "cls"."relname" AS "name",   -- table or view name
+  CASE "cls"."relkind"         -- relkind: 'r' = ordinary table, 'v' = view
+    WHEN 'r' THEN 'table'
+    WHEN 'v' THEN 'view'
+    ELSE NULL
+  END AS "type",
+  CASE  -- for views, retrieve the SQL definition
+    WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true)
+    ELSE NULL
+  END AS "definition",
+
+  -- ===== COLUMNS subquery =====
+  -- Aggregates all columns for this table into a JSON array.
+  (
+    SELECT coalesce(json_agg(agg), '[]')
+    FROM (
+      SELECT
+        "att"."attname" AS "name",  -- column name
+
+        -- datatype: if the type is an enum, report 'enum';
+        -- if the column is generated/computed, construct the full DDL-like type definition
+        -- (e.g., "text GENERATED ALWAYS AS (expr) STORED") so it can be rendered as Unsupported("...");
+        -- otherwise use the pg_type name.
+        CASE
+          WHEN EXISTS (
+            SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
+            WHERE "e"."enumtypid" = "typ"."oid"
+          ) THEN 'enum'
+          WHEN "att"."attgenerated" != '' THEN
+            format_type("att"."atttypid", "att"."atttypmod")
+            || ' GENERATED ALWAYS AS ('
+            || pg_get_expr("def"."adbin", "def"."adrelid")
+            || ') '
+            || CASE "att"."attgenerated"
+                 WHEN 's' THEN 'STORED'
+                 WHEN 'v' THEN 'VIRTUAL'
+                 ELSE 'STORED'
+               END
+          ELSE "typ"."typname"::text  -- internal type name (e.g., 'int4', 'varchar', 'text'); cast to text to prevent CASE from coercing result to name type (max 63 chars)
+        END AS "datatype",
+
+        -- datatype_name: for enums only, the actual enum type name (used to look up the enum definition)
+        CASE
+          WHEN EXISTS (
+            SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
+            WHERE "e"."enumtypid" = "typ"."oid"
+          ) THEN "typ"."typname"
+          ELSE NULL
+        END AS "datatype_name",
+
+        "tns"."nspname" AS "datatype_schema",  -- schema where the data type is defined
+        "c"."character_maximum_length" AS "length",  -- max length for char/varchar types (from information_schema)
+        COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision",  -- numeric or datetime precision
+
+        -- Foreign key info (NULL if column is not part of a FK constraint)
+        "fk_ns"."nspname" AS "foreign_key_schema",   -- schema of the referenced table
+        "fk_cls"."relname" AS "foreign_key_table",   -- referenced table name
+        "fk_att"."attname" AS "foreign_key_column",  -- referenced column name
+        "fk_con"."conname" AS "foreign_key_name",    -- FK constraint name
+
+        -- FK referential actions: decode single-char codes to human-readable strings
+        CASE "fk_con"."confupdtype"
+          WHEN 'a' THEN 'NO ACTION'
+          WHEN 'r' THEN 'RESTRICT'
+          WHEN 'c' THEN 'CASCADE'
+          WHEN 'n' THEN 'SET NULL'
+          WHEN 'd' THEN 'SET DEFAULT'
+          ELSE NULL
+        END AS "foreign_key_on_update",
+        CASE "fk_con"."confdeltype"
+          WHEN 'a' THEN 'NO ACTION'
+          WHEN 'r' THEN 'RESTRICT'
+          WHEN 'c' THEN 'CASCADE'
+          WHEN 'n' THEN 'SET NULL'
+          WHEN 'd' THEN 'SET DEFAULT'
+          ELSE NULL
+        END AS "foreign_key_on_delete",
+
+        -- pk: true if this column is part of the table's primary key constraint
+        "pk_con"."conkey" IS NOT NULL AS "pk",
+
+        -- unique: true if the column has a single-column UNIQUE constraint OR a single-column unique index
+        (
+          -- Check for a single-column UNIQUE constraint (contype = 'u')
+          EXISTS (
+            SELECT 1
+            FROM "pg_catalog"."pg_constraint" AS "u_con"
+            WHERE "u_con"."contype" = 'u'                  -- 'u' = unique constraint
+              AND "u_con"."conrelid" = "cls"."oid"         -- on this table
+              AND array_length("u_con"."conkey", 1) = 1    -- single-column only
+              AND "att"."attnum" = ANY ("u_con"."conkey")  -- this column is in the constraint
+          )
+          OR
+          -- Check for a single-column unique index (may exist without an explicit constraint)
+          EXISTS (
+            SELECT 1
+            FROM "pg_catalog"."pg_index" AS "u_idx"
+            WHERE "u_idx"."indrelid" = "cls"."oid"                 -- on this table
+              AND "u_idx"."indisunique" = TRUE                     -- it's a unique index
+              AND "u_idx"."indnkeyatts" = 1                        -- single key column
+              AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[])  -- this column is the key
+          )
+        ) AS "unique",
+
+        -- unique_name: the name of the unique constraint or index (whichever exists first)
+        (
+          SELECT COALESCE(
+            -- Try constraint name first
+            (
+              SELECT "u_con"."conname"
+              FROM "pg_catalog"."pg_constraint" AS "u_con"
+              WHERE "u_con"."contype" = 'u'
+                AND "u_con"."conrelid" = "cls"."oid"
+                AND array_length("u_con"."conkey", 1) = 1
+                AND "att"."attnum" = ANY ("u_con"."conkey")
+              LIMIT 1
+            ),
+            -- Fall back to unique index name
+            (
+              SELECT "u_idx_cls"."relname"
+              FROM "pg_catalog"."pg_index" AS "u_idx"
+              JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid"
+              WHERE "u_idx"."indrelid" = "cls"."oid"
+                AND "u_idx"."indisunique" = TRUE
+                AND "u_idx"."indnkeyatts" = 1
+                AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[])
+              LIMIT 1
+            )
+          )
+        ) AS "unique_name",
+
+        "att"."attgenerated" != '' AS "computed",  -- true if column is a generated/computed column
+        -- For generated columns, pg_attrdef stores the generation expression (not a default),
+        -- so we must null it out to avoid emitting a spurious @default(dbgenerated(...)) attribute.
+        CASE
+          WHEN "att"."attgenerated" != '' THEN NULL
+          ELSE pg_get_expr("def"."adbin", "def"."adrelid")
+        END AS "default",  -- column default expression as text (e.g., 'nextval(...)', '0', 'now()')
+        "att"."attnotnull" != TRUE AS "nullable",  -- true if column allows NULL values
+
+        -- options: for enum columns, aggregates all allowed enum labels into a JSON array
+        coalesce(
+          (
+            SELECT json_agg("enm"."enumlabel") AS "o"
+            FROM "pg_catalog"."pg_enum" AS "enm"
+            WHERE "enm"."enumtypid" = "typ"."oid"
+          ),
+          '[]'
+        ) AS "options"
+
+      -- === FROM / JOINs for the columns subquery ===
+
+      -- pg_attribute: one row per table column (attnum >= 0 excludes system columns)
+      FROM "pg_catalog"."pg_attribute" AS "att"
+
+      -- pg_type: data type of the column (e.g., int4, text, custom_enum)
+      INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid"
+
+      -- pg_namespace for the type: needed to determine which schema the type lives in
+      INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace"
+
+      -- information_schema.columns: provides length/precision info not easily available from pg_catalog
+      LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname"
+        AND "c"."table_name" = "cls"."relname"
+        AND "c"."column_name" = "att"."attname"
+
+      -- pg_constraint (primary key): join on contype='p' to detect if column is part of PK
+      LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p'
+        AND "pk_con"."conrelid" = "cls"."oid"
+        AND "att"."attnum" = ANY ("pk_con"."conkey")
+
+      -- pg_constraint (foreign key): join on contype='f' to get FK details for this column
+      LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f'
+        AND "fk_con"."conrelid" = "cls"."oid"
+        AND "att"."attnum" = ANY ("fk_con"."conkey")
+
+      -- pg_class for FK target table: resolve the referenced table's OID to its name
+      LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid"
+
+      -- pg_namespace for FK target: get the schema of the referenced table
+      LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace"
+
+      -- pg_attribute for FK target column: resolve the referenced column number to its name.
+      -- Use array_position to correlate by position: find this source column's index in conkey,
+      -- then pick the referenced attnum at that same index from confkey.
+      -- This ensures composite FKs correctly map each source column to its corresponding target column.
+      LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid"
+        AND "fk_att"."attnum" = "fk_con"."confkey"[array_position("fk_con"."conkey", "att"."attnum")]
+
+      -- pg_attrdef: column defaults; adbin contains the internal expression, decoded via pg_get_expr()
+      LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum"
+
+      WHERE
+        "att"."attrelid" = "cls"."oid"    -- only columns belonging to this table
+        AND "att"."attnum" >= 0           -- exclude system columns (ctid, xmin, etc. have attnum < 0)
+        AND "att"."attisdropped" != TRUE  -- exclude dropped (deleted) columns
+      ORDER BY "att"."attnum"             -- preserve original column order
+    ) AS agg
+  ) AS "columns",
+
+  -- ===== INDEXES subquery =====
+  -- Aggregates all indexes for this table into a JSON array.
+  (
+    SELECT coalesce(json_agg(agg), '[]')
+    FROM (
+      SELECT
+        "idx_cls"."relname" AS "name",      -- index name
+        "am"."amname" AS "method",          -- access method (e.g., 'btree', 'hash', 'gin', 'gist')
+        "idx"."indisunique" AS "unique",    -- true if unique index
+        "idx"."indisprimary" AS "primary",  -- true if this is the PK index
+        "idx"."indisvalid" AS "valid",      -- false during concurrent index builds
+        "idx"."indisready" AS "ready",      -- true when index is ready for inserts
+        ("idx"."indpred" IS NOT NULL) AS "partial",  -- true if index has a WHERE clause (partial index)
+        pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate",  -- the WHERE clause expression for partial indexes
+
+        -- Index columns: iterate over each position in the index key array
+        (
+          SELECT json_agg(
+            json_build_object(
+              -- 'name': column name, or for expression indexes the expression text
+              'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)),
+              -- 'expression': non-null only for expression-based index columns (e.g., lower(name))
+              'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END,
+              -- 'order': sort direction; bit 0 of indoption = 1 means DESC
+              'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END,
+              -- 'nulls': null ordering; bit 1 of indoption = 1 means NULLS FIRST
+              'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END
+            )
+            ORDER BY "s"."i"  -- preserve column order within the index
+          )
+          -- generate_subscripts creates one row per index key position (1-based)
+          FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i")
+          -- Join to pg_attribute to resolve column numbers to names
+          -- NULL attname means it's an expression index column
+          LEFT JOIN "pg_catalog"."pg_attribute" AS "att"
+            ON "att"."attrelid" = "cls"."oid"
+            AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"]
+        ) AS "columns"
+
+      FROM "pg_catalog"."pg_index" AS "idx"  -- pg_index: one row per index
+      JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid"  -- index's own pg_class entry (for the name)
+      JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid"  -- access method catalog
+      WHERE "idx"."indrelid" = "cls"."oid"  -- only indexes on this table
+      ORDER BY "idx_cls"."relname"
+    ) AS agg
+  ) AS "indexes"
+
+-- === Main FROM: pg_class (tables and views) joined with pg_namespace (schemas) ===
+FROM "pg_catalog"."pg_class" AS "cls"
+INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid"
+WHERE
+  "ns"."nspname" !~ '^pg_'                     -- exclude PostgreSQL internal schemas (pg_catalog, pg_toast, etc.)
+  AND "ns"."nspname" != 'information_schema'   -- exclude the information_schema
+  AND "cls"."relkind" IN ('r', 'v')            -- only tables ('r') and views ('v')
+  AND "cls"."relname" !~ '^pg_'                -- exclude system tables starting with pg_
+  AND "cls"."relname" !~ '_prisma_migrations'  -- exclude Prisma migration tracking table
+ORDER BY "ns"."nspname", "cls"."relname" ASC;
+`;
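An approximate TypeScript view of one entry in the "columns" JSON array, inferred from the aliases in the query above (illustrative; the package may type this differently internally):

  interface IntrospectedColumn {
    name: string;
    datatype: string;              // 'enum', a generated-column DDL string, or a pg_type name
    datatype_name: string | null;  // enum type name, enum columns only
    datatype_schema: string;
    length: number | null;
    precision: number | null;
    foreign_key_schema: string | null;
    foreign_key_table: string | null;
    foreign_key_column: string | null;
    foreign_key_name: string | null;
    foreign_key_on_update: string | null;
    foreign_key_on_delete: string | null;
    pk: boolean;
    unique: boolean;
    unique_name: string | null;
    computed: boolean;
    default: string | null;
    nullable: boolean;
    options: string[];             // enum labels, empty for non-enum columns
  }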
+function typeCastingConvert({ defaultValue, enums, val, services }) {
+  const [value, type2] = val.replace(/'/g, "").split("::").map((s) => s.trim());
+  switch (type2) {
+    case "character varying":
+    case "uuid":
+    case "json":
+    case "jsonb":
+    case "text":
+      if (value === "NULL") return null;
+      return (ab) => ab.StringLiteral.setValue(value);
+    case "real":
+      return (ab) => ab.NumberLiteral.setValue(value);
+    default: {
+      const enumDef = enums.find((e) => getDbName(e, true) === type2);
+      if (!enumDef) {
+        return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
+      }
+      const enumField = enumDef.fields.find((v) => getDbName(v) === value);
+      if (!enumField) {
+        throw new CliError(`Enum value ${value} not found in enum ${type2} for default value ${defaultValue}`);
+      }
+      return (ab) => ab.ReferenceExpr.setTarget(enumField);
+    }
+  }
+}
+__name(typeCastingConvert, "typeCastingConvert");
+
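The "::" cast handling above, restated as a tiny standalone helper (illustrative; not exported by the package):

  function splitCastDefault(def: string): { value: string; castType?: string } {
    const [value, castType] = def.replace(/'/g, "").split("::").map((s) => s.trim());
    return { value: value ?? "", castType };
  }
  // splitCastDefault("'ACTIVE'::status") -> { value: "ACTIVE", castType: "status" }  (becomes an enum reference if 'status' is a known enum)
  // splitCastDefault("'hello'::text")    -> { value: "hello", castType: "text" }     (becomes a plain string literal)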
+// src/actions/pull/provider/sqlite.ts
+import { DataFieldAttributeFactory as DataFieldAttributeFactory4 } from "@zenstackhq/language/factory";
+var sqlite = {
+  isSupportedFeature(feature) {
+    switch (feature) {
+      case "Schema":
+        return false;
+      case "NativeEnum":
+        return false;
+      default:
+        return false;
+    }
+  },
+  getBuiltinType(type2) {
+    const t = (type2 || "").toLowerCase().trim().replace(/\(.*\)$/, "").trim();
+    const isArray = false;
+    switch (t) {
+      // INTEGER types (SQLite: INT, INTEGER, TINYINT, SMALLINT, MEDIUMINT, INT2, INT8)
+      case "integer":
+      case "int":
+      case "tinyint":
+      case "smallint":
+      case "mediumint":
+      case "int2":
+      case "int8":
+        return {
+          type: "Int",
+          isArray
+        };
+      // BIGINT - map to BigInt for large integers
+      case "bigint":
+      case "unsigned big int":
+        return {
+          type: "BigInt",
+          isArray
+        };
+      // TEXT types (SQLite: CHARACTER, VARCHAR, VARYING CHARACTER, NCHAR, NATIVE CHARACTER, NVARCHAR, TEXT, CLOB)
+      case "text":
+      case "varchar":
+      case "char":
+      case "character":
+      case "varying character":
+      case "nchar":
+      case "native character":
+      case "nvarchar":
+      case "clob":
+        return {
+          type: "String",
+          isArray
+        };
+      // BLOB type
+      case "blob":
+        return {
+          type: "Bytes",
+          isArray
+        };
+      // REAL types (SQLite: REAL, DOUBLE, DOUBLE PRECISION, FLOAT)
+      case "real":
+      case "float":
+      case "double":
+      case "double precision":
+        return {
+          type: "Float",
+          isArray
+        };
+      // NUMERIC types (SQLite: NUMERIC, DECIMAL)
+      case "numeric":
+      case "decimal":
+        return {
+          type: "Decimal",
+          isArray
+        };
+      // DateTime types
+      case "datetime":
+      case "date":
+      case "time":
+      case "timestamp":
+        return {
+          type: "DateTime",
+          isArray
+        };
+      // JSON types
+      case "json":
+      case "jsonb":
+        return {
+          type: "Json",
+          isArray
+        };
+      // Boolean types
+      case "boolean":
+      case "bool":
+        return {
+          type: "Boolean",
+          isArray
+        };
+      default: {
+        if (!t) {
+          return {
+            type: "Bytes",
+            isArray
+          };
+        }
+        if (t.includes("int")) {
+          return {
+            type: "Int",
+            isArray
+          };
+        }
+        if (t.includes("char") || t.includes("clob") || t.includes("text")) {
+          return {
+            type: "String",
+            isArray
+          };
+        }
+        if (t.includes("blob")) {
+          return {
+            type: "Bytes",
+            isArray
+          };
+        }
+        if (t.includes("real") || t.includes("floa") || t.includes("doub")) {
+          return {
+            type: "Float",
+            isArray
+          };
+        }
+        return {
+          type: "Unsupported",
+          isArray
+        };
+      }
+    }
+  },
+  getDefaultDatabaseType() {
+    return void 0;
+  },
+  async introspect(connectionString, _options) {
+    const SQLite2 = (await import("better-sqlite3")).default;
+    const db = new SQLite2(connectionString, {
+      readonly: true
+    });
+    try {
+      const all = /* @__PURE__ */ __name((sql) => {
+        const stmt = db.prepare(sql);
+        return stmt.all();
+      }, "all");
+      const tablesRaw = all("SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name");
+      const autoIncrementTables = /* @__PURE__ */ new Set();
+      for (const t of tablesRaw) {
+        if (t.type === "table" && t.definition) {
+          if (/\bAUTOINCREMENT\b/i.test(t.definition)) {
+            autoIncrementTables.add(t.name);
+          }
+        }
+      }
+      const tables = [];
+      for (const t of tablesRaw) {
+        const tableName = t.name;
+        const schema = "";
+        const hasAutoIncrement = autoIncrementTables.has(tableName);
+        const columnsInfo = all(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`);
+        const tableNameEsc = tableName.replace(/'/g, "''");
+        const idxList = all(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => !r.name.startsWith("sqlite_autoindex_"));
+        const uniqueSingleColumn = /* @__PURE__ */ new Set();
+        const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1);
+        for (const idx of uniqueIndexRows) {
+          const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
+          if (idxCols.length === 1 && idxCols[0]?.name) {
+            uniqueSingleColumn.add(idxCols[0].name);
+          }
+        }
+        const indexes = idxList.map((idx) => {
+          const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
+          return {
+            name: idx.name,
+            method: null,
+            unique: idx.unique === 1,
+            primary: false,
+            valid: true,
+            ready: true,
+            partial: idx.partial === 1,
+            predicate: idx.partial === 1 ? "[partial]" : null,
+            columns: idxCols.map((col) => ({
+              name: col.name,
+              expression: null,
+              order: null,
+              nulls: null
+            }))
+          };
+        });
+        const fkRows = all(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`);
+        const fkConstraintNames = /* @__PURE__ */ new Map();
+        if (t.definition) {
+          const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(([^)]+)\)/gi;
+          let match;
+          while ((match = fkRegex.exec(t.definition)) !== null) {
+            const constraintName = match[1] || match[2];
+            const columnList = match[3];
+            if (constraintName && columnList) {
+              const columns2 = columnList.split(",").map((col) => col.trim().replace(/^["'`]|["'`]$/g, ""));
+              for (const col of columns2) {
+                if (col) {
+                  fkConstraintNames.set(col, constraintName);
+                }
+              }
+            }
+          }
+        }
+        const fkByColumn = /* @__PURE__ */ new Map();
+        for (const fk of fkRows) {
+          fkByColumn.set(fk.from, {
+            foreign_key_schema: "",
+            foreign_key_table: fk.table || null,
+            foreign_key_column: fk.to || null,
+            foreign_key_name: fkConstraintNames.get(fk.from) ?? null,
+            foreign_key_on_update: fk.on_update ?? null,
+            foreign_key_on_delete: fk.on_delete ?? null
+          });
+        }
+        const generatedColDefs = t.definition ? extractColumnTypeDefs(t.definition) : /* @__PURE__ */ new Map();
+        const columns = [];
+        for (const c of columnsInfo) {
+          const hidden = c.hidden ?? 0;
+          if (hidden === 1) continue;
+          const isGenerated = hidden === 2 || hidden === 3;
+          const fk = fkByColumn.get(c.name);
+          let defaultValue = c.dflt_value;
+          if (hasAutoIncrement && c.pk) {
+            defaultValue = "autoincrement";
+          }
+          let datatype = c.type || "";
+          if (isGenerated) {
+            const fullDef = generatedColDefs.get(c.name);
+            if (fullDef) {
+              datatype = fullDef;
+            }
+          }
+          columns.push({
+            name: c.name,
+            datatype,
+            datatype_name: null,
+            length: null,
+            precision: null,
+            datatype_schema: schema,
+            foreign_key_schema: fk?.foreign_key_schema ?? null,
+            foreign_key_table: fk?.foreign_key_table ?? null,
+            foreign_key_column: fk?.foreign_key_column ?? null,
+            foreign_key_name: fk?.foreign_key_name ?? null,
+            foreign_key_on_update: fk?.foreign_key_on_update ?? null,
+            foreign_key_on_delete: fk?.foreign_key_on_delete ?? null,
+            pk: !!c.pk,
+            computed: isGenerated,
+            nullable: c.notnull !== 1,
+            default: defaultValue,
+            unique: uniqueSingleColumn.has(c.name),
+            unique_name: null
+          });
+        }
+        tables.push({
+          schema,
+          name: tableName,
+          columns,
+          type: t.type,
+          definition: t.definition,
+          indexes
+        });
+      }
+      const enums = [];
+      return {
+        tables,
+        enums
+      };
+    } finally {
+      db.close();
+    }
+  },
+  getDefaultValue({ defaultValue, fieldType, services, enums }) {
+    const val = defaultValue.trim();
+    switch (fieldType) {
+      case "DateTime":
+        if (val === "CURRENT_TIMESTAMP" || val === "now()") {
+          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
+        }
+        return (ab) => ab.StringLiteral.setValue(val);
+      case "Int":
+      case "BigInt":
+        if (val === "autoincrement") {
+          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
+        }
+        return (ab) => ab.NumberLiteral.setValue(val);
+      case "Float":
+        return normalizeFloatDefault(val);
+      case "Decimal":
+        return normalizeDecimalDefault(val);
+      case "Boolean":
+        return (ab) => ab.BooleanLiteral.setValue(val === "true" || val === "1");
+      case "String":
+        if (val.startsWith("'") && val.endsWith("'")) {
+          const strippedName = val.slice(1, -1);
+          const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName));
+          if (enumDef) {
+            const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName);
+            if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField);
+          }
+          return (ab) => ab.StringLiteral.setValue(strippedName);
+        }
+        return (ab) => ab.StringLiteral.setValue(val);
+    }
+    console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
+    return null;
+  },
+  getFieldAttributes({ fieldName, fieldType, services }) {
+    const factories = [];
+    if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
+      factories.push(new DataFieldAttributeFactory4().setDecl(getAttributeRef("@updatedAt", services)));
+    }
+    return factories;
+  }
+};
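A minimal sketch of the PRAGMA-based introspection the SQLite provider relies on; the database path and table name below are placeholders:

  const SQLite = (await import("better-sqlite3")).default;          // same dynamic import used above
  const db = new SQLite("./app.db", { readonly: true });            // "./app.db" is a placeholder path
  const columns = db.prepare("PRAGMA table_xinfo('users')").all();  // 'users' is a hypothetical table
  // each row has: cid, name, type, notnull, dflt_value, pk, hidden (per SQLite's table_xinfo documentation)
  db.close();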
+function extractColumnTypeDefs(ddl) {
+  const openIdx = ddl.indexOf("(");
+  if (openIdx === -1) return /* @__PURE__ */ new Map();
+  let depth = 1;
+  let closeIdx = -1;
+  for (let i = openIdx + 1; i < ddl.length; i++) {
+    if (ddl[i] === "(") depth++;
+    else if (ddl[i] === ")") {
+      depth--;
+      if (depth === 0) {
+        closeIdx = i;
+        break;
+      }
+    }
+  }
+  if (closeIdx === -1) return /* @__PURE__ */ new Map();
+  const content = ddl.substring(openIdx + 1, closeIdx);
+  const defs = [];
+  let current = "";
+  depth = 0;
+  for (const char of content) {
+    if (char === "(") depth++;
+    else if (char === ")") depth--;
+    else if (char === "," && depth === 0) {
+      defs.push(current.trim());
+      current = "";
+      continue;
+    }
+    current += char;
+  }
+  if (current.trim()) defs.push(current.trim());
+  const result = /* @__PURE__ */ new Map();
+  for (const def of defs) {
+    const nameMatch = def.match(/^(?:["'`]([^"'`]+)["'`]|(\w+))\s+(.+)/s);
+    if (nameMatch) {
+      const name = nameMatch[1] || nameMatch[2];
+      const typeDef = nameMatch[3];
+      if (name && typeDef) {
+        result.set(name, typeDef.trim());
+      }
+    }
+  }
+  return result;
+}
+__name(extractColumnTypeDefs, "extractColumnTypeDefs");
+
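For illustration, the kind of mapping extractColumnTypeDefs produces for a hypothetical table definition (the DDL below is invented):

  const exampleDdl = 'CREATE TABLE "t" (id INTEGER PRIMARY KEY, total REAL GENERATED ALWAYS AS (a + b) STORED)';
  // extractColumnTypeDefs(exampleDdl) would yield:
  // Map { "id" => "INTEGER PRIMARY KEY", "total" => "REAL GENERATED ALWAYS AS (a + b) STORED" }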
+// src/actions/pull/provider/index.ts
+var providers = {
+  mysql,
+  postgresql,
+  sqlite
+};
+
 // src/actions/db.ts
 async function run2(command, options) {
   switch (command) {
     case "push":
       await runPush(options);
       break;
+    case "pull":
+      await runPull(options);
+      break;
   }
 }
 __name(run2, "run");
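Read together, the mysql, postgresql, and sqlite objects registered above appear to share one contract; an inferred sketch of that shape (not a public export of this package, and the argument and return types are approximations):

  interface PullProvider {
    isSupportedFeature(feature: "Schema" | "NativeEnum"): boolean;
    getBuiltinType(dbType: string): { type: string; isArray: boolean };
    getDefaultDatabaseType(fieldType: string): { type: string; precision?: number } | undefined;
    introspect(connectionString: string, options: { schemas: string[]; modelCasing?: string }): Promise<{ tables: unknown[]; enums: unknown[] }>;
    getDefaultValue(args: Record<string, unknown>): unknown;
    getFieldAttributes(args: Record<string, unknown>): unknown[];
  }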
@@ -264,22 +2311,427 @@ async function runPush(options) {
|
|
|
264
2311
|
}
|
|
265
2312
|
}
|
|
266
2313
|
__name(runPush, "runPush");
|
|
2314
|
+
async function runPull(options) {
|
|
2315
|
+
const spinner = ora();
|
|
2316
|
+
try {
|
|
2317
|
+
const schemaFile = getSchemaFile(options.schema);
|
|
2318
|
+
const outPath = options.output ? path2.resolve(options.output) : void 0;
|
|
2319
|
+
const treatAsFile = !!outPath && (fs2.existsSync(outPath) && fs2.lstatSync(outPath).isFile() || path2.extname(outPath) !== "");
|
|
2320
|
+
const { model, services } = await loadSchemaDocument(schemaFile, {
|
|
2321
|
+
returnServices: true,
|
|
2322
|
+
mergeImports: treatAsFile
|
|
2323
|
+
});
|
|
2324
|
+
const SUPPORTED_PROVIDERS = Object.keys(providers);
|
|
2325
|
+
const datasource = getDatasource(model);
|
|
2326
|
+
if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) {
|
|
2327
|
+
throw new CliError(`Unsupported datasource provider: ${datasource.provider}`);
|
|
2328
|
+
}
|
|
2329
|
+
const provider = providers[datasource.provider];
|
|
2330
|
+
if (!provider) {
|
|
2331
|
+
throw new CliError(`No introspection provider found for: ${datasource.provider}`);
|
|
2332
|
+
}
|
|
2333
|
+
spinner.start("Introspecting database...");
|
|
2334
|
+
const { enums, tables } = await provider.introspect(datasource.url, {
|
|
2335
|
+
schemas: datasource.allSchemas,
|
|
2336
|
+
modelCasing: options.modelCasing
|
|
2337
|
+
});
|
|
2338
|
+
spinner.succeed("Database introspected");
|
|
2339
|
+
console.log(colors4.blue("Syncing schema..."));
|
|
2340
|
+
const newModel = {
|
|
2341
|
+
$type: "Model",
|
|
2342
|
+
$container: void 0,
|
|
2343
|
+
$containerProperty: void 0,
|
|
2344
|
+
$containerIndex: void 0,
|
|
2345
|
+
declarations: [
|
|
2346
|
+
...model.declarations.filter((d) => [
|
|
2347
|
+
"DataSource"
|
|
2348
|
+
].includes(d.$type))
|
|
2349
|
+
],
|
|
2350
|
+
imports: model.imports
|
|
2351
|
+
};
|
|
2352
|
+
syncEnums({
|
|
2353
|
+
dbEnums: enums,
|
|
2354
|
+
model: newModel,
|
|
2355
|
+
services,
|
|
2356
|
+
options,
|
|
2357
|
+
defaultSchema: datasource.defaultSchema,
|
|
2358
|
+
oldModel: model,
|
|
2359
|
+
provider
|
|
2360
|
+
});
|
|
2361
|
+
const resolvedRelations = [];
|
|
2362
|
+
for (const table of tables) {
|
|
2363
|
+
const relations = syncTable({
|
|
2364
|
+
table,
|
|
2365
|
+
model: newModel,
|
|
2366
|
+
provider,
|
|
2367
|
+
services,
|
|
2368
|
+
options,
|
|
2369
|
+
defaultSchema: datasource.defaultSchema,
|
|
2370
|
+
oldModel: model
|
|
2371
|
+
});
|
|
2372
|
+
resolvedRelations.push(...relations);
|
|
2373
|
+
}
|
|
2374
|
+
for (const relation of resolvedRelations) {
|
|
2375
|
+
const similarRelations = resolvedRelations.filter((rr) => {
|
|
2376
|
+
return rr !== relation && (rr.schema === relation.schema && rr.table === relation.table && rr.references.schema === relation.references.schema && rr.references.table === relation.references.table || rr.schema === relation.references.schema && rr.columns[0] === relation.references.columns[0] && rr.references.schema === relation.schema && rr.references.table === relation.table);
|
|
2377
|
+
}).length;
|
|
2378
|
+
const selfRelation = relation.references.schema === relation.schema && relation.references.table === relation.table;
|
|
2379
|
+
syncRelation({
|
|
2380
|
+
model: newModel,
|
|
2381
|
+
relation,
|
|
2382
|
+
services,
|
|
2383
|
+
options,
|
|
2384
|
+
selfRelation,
|
|
2385
|
+
similarRelations
|
|
2386
|
+
});
|
|
2387
|
+
}
|
|
2388
|
+
console.log(colors4.blue("Schema synced"));
|
|
2389
|
+
const baseDir = path2.dirname(path2.resolve(schemaFile));
|
|
2390
|
+
const baseDirUrlPath = new URL(`file://${baseDir}`).pathname;
|
|
2391
|
+
const docs = services.shared.workspace.LangiumDocuments.all.filter(({ uri }) => uri.path.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())).toArray();
|
|
2392
|
+
const docsSet = new Set(docs.map((d) => d.uri.toString()));
|
|
2393
|
+
console.log(colors4.bold("\nApplying changes to ZModel..."));
|
|
2394
|
+
const deletedModels = [];
|
|
2395
|
+
const deletedEnums = [];
|
|
2396
|
+
const addedModels = [];
|
|
2397
|
+
const addedEnums = [];
|
|
2398
|
+
const modelChanges = /* @__PURE__ */ new Map();
|
|
2399
|
+
const getModelChanges = /* @__PURE__ */ __name((modelName) => {
|
|
2400
|
+
if (!modelChanges.has(modelName)) {
|
|
2401
|
+
modelChanges.set(modelName, {
|
|
2402
|
+
addedFields: [],
|
|
2403
|
+
deletedFields: [],
|
|
2404
|
+
updatedFields: [],
|
|
2405
|
+
addedAttributes: [],
|
|
2406
|
+
deletedAttributes: [],
|
|
2407
|
+
updatedAttributes: []
|
|
2408
|
+
});
|
|
2409
|
+
}
|
|
2410
|
+
return modelChanges.get(modelName);
|
|
2411
|
+
}, "getModelChanges");
|
|
2412
|
+
services.shared.workspace.IndexManager.allElements("DataModel", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
|
|
2413
|
+
const model2 = decl.node.$container;
|
|
2414
|
+
const index = model2.declarations.findIndex((d) => d === decl.node);
|
|
2415
|
+
model2.declarations.splice(index, 1);
|
|
2416
|
+
deletedModels.push(colors4.red(`- Model ${decl.name} deleted`));
|
|
2417
|
+
});
|
|
2418
|
+
if (provider.isSupportedFeature("NativeEnum")) services.shared.workspace.IndexManager.allElements("Enum", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
|
|
2419
|
+
const model2 = decl.node.$container;
|
|
2420
|
+
const index = model2.declarations.findIndex((d) => d === decl.node);
|
|
2421
|
+
model2.declarations.splice(index, 1);
|
|
2422
|
+
deletedEnums.push(colors4.red(`- Enum ${decl.name} deleted`));
|
|
2423
|
+
});
|
|
2424
|
+
newModel.declarations.filter((d) => [
|
|
2425
|
+
DataModel,
|
|
2426
|
+
Enum
|
|
2427
|
+
].includes(d.$type)).forEach((_declaration) => {
|
|
2428
|
+
const newDataModel = _declaration;
|
|
2429
|
+
const declarations = services.shared.workspace.IndexManager.allElements(newDataModel.$type, docsSet).toArray();
|
|
2430
|
+
const originalDataModel = declarations.find((d) => getDbName(d.node) === getDbName(newDataModel))?.node;
|
|
2431
|
+
if (!originalDataModel) {
|
|
2432
|
+
if (newDataModel.$type === "DataModel") {
|
|
2433
|
+
addedModels.push(colors4.green(`+ Model ${newDataModel.name} added`));
|
|
2434
|
+
} else if (newDataModel.$type === "Enum") {
|
|
2435
|
+
addedEnums.push(colors4.green(`+ Enum ${newDataModel.name} added`));
|
|
2436
|
+
}
|
|
2437
|
+
model.declarations.push(newDataModel);
|
|
2438
|
+
newDataModel.$container = model;
|
|
2439
|
+
newDataModel.fields.forEach((f) => {
|
|
2440
|
+
if (f.$type === "DataField" && f.type.reference?.ref) {
|
|
2441
|
+
const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
|
|
2442
|
+
if (ref && f.type.reference) {
|
|
2443
|
+
f.type.reference = {
|
|
2444
|
+
ref,
|
|
2445
|
+
$refText: ref.name ?? f.type.reference.$refText
|
|
2446
|
+
};
|
|
2447
|
+
}
|
|
2448
|
+
}
|
|
2449
|
+
});
|
|
2450
|
+
return;
|
|
2451
|
+
}
|
|
2452
|
+
newDataModel.fields.forEach((f) => {
|
|
2453
|
+
let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f));
|
|
2454
|
+
const isRelationField = f.$type === "DataField" && !!f.attributes?.some((a) => a?.decl?.ref?.name === "@relation");
|
|
2455
|
+
if (originalFields.length === 0 && isRelationField && !getRelationFieldsKey(f)) {
|
|
2456
|
+
return;
|
|
2457
|
+
}
|
|
2458
|
+
if (originalFields.length === 0) {
|
|
2459
|
+
const newFieldsKey = getRelationFieldsKey(f);
|
|
2460
|
+
if (newFieldsKey) {
|
|
2461
|
+
originalFields = originalDataModel.fields.filter((d) => getRelationFieldsKey(d) === newFieldsKey);
|
|
2462
|
+
}
|
|
2463
|
+
}
|
|
2464
|
+
if (originalFields.length === 0) {
|
|
2465
|
+
originalFields = originalDataModel.fields.filter((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
|
|
2466
|
+
}
|
|
2467
|
+
if (originalFields.length === 0) {
|
|
2468
|
+
originalFields = originalDataModel.fields.filter((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
|
|
2469
|
+
}
|
|
2470
|
+
if (originalFields.length > 1) {
|
|
2471
|
+
const isBackReferenceField = !getRelationFieldsKey(f);
|
|
2472
|
+
if (!isBackReferenceField) {
|
|
2473
|
+
console.warn(colors4.yellow(`Found more original fields, need to tweak the search algorithm. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(", ")}](${f.name})`));
|
|
2474
|
+
}
|
|
2475
|
+
return;
|
|
2476
|
+
}
|
|
2477
|
+
const originalField = originalFields.at(0);
|
|
2478
|
+
if (originalField && f.$type === "DataField" && originalField.$type === "DataField") {
|
|
2479
|
+
const newType = f.type;
|
|
2480
|
+
const oldType = originalField.type;
|
|
2481
|
+
const fieldUpdates = [];
|
|
2482
|
+
const isOldTypeEnumWithoutNativeSupport = oldType.reference?.ref?.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
|
|
2483
|
+
if (newType.type && oldType.type !== newType.type && !isOldTypeEnumWithoutNativeSupport) {
|
|
2484
|
+
fieldUpdates.push(`type: ${oldType.type} -> ${newType.type}`);
|
|
2485
|
+
oldType.type = newType.type;
|
|
2486
|
+
}
|
|
2487
|
+
if (newType.reference?.ref && oldType.reference?.ref) {
|
|
2488
|
+
const newRefName = getDbName(newType.reference.ref);
|
|
2489
|
+
const oldRefName = getDbName(oldType.reference.ref);
|
|
2490
|
+
if (newRefName !== oldRefName) {
|
|
2491
|
+
fieldUpdates.push(`reference: ${oldType.reference.$refText} -> ${newType.reference.$refText}`);
|
|
2492
|
+
oldType.reference = {
|
|
2493
|
+
ref: newType.reference.ref,
|
|
2494
|
+
$refText: newType.reference.$refText
|
|
2495
|
+
};
|
|
2496
|
+
}
|
|
2497
|
+
} else if (newType.reference?.ref && !oldType.reference) {
|
|
2498
|
+
fieldUpdates.push(`type: ${oldType.type} -> ${newType.reference.$refText}`);
|
|
2499
|
+
oldType.reference = newType.reference;
|
|
2500
|
+
oldType.type = void 0;
|
|
2501
|
+
} else if (!newType.reference && oldType.reference?.ref && newType.type) {
|
|
2502
|
+
const isEnumWithoutNativeSupport = oldType.reference.ref.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
|
|
2503
|
+
if (!isEnumWithoutNativeSupport) {
|
|
2504
|
+
fieldUpdates.push(`type: ${oldType.reference.$refText} -> ${newType.type}`);
|
|
2505
|
+
oldType.type = newType.type;
|
|
2506
|
+
oldType.reference = void 0;
|
|
2507
|
+
}
|
|
2508
|
+
}
|
|
2509
|
+
if (!!newType.optional !== !!oldType.optional) {
|
|
2510
|
+
fieldUpdates.push(`optional: ${!!oldType.optional} -> ${!!newType.optional}`);
|
|
2511
|
+
oldType.optional = newType.optional;
|
|
2512
|
+
}
|
|
2513
|
+
if (!!newType.array !== !!oldType.array) {
|
|
2514
|
+
fieldUpdates.push(`array: ${!!oldType.array} -> ${!!newType.array}`);
|
|
2515
|
+
oldType.array = newType.array;
|
|
2516
|
+
}
|
|
2517
|
+
if (fieldUpdates.length > 0) {
|
|
2518
|
+
getModelChanges(originalDataModel.name).updatedFields.push(colors4.yellow(`~ ${originalField.name} (${fieldUpdates.join(", ")})`));
|
|
2519
|
+
}
|
|
2520
|
+
const newDefaultAttr = f.attributes.find((a) => a.decl.$refText === "@default");
|
|
2521
|
+
const oldDefaultAttr = originalField.attributes.find((a) => a.decl.$refText === "@default");
|
|
2522
|
+
if (newDefaultAttr && oldDefaultAttr) {
|
|
2523
|
+
const serializeArgs = /* @__PURE__ */ __name((args) => args.map((arg) => {
|
|
2524
|
+
if (arg.value?.$type === "StringLiteral") return `"${arg.value.value}"`;
|
|
2525
|
+
if (arg.value?.$type === "NumberLiteral") return String(arg.value.value);
|
|
2526
|
+
if (arg.value?.$type === "BooleanLiteral") return String(arg.value.value);
|
|
2527
|
+
if (arg.value?.$type === "InvocationExpr") return arg.value.function?.$refText ?? "";
|
|
2528
|
+
if (arg.value?.$type === "ReferenceExpr") return arg.value.target?.$refText ?? "";
|
|
2529
|
+
if (arg.value?.$type === "ArrayExpr") {
|
|
2530
|
+
return `[${(arg.value.items ?? []).map((item) => {
|
|
2531
|
+
if (item.$type === "ReferenceExpr") return item.target?.$refText ?? "";
|
|
2532
|
+
return item.$type ?? "unknown";
|
|
2533
|
+
}).join(",")}]`;
|
|
2534
|
+
}
|
|
2535
|
+
return arg.value?.$type ?? "unknown";
|
|
2536
|
+
}).join(","), "serializeArgs");
|
|
2537
|
+
const newArgsStr = serializeArgs(newDefaultAttr.args ?? []);
|
|
2538
|
+
const oldArgsStr = serializeArgs(oldDefaultAttr.args ?? []);
|
|
2539
|
+
if (newArgsStr !== oldArgsStr) {
|
|
2540
|
+
oldDefaultAttr.args = newDefaultAttr.args.map((arg) => ({
|
|
2541
|
+
...arg,
|
|
2542
|
+
$container: oldDefaultAttr
|
|
2543
|
+
}));
|
|
2544
|
+
getModelChanges(originalDataModel.name).updatedAttributes.push(colors4.yellow(`~ @default on ${originalDataModel.name}.${originalField.name}`));
|
|
2545
|
+
}
|
|
2546
|
+
}
|
|
2547
|
+
}
|
|
2548
|
+
if (!originalField) {
|
|
2549
|
+
getModelChanges(originalDataModel.name).addedFields.push(colors4.green(`+ ${f.name}`));
|
|
2550
|
+
f.$container = originalDataModel;
|
|
2551
|
+
originalDataModel.fields.push(f);
|
|
2552
|
+
if (f.$type === "DataField" && f.type.reference?.ref) {
|
|
2553
|
+
const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
|
|
2554
|
+
if (ref) {
|
|
2555
|
+
f.type.reference = {
|
|
2556
|
+
ref,
|
|
2557
|
+
$refText: ref.name ?? f.type.reference.$refText
|
|
2558
|
+
};
|
|
2559
|
+
}
|
|
2560
|
+
}
|
|
2561
|
+
return;
|
|
2562
|
+
}
|
|
2563
|
+
originalField.attributes.filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && ![
|
|
2564
|
+
"@map",
|
|
2565
|
+
"@@map",
|
|
2566
|
+
"@default",
|
|
2567
|
+
"@updatedAt"
|
|
2568
|
+
].includes(attr.decl.$refText)).forEach((attr) => {
|
|
2569
|
+
const field = attr.$container;
|
|
2570
|
+
const index = field.attributes.findIndex((d) => d === attr);
|
|
2571
|
+
field.attributes.splice(index, 1);
|
|
2572
|
+
getModelChanges(originalDataModel.name).deletedAttributes.push(colors4.yellow(`- ${attr.decl.$refText} from field: ${originalDataModel.name}.${field.name}`));
|
|
2573
|
+
});
|
|
2574
|
+
f.attributes.filter((attr) => !originalField.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && ![
|
|
2575
|
+
"@map",
|
|
2576
|
+
"@@map",
|
|
2577
|
+
"@default",
|
|
2578
|
+
"@updatedAt"
|
|
2579
|
+
].includes(attr.decl.$refText)).forEach((attr) => {
|
|
2580
|
+
const cloned = {
|
|
2581
|
+
...attr,
|
|
2582
|
+
$container: originalField
|
|
2583
|
+
};
|
|
2584
|
+
originalField.attributes.push(cloned);
|
|
2585
|
+
getModelChanges(originalDataModel.name).addedAttributes.push(colors4.green(`+ ${attr.decl.$refText} to field: ${originalDataModel.name}.${f.name}`));
|
|
2586
|
+
});
|
|
2587
|
+
});
|
|
2588
|
+
originalDataModel.fields.filter((f) => {
|
|
2589
|
+
const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f));
|
|
2590
|
+
if (matchByDbName) return false;
|
|
2591
|
+
const originalFieldsKey = getRelationFieldsKey(f);
|
|
2592
|
+
if (originalFieldsKey) {
|
|
2593
|
+
const matchByFieldsKey = newDataModel.fields.find((d) => getRelationFieldsKey(d) === originalFieldsKey);
|
|
2594
|
+
+if (matchByFieldsKey) return false;
+}
+const matchByFkName = newDataModel.fields.find((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
+if (matchByFkName) return false;
+const matchByTypeRef = newDataModel.fields.find((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
+return !matchByTypeRef;
+}).forEach((f) => {
+const _model = f.$container;
+const index = _model.fields.findIndex((d) => d === f);
+_model.fields.splice(index, 1);
+getModelChanges(_model.name).deletedFields.push(colors4.red(`- ${f.name}`));
+});
+});
+if (deletedModels.length > 0) {
+console.log(colors4.bold("\nDeleted Models:"));
+deletedModels.forEach((msg) => {
+console.log(msg);
+});
+}
+if (deletedEnums.length > 0) {
+console.log(colors4.bold("\nDeleted Enums:"));
+deletedEnums.forEach((msg) => {
+console.log(msg);
+});
+}
+if (addedModels.length > 0) {
+console.log(colors4.bold("\nAdded Models:"));
+addedModels.forEach((msg) => {
+console.log(msg);
+});
+}
+if (addedEnums.length > 0) {
+console.log(colors4.bold("\nAdded Enums:"));
+addedEnums.forEach((msg) => {
+console.log(msg);
+});
+}
+if (modelChanges.size > 0) {
+console.log(colors4.bold("\nModel Changes:"));
+modelChanges.forEach((changes, modelName) => {
+const hasChanges = changes.addedFields.length > 0 || changes.deletedFields.length > 0 || changes.updatedFields.length > 0 || changes.addedAttributes.length > 0 || changes.deletedAttributes.length > 0 || changes.updatedAttributes.length > 0;
+if (hasChanges) {
+console.log(colors4.cyan(`  ${modelName}:`));
+if (changes.addedFields.length > 0) {
+console.log(colors4.gray("    Added Fields:"));
+changes.addedFields.forEach((msg) => {
+console.log(`      ${msg}`);
+});
+}
+if (changes.deletedFields.length > 0) {
+console.log(colors4.gray("    Deleted Fields:"));
+changes.deletedFields.forEach((msg) => {
+console.log(`      ${msg}`);
+});
+}
+if (changes.updatedFields.length > 0) {
+console.log(colors4.gray("    Updated Fields:"));
+changes.updatedFields.forEach((msg) => {
+console.log(`      ${msg}`);
+});
+}
+if (changes.addedAttributes.length > 0) {
+console.log(colors4.gray("    Added Attributes:"));
+changes.addedAttributes.forEach((msg) => {
+console.log(`      ${msg}`);
+});
+}
+if (changes.deletedAttributes.length > 0) {
+console.log(colors4.gray("    Deleted Attributes:"));
+changes.deletedAttributes.forEach((msg) => {
+console.log(`      ${msg}`);
+});
+}
+if (changes.updatedAttributes.length > 0) {
+console.log(colors4.gray("    Updated Attributes:"));
+changes.updatedAttributes.forEach((msg) => {
+console.log(`      ${msg}`);
+});
+}
+}
+});
+}
+const generator = new ZModelCodeGenerator({
+quote: options.quote,
+indent: options.indent
+});
+if (options.output) {
+if (treatAsFile) {
+const zmodelSchema = await formatDocument(generator.generate(newModel));
+console.log(colors4.blue(`Writing to ${outPath}`));
+fs2.mkdirSync(path2.dirname(outPath), {
+recursive: true
+});
+fs2.writeFileSync(outPath, zmodelSchema);
+} else {
+fs2.mkdirSync(outPath, {
+recursive: true
+});
+const baseDir2 = path2.dirname(path2.resolve(schemaFile));
+for (const { uri, parseResult: { value: documentModel } } of docs) {
+const zmodelSchema = await formatDocument(generator.generate(documentModel));
+const relPath = path2.relative(baseDir2, uri.fsPath);
+const targetFile = path2.join(outPath, relPath);
+fs2.mkdirSync(path2.dirname(targetFile), {
+recursive: true
+});
+console.log(colors4.blue(`Writing to ${targetFile}`));
+fs2.writeFileSync(targetFile, zmodelSchema);
+}
+}
+} else {
+for (const { uri, parseResult: { value: documentModel } } of docs) {
+const zmodelSchema = await formatDocument(generator.generate(documentModel));
+console.log(colors4.blue(`Writing to ${path2.relative(process.cwd(), uri.fsPath).replace(/\\/g, "/")}`));
+fs2.writeFileSync(uri.fsPath, zmodelSchema);
+}
+}
+console.log(colors4.green.bold("\nPull completed successfully!"));
+} catch (error) {
+spinner.fail("Pull failed");
+console.error(error);
+throw error;
+}
+}
+__name(runPull, "runPull");
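The new `runPull` action above serializes the introspected AST back to ZModel text with `ZModelCodeGenerator` and runs it through `formatDocument` before writing files. A minimal sketch of that write path, extracted for reference; the `model` value and the option values ("single", 4) are assumptions taken from the `db pull` defaults registered later in this file.

```ts
// Sketch only — mirrors the serialization/write step of runPull above.
// Assumptions: `model` is a loaded ZModel AST document, and quote/indent
// match the CLI defaults ("single", 4).
import fs from "fs";
import path from "path";
import { formatDocument, ZModelCodeGenerator } from "@zenstackhq/language";

async function writeIntrospectedSchema(model: any, outPath: string) {
    const generator = new ZModelCodeGenerator({ quote: "single", indent: 4 });
    // generate ZModel source text from the AST, then format it
    const zmodelSchema = await formatDocument(generator.generate(model));
    fs.mkdirSync(path.dirname(outPath), { recursive: true });
    fs.writeFileSync(outPath, zmodelSchema);
}
```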
 
 // src/actions/format.ts
-import { formatDocument } from "@zenstackhq/language";
-import
+import { formatDocument as formatDocument2 } from "@zenstackhq/language";
+import colors5 from "colors";
 import fs3 from "fs";
 async function run3(options) {
 const schemaFile = getSchemaFile(options.schema);
 let formattedContent;
 try {
-formattedContent = await
+formattedContent = await formatDocument2(fs3.readFileSync(schemaFile, "utf-8"));
 } catch (error) {
-console.error(
+console.error(colors5.red("\u2717 Schema formatting failed."));
 throw error;
 }
 fs3.writeFileSync(schemaFile, formattedContent, "utf-8");
-console.log(
+console.log(colors5.green("\u2713 Schema formatting completed successfully."));
 }
 __name(run3, "run");
 
@@ -287,14 +2739,14 @@ __name(run3, "run");
 import { invariant, singleDebounce } from "@zenstackhq/common-helpers";
 import { ZModelLanguageMetaData } from "@zenstackhq/language";
 import { isPlugin } from "@zenstackhq/language/ast";
-import { getLiteral, getLiteralArray } from "@zenstackhq/language/utils";
-import
+import { getLiteral, getLiteralArray as getLiteralArray2 } from "@zenstackhq/language/utils";
+import colors6 from "colors";
 import { createJiti } from "jiti";
 import fs6 from "fs";
-import
+import path5 from "path";
 import { pathToFileURL } from "url";
 import { watch } from "chokidar";
-import
+import ora2 from "ora";
 
 // src/plugins/index.ts
 var plugins_exports = {};
@@ -306,16 +2758,16 @@ __export(plugins_exports, {
 // src/plugins/prisma.ts
 import { PrismaSchemaGenerator as PrismaSchemaGenerator2 } from "@zenstackhq/sdk";
 import fs4 from "fs";
-import
+import path3 from "path";
 var plugin = {
 name: "Prisma Schema Generator",
 statusText: "Generating Prisma schema",
 async generate({ model, defaultOutputPath, pluginOptions }) {
-let outFile =
+let outFile = path3.join(defaultOutputPath, "schema.prisma");
 if (typeof pluginOptions["output"] === "string") {
-outFile =
-if (!fs4.existsSync(
-fs4.mkdirSync(
+outFile = path3.resolve(defaultOutputPath, pluginOptions["output"]);
+if (!fs4.existsSync(path3.dirname(outFile))) {
+fs4.mkdirSync(path3.dirname(outFile), {
 recursive: true
 });
 }
@@ -329,14 +2781,14 @@ var prisma_default = plugin;
 // src/plugins/typescript.ts
 import { TsSchemaGenerator } from "@zenstackhq/sdk";
 import fs5 from "fs";
-import
+import path4 from "path";
 var plugin2 = {
 name: "TypeScript Schema Generator",
 statusText: "Generating TypeScript schema",
 async generate({ model, defaultOutputPath, pluginOptions }) {
 let outDir = defaultOutputPath;
 if (typeof pluginOptions["output"] === "string") {
-outDir =
+outDir = path4.resolve(defaultOutputPath, pluginOptions["output"]);
 if (!fs5.existsSync(outDir)) {
 fs5.mkdirSync(outDir, {
 recursive: true
@@ -365,7 +2817,7 @@ async function run4(options) {
 if (options.watch) {
 const logsEnabled = !options.silent;
 if (logsEnabled) {
-console.log(
+console.log(colors6.green(`
 Enabled watch mode!`));
 }
 const schemaExtensions = ZModelLanguageMetaData.fileExtensions;
@@ -442,14 +2894,14 @@ async function pureGenerate(options, fromWatch) {
 const outputPath = getOutputPath(options, schemaFile);
 await runPlugins(schemaFile, model, outputPath, options);
 if (!options.silent) {
-console.log(
+console.log(colors6.green(`Generation completed successfully in ${Date.now() - start}ms.
 `));
 if (!fromWatch) {
 console.log(`You can now create a ZenStack client with it.
 
 \`\`\`ts
 import { ZenStackClient } from '@zenstackhq/orm';
-import { schema } from '${
+import { schema } from '${path5.relative(".", outputPath)}/schema';
 
 const client = new ZenStackClient(schema, {
 dialect: { ... }
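The hint printed above stops at a placeholder dialect. A filled-in version, reusing the SQLite dialect wiring that the proxy action later in this bundle uses, might look like the following; the schema import path and the database file name are assumptions.

```ts
// Sketch of the printed hint with a concrete dialect.
// Assumptions: the schema was generated into ./zenstack, and a local SQLite
// file ./dev.db is used (mirroring the SqliteDialect usage in the proxy action).
import SQLite from "better-sqlite3";
import { ZenStackClient } from "@zenstackhq/orm";
import { SqliteDialect } from "@zenstackhq/orm/dialects/sqlite";
import { schema } from "./zenstack/schema";

const client = new ZenStackClient(schema, {
    dialect: new SqliteDialect({ database: new SQLite("./dev.db") }),
});
```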
@@ -474,7 +2926,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
 throw new CliError(`Unknown core plugin: ${provider}`);
 }
 } else {
-cliPlugin = await loadPluginModule(provider,
+cliPlugin = await loadPluginModule(provider, path5.dirname(schemaFile));
 }
 if (cliPlugin) {
 const pluginOptions = getPluginOptions(plugin3);
@@ -513,7 +2965,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
 invariant(typeof cliPlugin.generate === "function", `Plugin ${cliPlugin.name} does not have a generate function`);
 let spinner;
 if (!options.silent) {
-spinner =
+spinner = ora2(cliPlugin.statusText ?? `Running plugin ${cliPlugin.name}`).start();
 }
 try {
 await cliPlugin.generate({
@@ -543,7 +2995,7 @@ function getPluginOptions(plugin3) {
 if (field.name === "provider") {
 continue;
 }
-const value = getLiteral(field.value) ??
+const value = getLiteral(field.value) ?? getLiteralArray2(field.value);
 if (value === void 0) {
 console.warn(`Plugin "${plugin3.name}" option "${field.name}" has unsupported value, skipping`);
 continue;
@@ -556,7 +3008,7 @@ __name(getPluginOptions, "getPluginOptions");
 async function loadPluginModule(provider, basePath) {
 let moduleSpec = provider;
 if (moduleSpec.startsWith(".")) {
-moduleSpec =
+moduleSpec = path5.resolve(basePath, moduleSpec);
 }
 const importAsEsm = /* @__PURE__ */ __name(async (spec) => {
 try {
@@ -594,13 +3046,13 @@ async function loadPluginModule(provider, basePath) {
 }
 }
 for (const suffix of esmSuffixes) {
-const indexPath =
+const indexPath = path5.join(moduleSpec, `index${suffix}`);
 if (fs6.existsSync(indexPath)) {
 return await importAsEsm(pathToFileURL(indexPath).toString());
 }
 }
 for (const suffix of tsSuffixes) {
-const indexPath =
+const indexPath = path5.join(moduleSpec, `index${suffix}`);
 if (fs6.existsSync(indexPath)) {
 return await importAsTs(indexPath);
 }
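`loadPluginModule` above resolves a plugin `provider` path relative to the schema directory and imports it as ESM or TypeScript, while the built-in plugins in this file are plain objects with `name`, `statusText`, and `generate({ model, defaultOutputPath, pluginOptions })`. A hypothetical custom plugin module following that shape is sketched below; the default-export convention and the `output` option handling are assumptions based on how the core plugins are defined.

```ts
// my-plugin.ts — hypothetical custom CLI plugin, mirroring the shape of the
// built-in Prisma/TypeScript plugins above. Treat this as a sketch, not a contract.
import fs from "fs";
import path from "path";

const plugin = {
    name: "Model List Generator",
    statusText: "Generating model list",
    async generate({ model, defaultOutputPath, pluginOptions }: any) {
        // collect data model names from the ZModel AST
        const names = model.declarations
            .filter((d: any) => d.$type === "DataModel")
            .map((d: any) => d.name);
        // resolve the output file the same way the core plugins resolve "output"
        const outFile =
            typeof pluginOptions["output"] === "string"
                ? path.resolve(defaultOutputPath, pluginOptions["output"])
                : path.join(defaultOutputPath, "models.json");
        fs.mkdirSync(path.dirname(outFile), { recursive: true });
        fs.writeFileSync(outFile, JSON.stringify(names, null, 2));
    },
};

export default plugin;
```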
@@ -614,8 +3066,8 @@ async function loadPluginModule(provider, basePath) {
 __name(loadPluginModule, "loadPluginModule");
 
 // src/actions/info.ts
-import
-import
+import colors7 from "colors";
+import path6 from "path";
 async function run5(projectPath) {
 const packages = await getZenStackPackages(projectPath);
 if (!packages) {
@@ -628,18 +3080,18 @@ async function run5(projectPath) {
 if (version2) {
 versions.add(version2);
 }
-console.log(`  ${
+console.log(`  ${colors7.green(pkg.padEnd(20))} ${version2}`);
 }
 if (versions.size > 1) {
-console.warn(
+console.warn(colors7.yellow("WARNING: Multiple versions of Zenstack packages detected. This may cause issues."));
 }
 }
 __name(run5, "run");
 async function getZenStackPackages(projectPath) {
 let pkgJson;
-const resolvedPath =
+const resolvedPath = path6.resolve(projectPath);
 try {
-pkgJson = (await import(
+pkgJson = (await import(path6.join(resolvedPath, "package.json"), {
 with: {
 type: "json"
 }
@@ -677,10 +3129,10 @@ async function getZenStackPackages(projectPath) {
 __name(getZenStackPackages, "getZenStackPackages");
 
 // src/actions/init.ts
-import
+import colors8 from "colors";
 import fs7 from "fs";
-import
-import
+import path7 from "path";
+import ora3 from "ora";
 import { detect, resolveCommand } from "package-manager-detector";
 
 // src/actions/templates.ts
@@ -731,7 +3183,7 @@ async function run6(projectPath) {
 name: "npm"
 };
 }
-console.log(
+console.log(colors8.gray(`Using package manager: ${pm.agent}`));
 for (const pkg of packages) {
 const resolved = resolveCommand(pm.agent, "add", [
 pkg.name,
@@ -742,7 +3194,7 @@ async function run6(projectPath) {
 if (!resolved) {
 throw new CliError(`Unable to determine how to install package "${pkg.name}". Please install it manually.`);
 }
-const spinner =
+const spinner = ora3(`Installing "${pkg.name}"`).start();
 try {
 execSync(`${resolved.command} ${resolved.args.join(" ")}`, {
 cwd: projectPath
@@ -754,38 +3206,38 @@ async function run6(projectPath) {
 }
 }
 const generationFolder = "zenstack";
-if (!fs7.existsSync(
-fs7.mkdirSync(
+if (!fs7.existsSync(path7.join(projectPath, generationFolder))) {
+fs7.mkdirSync(path7.join(projectPath, generationFolder));
 }
-if (!fs7.existsSync(
-fs7.writeFileSync(
+if (!fs7.existsSync(path7.join(projectPath, generationFolder, "schema.zmodel"))) {
+fs7.writeFileSync(path7.join(projectPath, generationFolder, "schema.zmodel"), STARTER_ZMODEL);
 } else {
-console.log(
+console.log(colors8.yellow("Schema file already exists. Skipping generation of sample."));
 }
-console.log(
-console.log(
-console.log(
+console.log(colors8.green("ZenStack project initialized successfully!"));
+console.log(colors8.gray(`See "${generationFolder}/schema.zmodel" for your database schema.`));
+console.log(colors8.gray("Run `zenstack generate` to compile the schema into a TypeScript file."));
 }
 __name(run6, "run");
 
 // src/actions/migrate.ts
 import fs8 from "fs";
-import
+import path8 from "path";
 
 // src/actions/seed.ts
-import
+import colors9 from "colors";
 import { execaCommand } from "execa";
 async function run7(options, args) {
 const pkgJsonConfig = getPkgJsonConfig(process.cwd());
 if (!pkgJsonConfig.seed) {
 if (!options.noWarnings) {
-console.warn(
+console.warn(colors9.yellow("No seed script defined in package.json. Skipping seeding."));
 }
 return;
 }
 const command = `${pkgJsonConfig.seed}${args.length > 0 ? " " + args.join(" ") : ""}`;
 if (options.printStatus) {
-console.log(
+console.log(colors9.gray(`Running seed script "${command}"...`));
 }
 try {
 await execaCommand(command, {
@@ -793,7 +3245,7 @@ async function run7(options, args) {
 stderr: "inherit"
 });
 } catch (err) {
-console.error(
+console.error(colors9.red(err instanceof Error ? err.message : String(err)));
 throw new CliError("Failed to seed the database. Please check the error message above for details.");
 }
 }
@@ -803,7 +3255,7 @@ __name(run7, "run");
 async function run8(command, options) {
 const schemaFile = getSchemaFile(options.schema);
 await requireDataSourceUrl(schemaFile);
-const prismaSchemaDir = options.migrations ?
+const prismaSchemaDir = options.migrations ? path8.dirname(options.migrations) : void 0;
 const prismaSchemaFile = await generateTempPrismaSchema(schemaFile, prismaSchemaDir);
 try {
 switch (command) {
@@ -914,8 +3366,8 @@ function handleSubProcessError2(err) {
 __name(handleSubProcessError2, "handleSubProcessError");
 
 // src/actions/proxy.ts
-import { isDataSource as isDataSource2, isInvocationExpr, isLiteralExpr } from "@zenstackhq/language/ast";
-import { getStringLiteral } from "@zenstackhq/language/utils";
+import { isDataSource as isDataSource2, isInvocationExpr as isInvocationExpr2, isLiteralExpr } from "@zenstackhq/language/ast";
+import { getStringLiteral as getStringLiteral2 } from "@zenstackhq/language/utils";
 import { ZenStackClient } from "@zenstackhq/orm";
 import { MysqlDialect } from "@zenstackhq/orm/dialects/mysql";
 import { PostgresDialect } from "@zenstackhq/orm/dialects/postgres";
@@ -923,26 +3375,26 @@ import { SqliteDialect } from "@zenstackhq/orm/dialects/sqlite";
 import { RPCApiHandler } from "@zenstackhq/server/api";
 import { ZenStackMiddleware } from "@zenstackhq/server/express";
 import SQLite from "better-sqlite3";
-import
+import colors11 from "colors";
 import cors from "cors";
 import express from "express";
 import { createJiti as createJiti2 } from "jiti";
 import { createPool as createMysqlPool } from "mysql2";
-import
+import path10 from "path";
 import { Pool as PgPool } from "pg";
 
 // src/utils/version-utils.ts
-import
+import colors10 from "colors";
 import fs9 from "fs";
-import
+import path9 from "path";
 import { fileURLToPath as fileURLToPath2 } from "url";
 import semver from "semver";
 var CHECK_VERSION_TIMEOUT = 2e3;
 var VERSION_CHECK_TAG = "next";
 function getVersion() {
 try {
-const _dirname = typeof __dirname !== "undefined" ? __dirname :
-return JSON.parse(fs9.readFileSync(
+const _dirname = typeof __dirname !== "undefined" ? __dirname : path9.dirname(fileURLToPath2(import.meta.url));
+return JSON.parse(fs9.readFileSync(path9.join(_dirname, "../package.json"), "utf8")).version;
 } catch {
 return void 0;
 }
@@ -957,7 +3409,7 @@ async function checkNewVersion() {
 return;
 }
 if (latestVersion && currVersion && semver.gt(latestVersion, currVersion)) {
-console.log(`A newer version ${
+console.log(`A newer version ${colors10.cyan(latestVersion)} is available.`);
 }
 }
 __name(checkNewVersion, "checkNewVersion");
@@ -987,10 +3439,10 @@ async function run9(options) {
 ];
 const log = options.logLevel?.filter((level) => allowedLogLevels.includes(level));
 const schemaFile = getSchemaFile(options.schema);
-console.log(
+console.log(colors11.gray(`Loading ZModel schema from: ${schemaFile}`));
 let outputPath = getOutputPath(options, schemaFile);
-if (!
-outputPath =
+if (!path10.isAbsolute(outputPath)) {
+outputPath = path10.resolve(process.cwd(), outputPath);
 }
 const model = await loadSchemaDocument(schemaFile);
 const dataSource = model.declarations.find(isDataSource2);
@@ -1002,10 +3454,10 @@ async function run9(options) {
 }
 databaseUrl = evaluateUrl(schemaUrl);
 }
-const provider =
+const provider = getStringLiteral2(dataSource?.fields.find((f) => f.name === "provider")?.value);
 const dialect = createDialect(provider, databaseUrl, outputPath);
 const jiti = createJiti2(import.meta.url);
-const schemaModule = await jiti.import(
+const schemaModule = await jiti.import(path10.join(outputPath, "schema"));
 const schema = schemaModule.schema;
 const omit = {};
 for (const [modelName, modelDef] of Object.entries(schema.models)) {
@@ -1034,10 +3486,10 @@ async function run9(options) {
 __name(run9, "run");
 function evaluateUrl(schemaUrl) {
 if (isLiteralExpr(schemaUrl)) {
-return
-} else if (
+return getStringLiteral2(schemaUrl);
+} else if (isInvocationExpr2(schemaUrl)) {
 const envFunction = schemaUrl;
-const envName =
+const envName = getStringLiteral2(envFunction.args[0]?.value);
 const envValue = process.env[envName];
 if (!envValue) {
 throw new CliError(`Environment variable ${envName} is not set`);
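`evaluateUrl` above accepts either a string literal or an `env(...)` invocation as the datasource URL, resolving the latter from the process environment and failing if the variable is unset. A standalone restatement of that rule, detached from the ZModel AST types, is sketched below; the discriminated-union input is a stand-in for the AST expressions, and `DATABASE_URL` is only an example.

```ts
// Sketch of the datasource URL resolution rule implemented by evaluateUrl above.
type DatasourceUrl =
    | { kind: "literal"; value: string }
    | { kind: "env"; name: string };

function resolveDatasourceUrl(url: DatasourceUrl): string {
    if (url.kind === "literal") {
        return url.value;
    }
    const value = process.env[url.name];
    if (!value) {
        // the CLI throws a CliError in this case
        throw new Error(`Environment variable ${url.name} is not set`);
    }
    return value;
}

// e.g. resolveDatasourceUrl({ kind: "env", name: "DATABASE_URL" })
```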
@@ -1069,24 +3521,24 @@ function createDialect(provider, databaseUrl, outputPath) {
 let resolvedUrl = databaseUrl.trim();
 if (resolvedUrl.startsWith("file:")) {
 const filePath = resolvedUrl.substring("file:".length);
-if (!
-resolvedUrl =
+if (!path10.isAbsolute(filePath)) {
+resolvedUrl = path10.join(outputPath, filePath);
 }
 }
-console.log(
+console.log(colors11.gray(`Connecting to SQLite database at: ${resolvedUrl}`));
 return new SqliteDialect({
 database: new SQLite(resolvedUrl)
 });
 }
 case "postgresql":
-console.log(
+console.log(colors11.gray(`Connecting to PostgreSQL database at: ${redactDatabaseUrl(databaseUrl)}`));
 return new PostgresDialect({
 pool: new PgPool({
 connectionString: databaseUrl
 })
 });
 case "mysql":
-console.log(
+console.log(colors11.gray(`Connecting to MySQL database at: ${redactDatabaseUrl(databaseUrl)}`));
 return new MysqlDialect({
 pool: createMysqlPool(databaseUrl)
 });
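`createDialect` above maps the datasource provider to a concrete dialect (the `sqlite` case label itself falls just outside the hunk shown). A condensed, standalone version of that mapping, usable when constructing a ZenStackClient manually, is sketched below; the connection strings are placeholders, and relative `file:` paths are not resolved here as the CLI does.

```ts
// Standalone sketch of the provider-to-dialect mapping used by createDialect above.
import SQLite from "better-sqlite3";
import { createPool } from "mysql2";
import { Pool } from "pg";
import { MysqlDialect } from "@zenstackhq/orm/dialects/mysql";
import { PostgresDialect } from "@zenstackhq/orm/dialects/postgres";
import { SqliteDialect } from "@zenstackhq/orm/dialects/sqlite";

function pickDialect(provider: string, databaseUrl: string) {
    switch (provider) {
        case "sqlite":
            return new SqliteDialect({ database: new SQLite(databaseUrl) });
        case "postgresql":
            return new PostgresDialect({ pool: new Pool({ connectionString: databaseUrl }) });
        case "mysql":
            return new MysqlDialect({ pool: createPool(databaseUrl) });
        default:
            throw new Error(`Unsupported datasource provider: ${provider}`);
    }
}
```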
@@ -1119,11 +3571,11 @@ function startServer(client, schema, options) {
 });
 const server = app.listen(options.port, () => {
 console.log(`ZenStack proxy server is running on port: ${options.port}`);
-console.log(`You can visit ZenStack Studio at: ${
+console.log(`You can visit ZenStack Studio at: ${colors11.blue("https://studio.zenstack.dev")}`);
 });
 server.on("error", (err) => {
 if (err.code === "EADDRINUSE") {
-console.error(
+console.error(colors11.red(`Port ${options.port} is already in use. Please choose a different port using -p option.`));
 } else {
 throw new CliError(`Failed to start the server: ${err.message}`);
 }
@@ -1153,7 +3605,7 @@ import fs13 from "fs";
 import * as os2 from "os";
 
 // src/constants.ts
-var TELEMETRY_TRACKING_TOKEN = "
+var TELEMETRY_TRACKING_TOKEN = "<TELEMETRY_TRACKING_TOKEN>";
 
 // src/utils/is-ci.ts
 import { env } from "process";
@@ -1416,7 +3868,7 @@ var proxyAction = /* @__PURE__ */ __name(async (options) => {
 function createProgram() {
 const program = new Command("zen").alias("zenstack").helpOption("-h, --help", "Show this help message").version(getVersion(), "-v --version", "Show CLI version");
 const schemaExtensions = ZModelLanguageMetaData2.fileExtensions.join(", ");
-program.description(`${
+program.description(`${colors12.bold.blue("\u03B6")} ZenStack is the modern data layer for TypeScript apps.
 
 Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAfterError();
 const schemaOption = new Option("--schema <file>", `schema file (with extension ${schemaExtensions}). Defaults to "zenstack/schema.zmodel" unless specified in package.json.`);
@@ -1431,6 +3883,7 @@ Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAf
 migrateCommand.command("resolve").addOption(schemaOption).addOption(noVersionCheckOption).addOption(migrationsOption).addOption(new Option("--applied <migration>", "record a specific migration as applied")).addOption(new Option("--rolled-back <migration>", "record a specific migration as rolled back")).description("Resolve issues with database migrations in deployment databases").action((options) => migrateAction("resolve", options));
 const dbCommand = program.command("db").description("Manage your database schema during development");
 dbCommand.command("push").description("Push the state from your schema to your database").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new Option("--accept-data-loss", "ignore data loss warnings")).addOption(new Option("--force-reset", "force a reset of the database before push")).action((options) => dbAction("push", options));
+dbCommand.command("pull").description("Introspect your database.").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new Option("-o, --output <path>", "set custom output path for the introspected schema. If a file path is provided, all schemas are merged into that single file. If a directory path is provided, files are written to the directory and imports are kept.")).addOption(new Option("--model-casing <pascal|camel|snake|none>", "set the casing of generated models").default("pascal")).addOption(new Option("--field-casing <pascal|camel|snake|none>", "set the casing of generated fields").default("camel")).addOption(new Option("--always-map", "always add @map and @@map attributes to models and fields").default(false)).addOption(new Option("--quote <double|single>", "set the quote style of generated schema files").default("single")).addOption(new Option("--indent <number>", "set the indentation of the generated schema files").default(4).argParser(parseInt)).action((options) => dbAction("pull", options));
 dbCommand.command("seed").description("Seed the database").allowExcessArguments(true).addHelpText("after", `
 Seed script is configured under the "zenstack.seed" field in package.json.
 E.g.:
@@ -1467,10 +3920,10 @@ async function main() {
 if (e instanceof CommanderError) {
 exitCode = e.exitCode;
 } else if (e instanceof CliError) {
-console.error(
+console.error(colors12.red(e.message));
 exitCode = 1;
 } else {
-console.error(
+console.error(colors12.red(`Unhandled error: ${e}`));
 exitCode = 1;
 }
 }