@zenstackhq/cli 3.3.2 → 3.4.0-beta.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -29,8 +29,8 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
29
29
 
30
30
  // src/index.ts
31
31
  var import_config = require("dotenv/config");
32
- var import_language4 = require("@zenstackhq/language");
33
- var import_colors10 = __toESM(require("colors"), 1);
32
+ var import_language5 = require("@zenstackhq/language");
33
+ var import_colors12 = __toESM(require("colors"), 1);
34
34
  var import_commander = require("commander");
35
35
 
36
36
  // src/actions/check.ts
@@ -83,8 +83,10 @@ function getSchemaFile(file) {
83
83
  }
84
84
  }
85
85
  __name(getSchemaFile, "getSchemaFile");
86
- async function loadSchemaDocument(schemaFile) {
87
- const loadResult = await (0, import_language.loadDocument)(schemaFile);
86
+ async function loadSchemaDocument(schemaFile, opts = {}) {
87
+ const returnServices = opts.returnServices ?? false;
88
+ const mergeImports = opts.mergeImports ?? true;
89
+ const loadResult = await (0, import_language.loadDocument)(schemaFile, [], mergeImports);
88
90
  if (!loadResult.success) {
89
91
  loadResult.errors.forEach((err) => {
90
92
  console.error(import_colors.default.red(err));
@@ -94,6 +96,10 @@ async function loadSchemaDocument(schemaFile) {
94
96
  loadResult.warnings.forEach((warn) => {
95
97
  console.warn(import_colors.default.yellow(warn));
96
98
  });
99
+ if (returnServices) return {
100
+ model: loadResult.model,
101
+ services: loadResult.services
102
+ };
97
103
  return loadResult.model;
98
104
  }
99
105
  __name(loadSchemaDocument, "loadSchemaDocument");
@@ -198,7 +204,12 @@ async function run(options) {
198
204
  __name(run, "run");
199
205
 
200
206
  // src/actions/db.ts
207
+ var import_language2 = require("@zenstackhq/language");
208
+ var import_ast4 = require("@zenstackhq/language/ast");
209
+ var import_colors4 = __toESM(require("colors"), 1);
201
210
  var import_node_fs2 = __toESM(require("fs"), 1);
211
+ var import_node_path2 = __toESM(require("path"), 1);
212
+ var import_ora = __toESM(require("ora"), 1);
202
213
 
203
214
  // src/utils/exec-utils.ts
204
215
  var import_child_process = require("child_process");
@@ -248,12 +259,2048 @@ function execPrisma(args, options) {
248
259
  }
249
260
  __name(execPrisma, "execPrisma");
250
261
 
262
+ // src/actions/pull/index.ts
263
+ var import_colors3 = __toESM(require("colors"), 1);
264
+ var import_ast3 = require("@zenstackhq/language/ast");
265
+ var import_factory = require("@zenstackhq/language/factory");
266
+ var import_langium = require("langium");
267
+ var import_common_helpers = require("@zenstackhq/common-helpers");
268
+
269
+ // src/actions/pull/utils.ts
270
+ var import_ast2 = require("@zenstackhq/language/ast");
271
+ var import_utils = require("@zenstackhq/language/utils");
272
+ function getDatasource(model) {
273
+ const datasource = model.declarations.find((d) => d.$type === "DataSource");
274
+ if (!datasource) {
275
+ throw new CliError("No datasource declaration found in the schema.");
276
+ }
277
+ const urlField = datasource.fields.find((f) => f.name === "url");
278
+ if (!urlField) throw new CliError(`No url field found in the datasource declaration.`);
279
+ let url = (0, import_utils.getStringLiteral)(urlField.value);
280
+ if (!url && (0, import_ast2.isInvocationExpr)(urlField.value)) {
281
+ const envName = (0, import_utils.getStringLiteral)(urlField.value.args[0]?.value);
282
+ if (!envName) {
283
+ throw new CliError("The url field must be a string literal or an env().");
284
+ }
285
+ if (!process.env[envName]) {
286
+ throw new CliError(`Environment variable ${envName} is not set, please set it to the database connection string.`);
287
+ }
288
+ url = process.env[envName];
289
+ }
290
+ if (!url) {
291
+ throw new CliError("The url field must be a string literal or an env().");
292
+ }
293
+ if (url.startsWith("file:")) {
294
+ url = new URL(url, `file:${model.$document.uri.path}`).pathname;
295
+ if (process.platform === "win32" && url[0] === "/") url = url.slice(1);
296
+ }
297
+ const defaultSchemaField = datasource.fields.find((f) => f.name === "defaultSchema");
298
+ const defaultSchema = defaultSchemaField && (0, import_utils.getStringLiteral)(defaultSchemaField.value) || "public";
299
+ const schemasField = datasource.fields.find((f) => f.name === "schemas");
300
+ const schemas = schemasField && (0, import_utils.getLiteralArray)(schemasField.value)?.filter((s) => s !== void 0) || [];
301
+ const provider = (0, import_utils.getStringLiteral)(datasource.fields.find((f) => f.name === "provider")?.value);
302
+ if (!provider) {
303
+ throw new CliError(`Datasource "${datasource.name}" is missing a "provider" field.`);
304
+ }
305
+ return {
306
+ name: datasource.name,
307
+ provider,
308
+ url,
309
+ defaultSchema,
310
+ schemas,
311
+ allSchemas: [
312
+ defaultSchema,
313
+ ...schemas
314
+ ]
315
+ };
316
+ }
317
+ __name(getDatasource, "getDatasource");
318
+ function getDbName(decl, includeSchema = false) {
319
+ if (!("attributes" in decl)) return decl.name;
320
+ const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@schema");
321
+ let schema = "public";
322
+ if (schemaAttr) {
323
+ const schemaAttrValue = schemaAttr.args[0]?.value;
324
+ if (schemaAttrValue?.$type === "StringLiteral") {
325
+ schema = schemaAttrValue.value;
326
+ }
327
+ }
328
+ const formatName = /* @__PURE__ */ __name((name) => `${schema && includeSchema ? `${schema}.` : ""}${name}`, "formatName");
329
+ const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@map" || a.decl.ref?.name === "@map");
330
+ if (!nameAttr) return formatName(decl.name);
331
+ const attrValue = nameAttr.args[0]?.value;
332
+ if (attrValue?.$type !== "StringLiteral") return formatName(decl.name);
333
+ return formatName(attrValue.value);
334
+ }
335
+ __name(getDbName, "getDbName");
336
+ function getRelationFkName(decl) {
337
+ const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
338
+ const schemaAttrValue = relationAttr?.args.find((a) => a.name === "map")?.value;
339
+ return schemaAttrValue?.value;
340
+ }
341
+ __name(getRelationFkName, "getRelationFkName");
342
+ function getRelationFieldsKey(decl) {
343
+ const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
344
+ if (!relationAttr) return void 0;
345
+ const fieldsArg = relationAttr.args.find((a) => a.name === "fields")?.value;
346
+ if (!fieldsArg || fieldsArg.$type !== "ArrayExpr") return void 0;
347
+ const fieldNames = fieldsArg.items.filter((item) => item.$type === "ReferenceExpr").map((item) => item.target?.$refText || item.target?.ref?.name).filter((name) => !!name).sort();
348
+ return fieldNames.length > 0 ? fieldNames.join(",") : void 0;
349
+ }
350
+ __name(getRelationFieldsKey, "getRelationFieldsKey");
351
+ function getDeclarationRef(type2, name, services) {
352
+ const node = services.shared.workspace.IndexManager.allElements(type2).find((m) => m.node && getDbName(m.node) === name)?.node;
353
+ if (!node) throw new CliError(`Declaration not found: ${name}`);
354
+ return node;
355
+ }
356
+ __name(getDeclarationRef, "getDeclarationRef");
357
+ function getEnumRef(name, services) {
358
+ return getDeclarationRef("Enum", name, services);
359
+ }
360
+ __name(getEnumRef, "getEnumRef");
361
+ function getAttributeRef(name, services) {
362
+ return getDeclarationRef("Attribute", name, services);
363
+ }
364
+ __name(getAttributeRef, "getAttributeRef");
365
+ function getFunctionRef(name, services) {
366
+ return getDeclarationRef("FunctionDecl", name, services);
367
+ }
368
+ __name(getFunctionRef, "getFunctionRef");
369
+ function normalizeFloatDefault(val) {
370
+ if (/^-?\d+$/.test(val)) {
371
+ return (ab) => ab.NumberLiteral.setValue(val + ".0");
372
+ }
373
+ if (/^-?\d+\.\d+$/.test(val)) {
374
+ return (ab) => ab.NumberLiteral.setValue(val);
375
+ }
376
+ return (ab) => ab.NumberLiteral.setValue(val);
377
+ }
378
+ __name(normalizeFloatDefault, "normalizeFloatDefault");
379
+ function normalizeDecimalDefault(val) {
380
+ if (/^-?\d+$/.test(val)) {
381
+ return (ab) => ab.NumberLiteral.setValue(val + ".00");
382
+ }
383
+ if (/^-?\d+\.\d+$/.test(val)) {
384
+ const [integerPart, fractionalPart] = val.split(".");
385
+ let normalized = fractionalPart.replace(/0+$/, "");
386
+ if (normalized.length < 2) {
387
+ normalized = normalized.padEnd(2, "0");
388
+ }
389
+ return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`);
390
+ }
391
+ return (ab) => ab.NumberLiteral.setValue(val);
392
+ }
393
+ __name(normalizeDecimalDefault, "normalizeDecimalDefault");
394
+
395
+ // src/actions/pull/casing.ts
396
+ function resolveNameCasing(casing, originalName) {
397
+ let name = originalName;
398
+ const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? "_" : "";
399
+ switch (casing) {
400
+ case "pascal":
401
+ name = toPascalCase(originalName);
402
+ break;
403
+ case "camel":
404
+ name = toCamelCase(originalName);
405
+ break;
406
+ case "snake":
407
+ name = toSnakeCase(originalName);
408
+ break;
409
+ }
410
+ return {
411
+ modified: name !== originalName || fieldPrefix !== "",
412
+ name: `${fieldPrefix}${name}`
413
+ };
414
+ }
415
+ __name(resolveNameCasing, "resolveNameCasing");
416
+ function isAllUpperCase(str) {
417
+ return str === str.toUpperCase();
418
+ }
419
+ __name(isAllUpperCase, "isAllUpperCase");
420
+ function toPascalCase(str) {
421
+ if (isAllUpperCase(str)) return str;
422
+ return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase());
423
+ }
424
+ __name(toPascalCase, "toPascalCase");
425
+ function toCamelCase(str) {
426
+ if (isAllUpperCase(str)) return str;
427
+ return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase());
428
+ }
429
+ __name(toCamelCase, "toCamelCase");
430
+ function toSnakeCase(str) {
431
+ if (isAllUpperCase(str)) return str;
432
+ return str.replace(/[- ]+/g, "_").replace(/([a-z0-9])([A-Z])/g, "$1_$2").toLowerCase();
433
+ }
434
+ __name(toSnakeCase, "toSnakeCase");
435
+
436
+ // src/actions/pull/index.ts
437
+ function syncEnums({ dbEnums, model, oldModel, provider, options, services, defaultSchema }) {
438
+ if (provider.isSupportedFeature("NativeEnum")) {
439
+ for (const dbEnum of dbEnums) {
440
+ const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type);
441
+ if (modified) console.log(import_colors3.default.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`));
442
+ const factory = new import_factory.EnumFactory().setName(name);
443
+ if (modified || options.alwaysMap) factory.addAttribute((builder) => builder.setDecl(getAttributeRef("@@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)));
444
+ dbEnum.values.forEach((v) => {
445
+ const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, v);
446
+ factory.addField((builder) => {
447
+ builder.setName(name2);
448
+ if (modified2 || options.alwaysMap) builder.addAttribute((builder2) => builder2.setDecl(getAttributeRef("@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)));
449
+ return builder;
450
+ });
451
+ });
452
+ if (dbEnum.schema_name && dbEnum.schema_name !== "" && dbEnum.schema_name !== defaultSchema) {
453
+ factory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)));
454
+ }
455
+ model.declarations.push(factory.get({
456
+ $container: model
457
+ }));
458
+ }
459
+ } else {
460
+ const dummyBuildReference = /* @__PURE__ */ __name((_node, _property, _refNode, refText) => ({
461
+ $refText: refText
462
+ }), "dummyBuildReference");
463
+ oldModel.declarations.filter((d) => (0, import_ast3.isEnum)(d)).forEach((d) => {
464
+ const copy = import_langium.AstUtils.copyAstNode(d, dummyBuildReference);
465
+ copy.$container = model;
466
+ model.declarations.push(copy);
467
+ });
468
+ }
469
+ }
470
+ __name(syncEnums, "syncEnums");
471
+ function syncTable({ model, provider, table, services, options, defaultSchema }) {
472
+ const idAttribute = getAttributeRef("@id", services);
473
+ const modelIdAttribute = getAttributeRef("@@id", services);
474
+ const uniqueAttribute = getAttributeRef("@unique", services);
475
+ const modelUniqueAttribute = getAttributeRef("@@unique", services);
476
+ const fieldMapAttribute = getAttributeRef("@map", services);
477
+ const tableMapAttribute = getAttributeRef("@@map", services);
478
+ const modelindexAttribute = getAttributeRef("@@index", services);
479
+ const relations = [];
480
+ const { name, modified } = resolveNameCasing(options.modelCasing, table.name);
481
+ const multiPk = table.columns.filter((c) => c.pk).length > 1;
482
+ const modelFactory = new import_factory.DataModelFactory().setName(name).setIsView(table.type === "view");
483
+ modelFactory.setContainer(model);
484
+ if (modified || options.alwaysMap) {
485
+ modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)));
486
+ }
487
+ const fkGroups = /* @__PURE__ */ new Map();
488
+ table.columns.forEach((column) => {
489
+ if (column.foreign_key_table && column.foreign_key_name) {
490
+ const group = fkGroups.get(column.foreign_key_name) ?? [];
491
+ group.push(column);
492
+ fkGroups.set(column.foreign_key_name, group);
493
+ }
494
+ });
495
+ for (const [fkName, fkColumns] of fkGroups) {
496
+ const firstCol = fkColumns[0];
497
+ const isSingleColumnPk = fkColumns.length === 1 && !multiPk && firstCol.pk;
498
+ const isUniqueRelation = fkColumns.length === 1 && firstCol.unique || isSingleColumnPk;
499
+ relations.push({
500
+ schema: table.schema,
501
+ table: table.name,
502
+ columns: fkColumns.map((c) => c.name),
503
+ type: "one",
504
+ fk_name: fkName,
505
+ foreign_key_on_delete: firstCol.foreign_key_on_delete,
506
+ foreign_key_on_update: firstCol.foreign_key_on_update,
507
+ nullable: firstCol.nullable,
508
+ references: {
509
+ schema: firstCol.foreign_key_schema,
510
+ table: firstCol.foreign_key_table,
511
+ columns: fkColumns.map((c) => c.foreign_key_column),
512
+ type: isUniqueRelation ? "one" : "many"
513
+ }
514
+ });
515
+ }
516
+ table.columns.forEach((column) => {
517
+ const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, column.name);
518
+ const builtinType = provider.getBuiltinType(column.datatype);
519
+ modelFactory.addField((builder) => {
520
+ builder.setName(name2);
521
+ builder.setType((typeBuilder) => {
522
+ typeBuilder.setArray(builtinType.isArray);
523
+ typeBuilder.setOptional(builtinType.isArray ? false : column.nullable);
524
+ if (column.computed) {
525
+ typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
526
+ } else if (column.datatype === "enum") {
527
+ const ref = model.declarations.find((d) => (0, import_ast3.isEnum)(d) && getDbName(d) === column.datatype_name);
528
+ if (!ref) {
529
+ throw new CliError(`Enum ${column.datatype_name} not found`);
530
+ }
531
+ typeBuilder.setReference(ref);
532
+ } else {
533
+ if (builtinType.type !== "Unsupported") {
534
+ typeBuilder.setType(builtinType.type);
535
+ } else {
536
+ typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
537
+ }
538
+ }
539
+ return typeBuilder;
540
+ });
541
+ if (column.pk && !multiPk) {
542
+ builder.addAttribute((b) => b.setDecl(idAttribute));
543
+ }
544
+ const fieldAttrs = provider.getFieldAttributes({
545
+ fieldName: column.name,
546
+ fieldType: builtinType.type,
547
+ datatype: column.datatype,
548
+ length: column.length,
549
+ precision: column.precision,
550
+ services
551
+ });
552
+ fieldAttrs.forEach(builder.addAttribute.bind(builder));
553
+ if (column.default && !column.computed) {
554
+ const defaultExprBuilder = provider.getDefaultValue({
555
+ fieldType: builtinType.type,
556
+ datatype: column.datatype,
557
+ datatype_name: column.datatype_name,
558
+ defaultValue: column.default,
559
+ services,
560
+ enums: model.declarations.filter((d) => d.$type === "Enum")
561
+ });
562
+ if (defaultExprBuilder) {
563
+ const defaultAttr = new import_factory.DataFieldAttributeFactory().setDecl(getAttributeRef("@default", services)).addArg(defaultExprBuilder);
564
+ builder.addAttribute(defaultAttr);
565
+ }
566
+ }
567
+ if (column.unique && !column.pk) {
568
+ builder.addAttribute((b) => {
569
+ b.setDecl(uniqueAttribute);
570
+ const isDefaultName = !column.unique_name || column.unique_name === `${table.name}_${column.name}_key` || column.unique_name === column.name;
571
+ if (!isDefaultName) {
572
+ b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name), "map");
573
+ }
574
+ return b;
575
+ });
576
+ }
577
+ if (modified2 || options.alwaysMap) {
578
+ builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab2) => ab2.StringLiteral.setValue(column.name)));
579
+ }
580
+ return builder;
581
+ });
582
+ });
583
+ const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name);
584
+ if (multiPk) {
585
+ modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => {
586
+ const arrayExpr = argBuilder.ArrayExpr;
587
+ pkColumns.forEach((c) => {
588
+ const ref = modelFactory.node.fields.find((f) => getDbName(f) === c);
589
+ if (!ref) {
590
+ throw new CliError(`Field ${c} not found`);
591
+ }
592
+ arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref));
593
+ });
594
+ return arrayExpr;
595
+ }));
596
+ }
597
+ const hasUniqueConstraint = table.columns.some((c) => c.unique || c.pk) || table.indexes.some((i) => i.unique);
598
+ if (!hasUniqueConstraint) {
599
+ modelFactory.addAttribute((a) => a.setDecl(getAttributeRef("@@ignore", services)));
600
+ modelFactory.addComment("/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.");
601
+ }
602
+ const sortedIndexes = table.indexes.reverse().sort((a, b) => {
603
+ if (a.unique && !b.unique) return -1;
604
+ if (!a.unique && b.unique) return 1;
605
+ return 0;
606
+ });
607
+ sortedIndexes.forEach((index) => {
608
+ if (index.predicate) {
609
+ console.warn(import_colors3.default.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
610
+ - Model: "${table.name}", constraint: "${index.name}"`));
611
+ return;
612
+ }
613
+ if (index.columns.find((c) => c.expression)) {
614
+ console.warn(import_colors3.default.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
615
+ - Model: "${table.name}", constraint: "${index.name}"`));
616
+ return;
617
+ }
618
+ if (index.primary) {
619
+ return;
620
+ }
621
+ if (index.columns.length === 1 && (index.columns.find((c) => pkColumns.includes(c.name)) || index.unique)) {
622
+ return;
623
+ }
624
+ modelFactory.addAttribute((builder) => {
625
+ const attr = builder.setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute).addArg((argBuilder) => {
626
+ const arrayExpr = argBuilder.ArrayExpr;
627
+ index.columns.forEach((c) => {
628
+ const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name);
629
+ if (!ref) {
630
+ throw new CliError(`Column ${c.name} not found in model ${table.name}`);
631
+ }
632
+ arrayExpr.addItem((itemBuilder) => {
633
+ const refExpr = itemBuilder.ReferenceExpr.setTarget(ref);
634
+ if (c.order && c.order !== "ASC") refExpr.addArg((ab) => ab.StringLiteral.setValue("DESC"), "sort");
635
+ return refExpr;
636
+ });
637
+ });
638
+ return arrayExpr;
639
+ });
640
+ const suffix = index.unique ? "_key" : "_idx";
641
+ if (index.name !== `${table.name}_${index.columns.map((c) => c.name).join("_")}${suffix}`) {
642
+ attr.addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), "map");
643
+ }
644
+ return attr;
645
+ });
646
+ });
647
+ if (table.schema && table.schema !== "" && table.schema !== defaultSchema) {
648
+ modelFactory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(table.schema)));
649
+ }
650
+ model.declarations.push(modelFactory.node);
651
+ return relations;
652
+ }
653
+ __name(syncTable, "syncTable");
654
+ function syncRelation({ model, relation, services, options, selfRelation, similarRelations }) {
655
+ const idAttribute = getAttributeRef("@id", services);
656
+ const uniqueAttribute = getAttributeRef("@unique", services);
657
+ const relationAttribute = getAttributeRef("@relation", services);
658
+ const fieldMapAttribute = getAttributeRef("@map", services);
659
+ const tableMapAttribute = getAttributeRef("@@map", services);
660
+ const includeRelationName = selfRelation || similarRelations > 0;
661
+ if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) {
662
+ throw new CliError("Cannot find required attributes in the model.");
663
+ }
664
+ const sourceModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.table);
665
+ if (!sourceModel) return;
666
+ const sourceFields = [];
667
+ for (const colName of relation.columns) {
668
+ const idx = sourceModel.fields.findIndex((f) => getDbName(f) === colName);
669
+ const field = sourceModel.fields[idx];
670
+ if (!field) return;
671
+ sourceFields.push({
672
+ field,
673
+ index: idx
674
+ });
675
+ }
676
+ const targetModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.references.table);
677
+ if (!targetModel) return;
678
+ const targetFields = [];
679
+ for (const colName of relation.references.columns) {
680
+ const field = targetModel.fields.find((f) => getDbName(f) === colName);
681
+ if (!field) return;
682
+ targetFields.push(field);
683
+ }
684
+ const firstSourceField = sourceFields[0].field;
685
+ const firstSourceFieldId = sourceFields[0].index;
686
+ const firstColumn = relation.columns[0];
687
+ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? "_" : "";
688
+ const relationName = `${relation.table}${similarRelations > 0 ? `_${firstColumn}` : ""}To${relation.references.table}`;
689
+ const sourceNameFromReference = firstSourceField.name.toLowerCase().endsWith("id") ? `${resolveNameCasing(options.fieldCasing, firstSourceField.name.slice(0, -2)).name}${relation.type === "many" ? "s" : ""}` : void 0;
690
+ const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference);
691
+ let { name: sourceFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${fieldPrefix}${(0, import_common_helpers.lowerCaseFirst)(sourceModel.name)}_${firstColumn}` : `${(!sourceFieldFromReference ? sourceNameFromReference : void 0) || (0, import_common_helpers.lowerCaseFirst)(resolveNameCasing(options.fieldCasing, targetModel.name).name)}${relation.type === "many" ? "s" : ""}`);
692
+ if (sourceModel.fields.find((f) => f.name === sourceFieldName)) {
693
+ sourceFieldName = `${sourceFieldName}To${(0, import_common_helpers.lowerCaseFirst)(targetModel.name)}_${relation.references.columns[0]}`;
694
+ }
695
+ const sourceFieldFactory = new import_factory.DataFieldFactory().setContainer(sourceModel).setName(sourceFieldName).setType((tb) => tb.setOptional(relation.nullable).setArray(relation.type === "many").setReference(targetModel));
696
+ sourceFieldFactory.addAttribute((ab) => {
697
+ ab.setDecl(relationAttribute);
698
+ if (includeRelationName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relationName));
699
+ ab.addArg((ab2) => {
700
+ const arrayExpr = ab2.ArrayExpr;
701
+ for (const { field } of sourceFields) {
702
+ arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
703
+ }
704
+ return arrayExpr;
705
+ }, "fields");
706
+ ab.addArg((ab2) => {
707
+ const arrayExpr = ab2.ArrayExpr;
708
+ for (const field of targetFields) {
709
+ arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
710
+ }
711
+ return arrayExpr;
712
+ }, "references");
713
+ const onDeleteDefault = relation.nullable ? "SET NULL" : "RESTRICT";
714
+ if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) {
715
+ const enumRef = getEnumRef("ReferentialAction", services);
716
+ if (!enumRef) throw new CliError("ReferentialAction enum not found");
717
+ const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_delete.replace(/ /g, "").toLowerCase());
718
+ if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_delete} not found`);
719
+ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onDelete");
720
+ }
721
+ if (relation.foreign_key_on_update && relation.foreign_key_on_update !== "CASCADE") {
722
+ const enumRef = getEnumRef("ReferentialAction", services);
723
+ if (!enumRef) throw new CliError("ReferentialAction enum not found");
724
+ const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_update.replace(/ /g, "").toLowerCase());
725
+ if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_update} not found`);
726
+ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onUpdate");
727
+ }
728
+ const defaultFkName = `${relation.table}_${relation.columns.join("_")}_fkey`;
729
+ if (relation.fk_name && relation.fk_name !== defaultFkName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relation.fk_name), "map");
730
+ return ab;
731
+ });
732
+ sourceModel.fields.splice(firstSourceFieldId, 0, sourceFieldFactory.node);
733
+ const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? "_" : "";
734
+ const { name: oppositeFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${oppositeFieldPrefix}${(0, import_common_helpers.lowerCaseFirst)(sourceModel.name)}_${firstColumn}` : `${(0, import_common_helpers.lowerCaseFirst)(resolveNameCasing(options.fieldCasing, sourceModel.name).name)}${relation.references.type === "many" ? "s" : ""}`);
735
+ const targetFieldFactory = new import_factory.DataFieldFactory().setContainer(targetModel).setName(oppositeFieldName).setType((tb) => tb.setOptional(relation.references.type === "one").setArray(relation.references.type === "many").setReference(sourceModel));
736
+ if (includeRelationName) targetFieldFactory.addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab2) => ab2.StringLiteral.setValue(relationName)));
737
+ targetModel.fields.push(targetFieldFactory.node);
738
+ }
739
+ __name(syncRelation, "syncRelation");
740
+
741
+ // src/actions/pull/provider/mysql.ts
742
+ var import_factory2 = require("@zenstackhq/language/factory");
743
+ function normalizeGenerationExpression(typeDef) {
744
+ return typeDef.replace(/_([0-9A-Za-z_]+)\\?'/g, "'").replace(/\\'/g, "'");
745
+ }
746
+ __name(normalizeGenerationExpression, "normalizeGenerationExpression");
747
+ var mysql = {
748
+ isSupportedFeature(feature) {
749
+ switch (feature) {
750
+ case "NativeEnum":
751
+ return true;
752
+ case "Schema":
753
+ default:
754
+ return false;
755
+ }
756
+ },
757
+ getBuiltinType(type2) {
758
+ const t = (type2 || "").toLowerCase().trim();
759
+ const isArray = false;
760
+ switch (t) {
761
+ // integers
762
+ case "tinyint":
763
+ case "smallint":
764
+ case "mediumint":
765
+ case "int":
766
+ case "integer":
767
+ return {
768
+ type: "Int",
769
+ isArray
770
+ };
771
+ case "bigint":
772
+ return {
773
+ type: "BigInt",
774
+ isArray
775
+ };
776
+ // decimals and floats
777
+ case "decimal":
778
+ case "numeric":
779
+ return {
780
+ type: "Decimal",
781
+ isArray
782
+ };
783
+ case "float":
784
+ case "double":
785
+ case "real":
786
+ return {
787
+ type: "Float",
788
+ isArray
789
+ };
790
+ // boolean (MySQL uses TINYINT(1) for boolean)
791
+ case "boolean":
792
+ case "bool":
793
+ return {
794
+ type: "Boolean",
795
+ isArray
796
+ };
797
+ // strings
798
+ case "char":
799
+ case "varchar":
800
+ case "tinytext":
801
+ case "text":
802
+ case "mediumtext":
803
+ case "longtext":
804
+ return {
805
+ type: "String",
806
+ isArray
807
+ };
808
+ // dates/times
809
+ case "date":
810
+ case "time":
811
+ case "datetime":
812
+ case "timestamp":
813
+ case "year":
814
+ return {
815
+ type: "DateTime",
816
+ isArray
817
+ };
818
+ // binary
819
+ case "binary":
820
+ case "varbinary":
821
+ case "tinyblob":
822
+ case "blob":
823
+ case "mediumblob":
824
+ case "longblob":
825
+ return {
826
+ type: "Bytes",
827
+ isArray
828
+ };
829
+ // json
830
+ case "json":
831
+ return {
832
+ type: "Json",
833
+ isArray
834
+ };
835
+ default:
836
+ if (t.startsWith("enum(")) {
837
+ return {
838
+ type: "String",
839
+ isArray
840
+ };
841
+ }
842
+ if (t.startsWith("set(")) {
843
+ return {
844
+ type: "String",
845
+ isArray
846
+ };
847
+ }
848
+ return {
849
+ type: "Unsupported",
850
+ isArray
851
+ };
852
+ }
853
+ },
854
+ getDefaultDatabaseType(type2) {
855
+ switch (type2) {
856
+ case "String":
857
+ return {
858
+ type: "varchar",
859
+ precision: 191
860
+ };
861
+ case "Boolean":
862
+ return {
863
+ type: "boolean"
864
+ };
865
+ case "Int":
866
+ return {
867
+ type: "int"
868
+ };
869
+ case "BigInt":
870
+ return {
871
+ type: "bigint"
872
+ };
873
+ case "Float":
874
+ return {
875
+ type: "double"
876
+ };
877
+ case "Decimal":
878
+ return {
879
+ type: "decimal",
880
+ precision: 65
881
+ };
882
+ case "DateTime":
883
+ return {
884
+ type: "datetime",
885
+ precision: 3
886
+ };
887
+ case "Json":
888
+ return {
889
+ type: "json"
890
+ };
891
+ case "Bytes":
892
+ return {
893
+ type: "longblob"
894
+ };
895
+ }
896
+ },
897
+ async introspect(connectionString, options) {
898
+ const mysql2 = await import("mysql2/promise");
899
+ const connection = await mysql2.createConnection(connectionString);
900
+ try {
901
+ const url = new URL(connectionString);
902
+ const databaseName = url.pathname.replace("/", "");
903
+ if (!databaseName) {
904
+ throw new CliError("Database name not found in connection string");
905
+ }
906
+ const [tableRows] = await connection.execute(getTableIntrospectionQuery(), [
907
+ databaseName
908
+ ]);
909
+ const tables = [];
910
+ for (const row of tableRows) {
911
+ const columns = typeof row.columns === "string" ? JSON.parse(row.columns) : row.columns;
912
+ const indexes = typeof row.indexes === "string" ? JSON.parse(row.indexes) : row.indexes;
913
+ const sortedColumns = (columns || []).sort((a, b) => (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0)).map((col) => {
914
+ if (col.datatype === "enum" && col.datatype_name) {
915
+ return {
916
+ ...col,
917
+ datatype_name: resolveNameCasing(options.modelCasing, col.datatype_name).name
918
+ };
919
+ }
920
+ if (col.computed && typeof col.datatype === "string") {
921
+ return {
922
+ ...col,
923
+ datatype: normalizeGenerationExpression(col.datatype)
924
+ };
925
+ }
926
+ return col;
927
+ });
928
+ const filteredIndexes = (indexes || []).filter((idx) => !(idx.columns.length === 1 && idx.name === `${row.name}_${idx.columns[0]?.name}_fkey`));
929
+ tables.push({
930
+ schema: "",
931
+ name: row.name,
932
+ type: row.type,
933
+ definition: row.definition,
934
+ columns: sortedColumns,
935
+ indexes: filteredIndexes
936
+ });
937
+ }
938
+ const [enumRows] = await connection.execute(getEnumIntrospectionQuery(), [
939
+ databaseName
940
+ ]);
941
+ const enums = enumRows.map((row) => {
942
+ const values = parseEnumValues(row.column_type);
943
+ const syntheticName = `${row.table_name}_${row.column_name}`;
944
+ const { name } = resolveNameCasing(options.modelCasing, syntheticName);
945
+ return {
946
+ schema_name: "",
947
+ enum_type: name,
948
+ values
949
+ };
950
+ });
951
+ return {
952
+ tables,
953
+ enums
954
+ };
955
+ } finally {
956
+ await connection.end();
957
+ }
958
+ },
959
+ getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
960
+ const val = defaultValue.trim();
961
+ if (val.toUpperCase() === "NULL") {
962
+ return null;
963
+ }
964
+ if (datatype === "enum" && datatype_name) {
965
+ const enumDef = enums.find((e) => getDbName(e) === datatype_name);
966
+ if (enumDef) {
967
+ const enumValue = val.startsWith("'") && val.endsWith("'") ? val.slice(1, -1) : val;
968
+ const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
969
+ if (enumField) {
970
+ return (ab) => ab.ReferenceExpr.setTarget(enumField);
971
+ }
972
+ }
973
+ }
974
+ switch (fieldType) {
975
+ case "DateTime":
976
+ if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === "current_timestamp()" || val.toLowerCase() === "now()") {
977
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
978
+ }
979
+ return (ab) => ab.StringLiteral.setValue(val);
980
+ case "Int":
981
+ case "BigInt":
982
+ if (val.toLowerCase() === "auto_increment") {
983
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
984
+ }
985
+ return (ab) => ab.NumberLiteral.setValue(val);
986
+ case "Float":
987
+ return normalizeFloatDefault(val);
988
+ case "Decimal":
989
+ return normalizeDecimalDefault(val);
990
+ case "Boolean":
991
+ return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === "true" || val === "1" || val === "b'1'");
992
+ case "String":
993
+ if (val.toLowerCase() === "uuid()") {
994
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("uuid", services));
995
+ }
996
+ return (ab) => ab.StringLiteral.setValue(val);
997
+ }
998
+ if (val.includes("(") && val.includes(")")) {
999
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
1000
+ }
1001
+ console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
1002
+ return null;
1003
+ },
1004
+ getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
1005
+ const factories = [];
1006
+ if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
1007
+ factories.push(new import_factory2.DataFieldAttributeFactory().setDecl(getAttributeRef("@updatedAt", services)));
1008
+ }
1009
+ const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`)?.node;
1010
+ const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
1011
+ if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
1012
+ const dbAttrFactory = new import_factory2.DataFieldAttributeFactory().setDecl(dbAttr);
1013
+ const sizeValue = length ?? precision;
1014
+ if (sizeValue !== void 0 && sizeValue !== null) {
1015
+ dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(sizeValue));
1016
+ }
1017
+ factories.push(dbAttrFactory);
1018
+ }
1019
+ return factories;
1020
+ }
1021
+ };
1022
+ function getTableIntrospectionQuery() {
1023
+ return `
1024
+ -- Main query: one row per table/view with columns and indexes as nested JSON arrays.
1025
+ -- Uses INFORMATION_SCHEMA which is MySQL's standard metadata catalog.
1026
+ SELECT
1027
+ t.TABLE_NAME AS \`name\`, -- table or view name
1028
+ CASE t.TABLE_TYPE -- map MySQL table type strings to our internal types
1029
+ WHEN 'BASE TABLE' THEN 'table'
1030
+ WHEN 'VIEW' THEN 'view'
1031
+ ELSE NULL
1032
+ END AS \`type\`,
1033
+ CASE -- for views, retrieve the SQL definition
1034
+ WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION
1035
+ ELSE NULL
1036
+ END AS \`definition\`,
1037
+
1038
+ -- ===== COLUMNS subquery =====
1039
+ -- Wraps an ordered subquery in JSON_ARRAYAGG to produce a JSON array of column objects.
1040
+ (
1041
+ SELECT JSON_ARRAYAGG(col_json)
1042
+ FROM (
1043
+ SELECT JSON_OBJECT(
1044
+ 'ordinal_position', c.ORDINAL_POSITION, -- column position (used for sorting)
1045
+ 'name', c.COLUMN_NAME, -- column name
1046
+
1047
+ -- datatype: for generated/computed columns, construct the full DDL-like type definition
1048
+ -- (e.g., "int GENERATED ALWAYS AS (col1 + col2) STORED") so it can be rendered as
1049
+ -- Unsupported("..."); special-case tinyint(1) as 'boolean' (MySQL's boolean convention);
1050
+ -- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime').
1051
+ 'datatype', CASE
1052
+ WHEN c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '' THEN
1053
+ CONCAT(
1054
+ c.COLUMN_TYPE,
1055
+ ' GENERATED ALWAYS AS (',
1056
+ c.GENERATION_EXPRESSION,
1057
+ ') ',
1058
+ CASE
1059
+ WHEN c.EXTRA LIKE '%STORED GENERATED%' THEN 'STORED'
1060
+ ELSE 'VIRTUAL'
1061
+ END
1062
+ )
1063
+ WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean'
1064
+ ELSE c.DATA_TYPE
1065
+ END,
1066
+
1067
+ -- datatype_name: for enum columns, generate a synthetic name "TableName_ColumnName"
1068
+ -- (MySQL doesn't have named enum types like PostgreSQL)
1069
+ 'datatype_name', CASE
1070
+ WHEN c.DATA_TYPE = 'enum' THEN CONCAT(t.TABLE_NAME, '_', c.COLUMN_NAME)
1071
+ ELSE NULL
1072
+ END,
1073
+
1074
+ 'datatype_schema', '', -- MySQL doesn't support multi-schema
1075
+ 'length', c.CHARACTER_MAXIMUM_LENGTH, -- max length for string types (e.g., VARCHAR(255) -> 255)
1076
+ 'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), -- numeric or datetime precision
1077
+
1078
+ 'nullable', c.IS_NULLABLE = 'YES', -- true if column allows NULL
1079
+
1080
+ -- default: for auto_increment columns, report 'auto_increment' instead of NULL;
1081
+ -- otherwise use the COLUMN_DEFAULT value
1082
+ 'default', CASE
1083
+ WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment'
1084
+ ELSE c.COLUMN_DEFAULT
1085
+ END,
1086
+
1087
+ 'pk', c.COLUMN_KEY = 'PRI', -- true if column is part of the primary key
1088
+
1089
+ -- unique: true if the column has a single-column unique index.
1090
+ -- COLUMN_KEY = 'UNI' covers most cases, but may not be set when the column
1091
+ -- also participates in other indexes (showing 'MUL' instead on some MySQL versions).
1092
+ -- Also check INFORMATION_SCHEMA.STATISTICS for single-column unique indexes
1093
+ -- (NON_UNIQUE = 0) to match the PostgreSQL introspection behavior.
1094
+ 'unique', (
1095
+ c.COLUMN_KEY = 'UNI'
1096
+ OR EXISTS (
1097
+ SELECT 1
1098
+ FROM INFORMATION_SCHEMA.STATISTICS s_uni
1099
+ WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
1100
+ AND s_uni.TABLE_NAME = c.TABLE_NAME
1101
+ AND s_uni.COLUMN_NAME = c.COLUMN_NAME
1102
+ AND s_uni.NON_UNIQUE = 0
1103
+ AND s_uni.INDEX_NAME != 'PRIMARY'
1104
+ AND (
1105
+ SELECT COUNT(*)
1106
+ FROM INFORMATION_SCHEMA.STATISTICS s_cnt
1107
+ WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
1108
+ AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
1109
+ AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
1110
+ ) = 1
1111
+ )
1112
+ ),
1113
+ 'unique_name', (
1114
+ SELECT COALESCE(
1115
+ CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END,
1116
+ (
1117
+ SELECT s_uni.INDEX_NAME
1118
+ FROM INFORMATION_SCHEMA.STATISTICS s_uni
1119
+ WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
1120
+ AND s_uni.TABLE_NAME = c.TABLE_NAME
1121
+ AND s_uni.COLUMN_NAME = c.COLUMN_NAME
1122
+ AND s_uni.NON_UNIQUE = 0
1123
+ AND s_uni.INDEX_NAME != 'PRIMARY'
1124
+ AND (
1125
+ SELECT COUNT(*)
1126
+ FROM INFORMATION_SCHEMA.STATISTICS s_cnt
1127
+ WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
1128
+ AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
1129
+ AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
1130
+ ) = 1
1131
+ LIMIT 1
1132
+ )
1133
+ )
1134
+ ),
1135
+
1136
+ -- computed: true if column has a generation expression (virtual or stored)
1137
+ 'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '',
1138
+
1139
+ -- options: for enum columns, the full COLUMN_TYPE string (e.g., "enum('a','b','c')")
1140
+ -- which gets parsed into individual values later
1141
+ 'options', CASE
1142
+ WHEN c.DATA_TYPE = 'enum' THEN c.COLUMN_TYPE
1143
+ ELSE NULL
1144
+ END,
1145
+
1146
+ -- Foreign key info (NULL if column is not part of a FK)
1147
+ 'foreign_key_schema', NULL, -- MySQL doesn't support cross-schema FKs here
1148
+ 'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, -- referenced table
1149
+ 'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, -- referenced column
1150
+ 'foreign_key_name', kcu_fk.CONSTRAINT_NAME, -- FK constraint name
1151
+ 'foreign_key_on_update', rc.UPDATE_RULE, -- referential action on update (CASCADE, SET NULL, etc.)
1152
+ 'foreign_key_on_delete', rc.DELETE_RULE -- referential action on delete
1153
+ ) AS col_json
1154
+
1155
+ FROM INFORMATION_SCHEMA.COLUMNS c -- one row per column in the database
1156
+
1157
+ -- Join KEY_COLUMN_USAGE to find foreign key references for this column.
1158
+ -- Filter to only FK entries (REFERENCED_TABLE_NAME IS NOT NULL).
1159
+ LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk
1160
+ ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA
1161
+ AND c.TABLE_NAME = kcu_fk.TABLE_NAME
1162
+ AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME
1163
+ AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL
1164
+
1165
+ -- Join REFERENTIAL_CONSTRAINTS to get ON UPDATE / ON DELETE rules for the FK.
1166
+ LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc
1167
+ ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA
1168
+ AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME
1169
+
1170
+ WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA
1171
+ AND c.TABLE_NAME = t.TABLE_NAME
1172
+ ORDER BY c.ORDINAL_POSITION -- preserve original column order
1173
+ ) AS cols_ordered
1174
+ ) AS \`columns\`,
1175
+
1176
+ -- ===== INDEXES subquery =====
1177
+ -- Aggregates all indexes for this table into a JSON array.
1178
+ (
1179
+ SELECT JSON_ARRAYAGG(idx_json)
1180
+ FROM (
1181
+ SELECT JSON_OBJECT(
1182
+ 'name', s.INDEX_NAME, -- index name (e.g., 'PRIMARY', 'idx_email')
1183
+ 'method', s.INDEX_TYPE, -- index type (e.g., 'BTREE', 'HASH', 'FULLTEXT')
1184
+ 'unique', s.NON_UNIQUE = 0, -- NON_UNIQUE=0 means it IS unique
1185
+ 'primary', s.INDEX_NAME = 'PRIMARY', -- MySQL names the PK index 'PRIMARY'
1186
+ 'valid', TRUE, -- MySQL doesn't expose index validity status
1187
+ 'ready', TRUE, -- MySQL doesn't expose index readiness status
1188
+ 'partial', FALSE, -- MySQL doesn't support partial indexes
1189
+ 'predicate', NULL, -- no WHERE clause on indexes in MySQL
1190
+
1191
+ -- Index columns: nested subquery for columns in this index
1192
+ 'columns', (
1193
+ SELECT JSON_ARRAYAGG(idx_col_json)
1194
+ FROM (
1195
+ SELECT JSON_OBJECT(
1196
+ 'name', s2.COLUMN_NAME, -- column name in the index
1197
+ 'expression', NULL, -- MySQL doesn't expose expression indexes via STATISTICS
1198
+ -- COLLATION: 'A' = ascending, 'D' = descending, NULL = not sorted
1199
+ 'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END,
1200
+ 'nulls', NULL -- MySQL doesn't expose NULLS FIRST/LAST
1201
+ ) AS idx_col_json
1202
+ FROM INFORMATION_SCHEMA.STATISTICS s2 -- one row per column per index
1203
+ WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA
1204
+ AND s2.TABLE_NAME = s.TABLE_NAME
1205
+ AND s2.INDEX_NAME = s.INDEX_NAME
1206
+ ORDER BY s2.SEQ_IN_INDEX -- preserve column order within the index
1207
+ ) AS idx_cols_ordered
1208
+ )
1209
+ ) AS idx_json
1210
+ FROM (
1211
+ -- Deduplicate: STATISTICS has one row per (index, column), but we need one row per index.
1212
+ -- DISTINCT on INDEX_NAME gives us one entry per index with its metadata.
1213
+ SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME
1214
+ FROM INFORMATION_SCHEMA.STATISTICS
1215
+ WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME
1216
+ ) s
1217
+ ) AS idxs_ordered
1218
+ ) AS \`indexes\`
1219
+
1220
+ -- === Main FROM: INFORMATION_SCHEMA.TABLES lists all tables and views ===
1221
+ FROM INFORMATION_SCHEMA.TABLES t
1222
+ -- Join VIEWS to get VIEW_DEFINITION for view tables
1223
+ LEFT JOIN INFORMATION_SCHEMA.VIEWS v
1224
+ ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME
1225
+ WHERE t.TABLE_SCHEMA = ? -- only the target database
1226
+ AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') -- exclude system tables like SYSTEM VIEW
1227
+ AND t.TABLE_NAME <> '_prisma_migrations' -- exclude Prisma migration tracking table
1228
+ ORDER BY t.TABLE_NAME;
1229
+ `;
1230
+ }
1231
+ __name(getTableIntrospectionQuery, "getTableIntrospectionQuery");
1232
+ function getEnumIntrospectionQuery() {
1233
+ return `
1234
+ SELECT
1235
+ c.TABLE_NAME AS table_name, -- table containing the enum column
1236
+ c.COLUMN_NAME AS column_name, -- column name
1237
+ c.COLUMN_TYPE AS column_type -- full type string including values (e.g., "enum('val1','val2')")
1238
+ FROM INFORMATION_SCHEMA.COLUMNS c
1239
+ WHERE c.TABLE_SCHEMA = ? -- only the target database
1240
+ AND c.DATA_TYPE = 'enum' -- only enum columns
1241
+ ORDER BY c.TABLE_NAME, c.COLUMN_NAME;
1242
+ `;
1243
+ }
1244
+ __name(getEnumIntrospectionQuery, "getEnumIntrospectionQuery");
1245
+ function parseEnumValues(columnType) {
1246
+ const match = columnType.match(/^enum\((.+)\)$/i);
1247
+ if (!match || !match[1]) return [];
1248
+ const valuesString = match[1];
1249
+ const values = [];
1250
+ let current = "";
1251
+ let inQuote = false;
1252
+ let i = 0;
1253
+ while (i < valuesString.length) {
1254
+ const char = valuesString[i];
1255
+ if (char === "'" && !inQuote) {
1256
+ inQuote = true;
1257
+ i++;
1258
+ continue;
1259
+ }
1260
+ if (char === "'" && inQuote) {
1261
+ if (valuesString[i + 1] === "'") {
1262
+ current += "'";
1263
+ i += 2;
1264
+ continue;
1265
+ }
1266
+ values.push(current);
1267
+ current = "";
1268
+ inQuote = false;
1269
+ i++;
1270
+ while (i < valuesString.length && (valuesString[i] === "," || valuesString[i] === " ")) {
1271
+ i++;
1272
+ }
1273
+ continue;
1274
+ }
1275
+ if (inQuote) {
1276
+ current += char;
1277
+ }
1278
+ i++;
1279
+ }
1280
+ return values;
1281
+ }
1282
+ __name(parseEnumValues, "parseEnumValues");
1283
+
1284
+ // src/actions/pull/provider/postgresql.ts
1285
+ var import_factory3 = require("@zenstackhq/language/factory");
1286
+ var import_pg = require("pg");
1287
+ var pgTypnameToStandard = {
1288
+ int2: "smallint",
1289
+ int4: "integer",
1290
+ int8: "bigint",
1291
+ float4: "real",
1292
+ float8: "double precision",
1293
+ bool: "boolean",
1294
+ bpchar: "character",
1295
+ numeric: "decimal"
1296
+ };
1297
+ var standardTypePrecisions = {
1298
+ int2: 16,
1299
+ smallint: 16,
1300
+ int4: 32,
1301
+ integer: 32,
1302
+ int8: 64,
1303
+ bigint: 64,
1304
+ float4: 24,
1305
+ real: 24,
1306
+ float8: 53,
1307
+ "double precision": 53
1308
+ };
1309
+ var pgTypnameToZenStackNativeType = {
1310
+ // integers
1311
+ int2: "SmallInt",
1312
+ smallint: "SmallInt",
1313
+ int4: "Integer",
1314
+ integer: "Integer",
1315
+ int8: "BigInt",
1316
+ bigint: "BigInt",
1317
+ // decimals and floats
1318
+ numeric: "Decimal",
1319
+ decimal: "Decimal",
1320
+ float4: "Real",
1321
+ real: "Real",
1322
+ float8: "DoublePrecision",
1323
+ "double precision": "DoublePrecision",
1324
+ // boolean
1325
+ bool: "Boolean",
1326
+ boolean: "Boolean",
1327
+ // strings
1328
+ text: "Text",
1329
+ varchar: "VarChar",
1330
+ "character varying": "VarChar",
1331
+ bpchar: "Char",
1332
+ character: "Char",
1333
+ // uuid
1334
+ uuid: "Uuid",
1335
+ // dates/times
1336
+ date: "Date",
1337
+ time: "Time",
1338
+ timetz: "Timetz",
1339
+ timestamp: "Timestamp",
1340
+ timestamptz: "Timestamptz",
1341
+ // binary
1342
+ bytea: "ByteA",
1343
+ // json
1344
+ json: "Json",
1345
+ jsonb: "JsonB",
1346
+ // xml
1347
+ xml: "Xml",
1348
+ // network types
1349
+ inet: "Inet",
1350
+ // bit strings
1351
+ bit: "Bit",
1352
+ varbit: "VarBit",
1353
+ // oid
1354
+ oid: "Oid",
1355
+ // money
1356
+ money: "Money",
1357
+ // citext extension
1358
+ citext: "Citext"
1359
+ };
1360
+ var postgresql = {
1361
+ isSupportedFeature(feature) {
1362
+ const supportedFeatures = [
1363
+ "Schema",
1364
+ "NativeEnum"
1365
+ ];
1366
+ return supportedFeatures.includes(feature);
1367
+ },
1368
+ getBuiltinType(type2) {
1369
+ const t = (type2 || "").toLowerCase();
1370
+ const isArray = t.startsWith("_");
1371
+ switch (t.replace(/^_/, "")) {
1372
+ // integers
1373
+ case "int2":
1374
+ case "smallint":
1375
+ case "int4":
1376
+ case "integer":
1377
+ return {
1378
+ type: "Int",
1379
+ isArray
1380
+ };
1381
+ case "int8":
1382
+ case "bigint":
1383
+ return {
1384
+ type: "BigInt",
1385
+ isArray
1386
+ };
1387
+ // decimals and floats
1388
+ case "numeric":
1389
+ case "decimal":
1390
+ return {
1391
+ type: "Decimal",
1392
+ isArray
1393
+ };
1394
+ case "float4":
1395
+ case "real":
1396
+ case "float8":
1397
+ case "double precision":
1398
+ return {
1399
+ type: "Float",
1400
+ isArray
1401
+ };
1402
+ // boolean
1403
+ case "bool":
1404
+ case "boolean":
1405
+ return {
1406
+ type: "Boolean",
1407
+ isArray
1408
+ };
1409
+ // strings
1410
+ case "text":
1411
+ case "varchar":
1412
+ case "bpchar":
1413
+ case "character varying":
1414
+ case "character":
1415
+ return {
1416
+ type: "String",
1417
+ isArray
1418
+ };
1419
+ // uuid
1420
+ case "uuid":
1421
+ return {
1422
+ type: "String",
1423
+ isArray
1424
+ };
1425
+ // dates/times
1426
+ case "date":
1427
+ case "time":
1428
+ case "timetz":
1429
+ case "timestamp":
1430
+ case "timestamptz":
1431
+ return {
1432
+ type: "DateTime",
1433
+ isArray
1434
+ };
1435
+ // binary
1436
+ case "bytea":
1437
+ return {
1438
+ type: "Bytes",
1439
+ isArray
1440
+ };
1441
+ // json
1442
+ case "json":
1443
+ case "jsonb":
1444
+ return {
1445
+ type: "Json",
1446
+ isArray
1447
+ };
1448
+ default:
1449
+ return {
1450
+ type: "Unsupported",
1451
+ isArray
1452
+ };
1453
+ }
1454
+ },
1455
+ async introspect(connectionString, options) {
1456
+ const client = new import_pg.Client({
1457
+ connectionString
1458
+ });
1459
+ await client.connect();
1460
+ try {
1461
+ const { rows: tables } = await client.query(tableIntrospectionQuery);
1462
+ const { rows: enums } = await client.query(enumIntrospectionQuery);
1463
+ const filteredTables = tables.filter((t) => options.schemas.includes(t.schema));
1464
+ const filteredEnums = enums.filter((e) => options.schemas.includes(e.schema_name));
1465
+ return {
1466
+ enums: filteredEnums,
1467
+ tables: filteredTables
1468
+ };
1469
+ } finally {
1470
+ await client.end();
1471
+ }
1472
+ },
1473
+ getDefaultDatabaseType(type2) {
1474
+ switch (type2) {
1475
+ case "String":
1476
+ return {
1477
+ type: "text"
1478
+ };
1479
+ case "Boolean":
1480
+ return {
1481
+ type: "boolean"
1482
+ };
1483
+ case "Int":
1484
+ return {
1485
+ type: "integer"
1486
+ };
1487
+ case "BigInt":
1488
+ return {
1489
+ type: "bigint"
1490
+ };
1491
+ case "Float":
1492
+ return {
1493
+ type: "double precision"
1494
+ };
1495
+ case "Decimal":
1496
+ return {
1497
+ type: "decimal"
1498
+ };
1499
+ case "DateTime":
1500
+ return {
1501
+ type: "timestamp",
1502
+ precision: 3
1503
+ };
1504
+ case "Json":
1505
+ return {
1506
+ type: "jsonb"
1507
+ };
1508
+ case "Bytes":
1509
+ return {
1510
+ type: "bytea"
1511
+ };
1512
+ }
1513
+ },
1514
+ getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
1515
+ const val = defaultValue.trim();
1516
+ if (datatype === "enum" && datatype_name) {
1517
+ const enumDef = enums.find((e) => getDbName(e) === datatype_name);
1518
+ if (enumDef) {
1519
+ const enumValue = val.replace(/'/g, "").split("::")[0]?.trim();
1520
+ const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
1521
+ if (enumField) {
1522
+ return (ab) => ab.ReferenceExpr.setTarget(enumField);
1523
+ }
1524
+ }
1525
+ return typeCastingConvert({
1526
+ defaultValue,
1527
+ enums,
1528
+ val,
1529
+ services
1530
+ });
1531
+ }
1532
+ switch (fieldType) {
1533
+ case "DateTime":
1534
+ if (val === "CURRENT_TIMESTAMP" || val === "now()") {
1535
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
1536
+ }
1537
+ if (val.includes("::")) {
1538
+ return typeCastingConvert({
1539
+ defaultValue,
1540
+ enums,
1541
+ val,
1542
+ services
1543
+ });
1544
+ }
1545
+ return (ab) => ab.StringLiteral.setValue(val);
1546
+ case "Int":
1547
+ case "BigInt":
1548
+ if (val.startsWith("nextval(")) {
1549
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
1550
+ }
1551
+ if (val.includes("::")) {
1552
+ return typeCastingConvert({
1553
+ defaultValue,
1554
+ enums,
1555
+ val,
1556
+ services
1557
+ });
1558
+ }
1559
+ return (ab) => ab.NumberLiteral.setValue(val);
1560
+ case "Float":
1561
+ if (val.includes("::")) {
1562
+ return typeCastingConvert({
1563
+ defaultValue,
1564
+ enums,
1565
+ val,
1566
+ services
1567
+ });
1568
+ }
1569
+ return normalizeFloatDefault(val);
1570
+ case "Decimal":
1571
+ if (val.includes("::")) {
1572
+ return typeCastingConvert({
1573
+ defaultValue,
1574
+ enums,
1575
+ val,
1576
+ services
1577
+ });
1578
+ }
1579
+ return normalizeDecimalDefault(val);
1580
+ case "Boolean":
1581
+ return (ab) => ab.BooleanLiteral.setValue(val === "true");
1582
+ case "String":
1583
+ if (val.includes("::")) {
1584
+ return typeCastingConvert({
1585
+ defaultValue,
1586
+ enums,
1587
+ val,
1588
+ services
1589
+ });
1590
+ }
1591
+ if (val.startsWith("'") && val.endsWith("'")) {
1592
+ return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'"));
1593
+ }
1594
+ return (ab) => ab.StringLiteral.setValue(val);
1595
+ }
1596
+ if (val.includes("(") && val.includes(")")) {
1597
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
1598
+ }
1599
+ console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
1600
+ return null;
1601
+ },
1602
+ getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
1603
+ const factories = [];
1604
+ if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
1605
+ factories.push(new import_factory3.DataFieldAttributeFactory().setDecl(getAttributeRef("@updatedAt", services)));
1606
+ }
1607
+ const nativeTypeName = pgTypnameToZenStackNativeType[datatype.toLowerCase()] ?? datatype;
1608
+ const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${nativeTypeName.toLowerCase()}`)?.node;
1609
+ const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
1610
+ const normalizedDatatype = pgTypnameToStandard[datatype.toLowerCase()] ?? datatype.toLowerCase();
1611
+ const standardPrecision = standardTypePrecisions[datatype.toLowerCase()];
1612
+ const isStandardPrecision = standardPrecision !== void 0 && precision === standardPrecision;
1613
+ if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== normalizedDatatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
1614
+ const dbAttrFactory = new import_factory3.DataFieldAttributeFactory().setDecl(dbAttr);
1615
+ if ((length || precision) && !isStandardPrecision) {
1616
+ dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length || precision));
1617
+ }
1618
+ factories.push(dbAttrFactory);
1619
+ }
1620
+ return factories;
1621
+ }
1622
+ };
1623
+ var enumIntrospectionQuery = `
1624
+ SELECT
1625
+ n.nspname AS schema_name, -- schema the enum belongs to (e.g., 'public')
1626
+ t.typname AS enum_type, -- enum type name as defined in CREATE TYPE
1627
+ coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values -- ordered list of enum labels as JSON array
1628
+ FROM pg_type t -- pg_type: catalog of all data types
1629
+ JOIN pg_enum e ON t.oid = e.enumtypid -- pg_enum: one row per enum label; join to get labels for this enum type
1630
+ JOIN pg_namespace n ON n.oid = t.typnamespace -- pg_namespace: schema info; join to get the schema name
1631
+ GROUP BY schema_name, enum_type -- one row per enum type, with all labels aggregated
1632
+ ORDER BY schema_name, enum_type;`;
1633
+ var tableIntrospectionQuery = `
1634
+ -- Main query: one row per table/view with columns and indexes as nested JSON arrays.
1635
+ -- Joins pg_class (tables/views) with pg_namespace (schemas).
1636
+ SELECT
1637
+ "ns"."nspname" AS "schema", -- schema name (e.g., 'public')
1638
+ "cls"."relname" AS "name", -- table or view name
1639
+ CASE "cls"."relkind" -- relkind: 'r' = ordinary table, 'v' = view
1640
+ WHEN 'r' THEN 'table'
1641
+ WHEN 'v' THEN 'view'
1642
+ ELSE NULL
1643
+ END AS "type",
1644
+ CASE -- for views, retrieve the SQL definition
1645
+ WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true)
1646
+ ELSE NULL
1647
+ END AS "definition",
1648
+
1649
+ -- ===== COLUMNS subquery =====
1650
+ -- Aggregates all columns for this table into a JSON array.
1651
+ (
1652
+ SELECT coalesce(json_agg(agg), '[]')
1653
+ FROM (
1654
+ SELECT
1655
+ "att"."attname" AS "name", -- column name
1656
+
1657
+ -- datatype: if the type is an enum, report 'enum';
1658
+ -- if the column is generated/computed, construct the full DDL-like type definition
1659
+ -- (e.g., "text GENERATED ALWAYS AS (expr) STORED") so it can be rendered as Unsupported("...");
1660
+ -- otherwise use the pg_type name.
1661
+ CASE
1662
+ WHEN EXISTS (
1663
+ SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
1664
+ WHERE "e"."enumtypid" = "typ"."oid"
1665
+ ) THEN 'enum'
1666
+ WHEN "att"."attgenerated" != '' THEN
1667
+ format_type("att"."atttypid", "att"."atttypmod")
1668
+ || ' GENERATED ALWAYS AS ('
1669
+ || pg_get_expr("def"."adbin", "def"."adrelid")
1670
+ || ') '
1671
+ || CASE "att"."attgenerated"
1672
+ WHEN 's' THEN 'STORED'
1673
+ WHEN 'v' THEN 'VIRTUAL'
1674
+ ELSE 'STORED'
1675
+ END
1676
+ ELSE "typ"."typname"::text -- internal type name (e.g., 'int4', 'varchar', 'text'); cast to text to prevent CASE from coercing result to name type (max 63 chars)
1677
+ END AS "datatype",
1678
+
1679
+ -- datatype_name: for enums only, the actual enum type name (used to look up the enum definition)
1680
+ CASE
1681
+ WHEN EXISTS (
1682
+ SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
1683
+ WHERE "e"."enumtypid" = "typ"."oid"
1684
+ ) THEN "typ"."typname"
1685
+ ELSE NULL
1686
+ END AS "datatype_name",
1687
+
1688
+ "tns"."nspname" AS "datatype_schema", -- schema where the data type is defined
1689
+ "c"."character_maximum_length" AS "length", -- max length for char/varchar types (from information_schema)
1690
+ COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", -- numeric or datetime precision
1691
+
1692
+ -- Foreign key info (NULL if column is not part of a FK constraint)
1693
+ "fk_ns"."nspname" AS "foreign_key_schema", -- schema of the referenced table
1694
+ "fk_cls"."relname" AS "foreign_key_table", -- referenced table name
1695
+ "fk_att"."attname" AS "foreign_key_column", -- referenced column name
1696
+ "fk_con"."conname" AS "foreign_key_name", -- FK constraint name
1697
+
1698
+ -- FK referential actions: decode single-char codes to human-readable strings
1699
+ CASE "fk_con"."confupdtype"
1700
+ WHEN 'a' THEN 'NO ACTION'
1701
+ WHEN 'r' THEN 'RESTRICT'
1702
+ WHEN 'c' THEN 'CASCADE'
1703
+ WHEN 'n' THEN 'SET NULL'
1704
+ WHEN 'd' THEN 'SET DEFAULT'
1705
+ ELSE NULL
1706
+ END AS "foreign_key_on_update",
1707
+ CASE "fk_con"."confdeltype"
1708
+ WHEN 'a' THEN 'NO ACTION'
1709
+ WHEN 'r' THEN 'RESTRICT'
1710
+ WHEN 'c' THEN 'CASCADE'
1711
+ WHEN 'n' THEN 'SET NULL'
1712
+ WHEN 'd' THEN 'SET DEFAULT'
1713
+ ELSE NULL
1714
+ END AS "foreign_key_on_delete",
1715
+
1716
+ -- pk: true if this column is part of the table's primary key constraint
1717
+ "pk_con"."conkey" IS NOT NULL AS "pk",
1718
+
1719
+ -- unique: true if the column has a single-column UNIQUE constraint OR a single-column unique index
1720
+ (
1721
+ -- Check for a single-column UNIQUE constraint (contype = 'u')
1722
+ EXISTS (
1723
+ SELECT 1
1724
+ FROM "pg_catalog"."pg_constraint" AS "u_con"
1725
+ WHERE "u_con"."contype" = 'u' -- 'u' = unique constraint
1726
+ AND "u_con"."conrelid" = "cls"."oid" -- on this table
1727
+ AND array_length("u_con"."conkey", 1) = 1 -- single-column only
1728
+ AND "att"."attnum" = ANY ("u_con"."conkey") -- this column is in the constraint
1729
+ )
1730
+ OR
1731
+ -- Check for a single-column unique index (may exist without an explicit constraint)
1732
+ EXISTS (
1733
+ SELECT 1
1734
+ FROM "pg_catalog"."pg_index" AS "u_idx"
1735
+ WHERE "u_idx"."indrelid" = "cls"."oid" -- on this table
1736
+ AND "u_idx"."indisunique" = TRUE -- it's a unique index
1737
+ AND "u_idx"."indnkeyatts" = 1 -- single key column
1738
+ AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) -- this column is the key
1739
+ )
1740
+ ) AS "unique",
1741
+
1742
+ -- unique_name: the name of the unique constraint or index (whichever exists first)
1743
+ (
1744
+ SELECT COALESCE(
1745
+ -- Try constraint name first
1746
+ (
1747
+ SELECT "u_con"."conname"
1748
+ FROM "pg_catalog"."pg_constraint" AS "u_con"
1749
+ WHERE "u_con"."contype" = 'u'
1750
+ AND "u_con"."conrelid" = "cls"."oid"
1751
+ AND array_length("u_con"."conkey", 1) = 1
1752
+ AND "att"."attnum" = ANY ("u_con"."conkey")
1753
+ LIMIT 1
1754
+ ),
1755
+ -- Fall back to unique index name
1756
+ (
1757
+ SELECT "u_idx_cls"."relname"
1758
+ FROM "pg_catalog"."pg_index" AS "u_idx"
1759
+ JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid"
1760
+ WHERE "u_idx"."indrelid" = "cls"."oid"
1761
+ AND "u_idx"."indisunique" = TRUE
1762
+ AND "u_idx"."indnkeyatts" = 1
1763
+ AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[])
1764
+ LIMIT 1
1765
+ )
1766
+ )
1767
+ ) AS "unique_name",
1768
+
1769
+ "att"."attgenerated" != '' AS "computed", -- true if column is a generated/computed column
1770
+ -- For generated columns, pg_attrdef stores the generation expression (not a default),
1771
+ -- so we must null it out to avoid emitting a spurious @default(dbgenerated(...)) attribute.
1772
+ CASE
1773
+ WHEN "att"."attgenerated" != '' THEN NULL
1774
+ ELSE pg_get_expr("def"."adbin", "def"."adrelid")
1775
+ END AS "default", -- column default expression as text (e.g., 'nextval(...)', '0', 'now()')
1776
+ "att"."attnotnull" != TRUE AS "nullable", -- true if column allows NULL values
1777
+
1778
+ -- options: for enum columns, aggregates all allowed enum labels into a JSON array
1779
+ coalesce(
1780
+ (
1781
+ SELECT json_agg("enm"."enumlabel") AS "o"
1782
+ FROM "pg_catalog"."pg_enum" AS "enm"
1783
+ WHERE "enm"."enumtypid" = "typ"."oid"
1784
+ ),
1785
+ '[]'
1786
+ ) AS "options"
1787
+
1788
+ -- === FROM / JOINs for the columns subquery ===
1789
+
1790
+ -- pg_attribute: one row per table column (attnum >= 0 excludes system columns)
1791
+ FROM "pg_catalog"."pg_attribute" AS "att"
1792
+
1793
+ -- pg_type: data type of the column (e.g., int4, text, custom_enum)
1794
+ INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid"
1795
+
1796
+ -- pg_namespace for the type: needed to determine which schema the type lives in
1797
+ INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace"
1798
+
1799
+ -- information_schema.columns: provides length/precision info not easily available from pg_catalog
1800
+ LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname"
1801
+ AND "c"."table_name" = "cls"."relname"
1802
+ AND "c"."column_name" = "att"."attname"
1803
+
1804
+ -- pg_constraint (primary key): join on contype='p' to detect if column is part of PK
1805
+ LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p'
1806
+ AND "pk_con"."conrelid" = "cls"."oid"
1807
+ AND "att"."attnum" = ANY ("pk_con"."conkey")
1808
+
1809
+ -- pg_constraint (foreign key): join on contype='f' to get FK details for this column
1810
+ LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f'
1811
+ AND "fk_con"."conrelid" = "cls"."oid"
1812
+ AND "att"."attnum" = ANY ("fk_con"."conkey")
1813
+
1814
+ -- pg_class for FK target table: resolve the referenced table's OID to its name
1815
+ LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid"
1816
+
1817
+ -- pg_namespace for FK target: get the schema of the referenced table
1818
+ LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace"
1819
+
1820
+ -- pg_attribute for FK target column: resolve the referenced column number to its name.
1821
+ -- Use array_position to correlate by position: find this source column's index in conkey,
1822
+ -- then pick the referenced attnum at that same index from confkey.
1823
+ -- This ensures composite FKs correctly map each source column to its corresponding target column.
1824
+ LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid"
1825
+ AND "fk_att"."attnum" = "fk_con"."confkey"[array_position("fk_con"."conkey", "att"."attnum")]
1826
+
1827
+ -- pg_attrdef: column defaults; adbin contains the internal expression, decoded via pg_get_expr()
1828
+ LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum"
1829
+
1830
+ WHERE
1831
+ "att"."attrelid" = "cls"."oid" -- only columns belonging to this table
1832
+ AND "att"."attnum" >= 0 -- exclude system columns (ctid, xmin, etc. have attnum < 0)
1833
+ AND "att"."attisdropped" != TRUE -- exclude dropped (deleted) columns
1834
+ ORDER BY "att"."attnum" -- preserve original column order
1835
+ ) AS agg
1836
+ ) AS "columns",
1837
+
1838
+ -- ===== INDEXES subquery =====
1839
+ -- Aggregates all indexes for this table into a JSON array.
1840
+ (
1841
+ SELECT coalesce(json_agg(agg), '[]')
1842
+ FROM (
1843
+ SELECT
1844
+ "idx_cls"."relname" AS "name", -- index name
1845
+ "am"."amname" AS "method", -- access method (e.g., 'btree', 'hash', 'gin', 'gist')
1846
+ "idx"."indisunique" AS "unique", -- true if unique index
1847
+ "idx"."indisprimary" AS "primary", -- true if this is the PK index
1848
+ "idx"."indisvalid" AS "valid", -- false during concurrent index builds
1849
+ "idx"."indisready" AS "ready", -- true when index is ready for inserts
1850
+ ("idx"."indpred" IS NOT NULL) AS "partial", -- true if index has a WHERE clause (partial index)
1851
+ pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", -- the WHERE clause expression for partial indexes
1852
+
1853
+ -- Index columns: iterate over each position in the index key array
1854
+ (
1855
+ SELECT json_agg(
1856
+ json_build_object(
1857
+ -- 'name': column name, or for expression indexes the expression text
1858
+ 'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)),
1859
+ -- 'expression': non-null only for expression-based index columns (e.g., lower(name))
1860
+ 'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END,
1861
+ -- 'order': sort direction; bit 0 of indoption = 1 means DESC
1862
+ 'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END,
1863
+ -- 'nulls': null ordering; bit 1 of indoption = 1 means NULLS FIRST
1864
+ 'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END
1865
+ )
1866
+ ORDER BY "s"."i" -- preserve column order within the index
1867
+ )
1868
+ -- generate_subscripts creates one row per index key position (1-based)
1869
+ FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i")
1870
+ -- Join to pg_attribute to resolve column numbers to names
1871
+ -- NULL attname means it's an expression index column
1872
+ LEFT JOIN "pg_catalog"."pg_attribute" AS "att"
1873
+ ON "att"."attrelid" = "cls"."oid"
1874
+ AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"]
1875
+ ) AS "columns"
1876
+
1877
+ FROM "pg_catalog"."pg_index" AS "idx" -- pg_index: one row per index
1878
+ JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" -- index's own pg_class entry (for the name)
1879
+ JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" -- access method catalog
1880
+ WHERE "idx"."indrelid" = "cls"."oid" -- only indexes on this table
1881
+ ORDER BY "idx_cls"."relname"
1882
+ ) AS agg
1883
+ ) AS "indexes"
1884
+
1885
+ -- === Main FROM: pg_class (tables and views) joined with pg_namespace (schemas) ===
1886
+ FROM "pg_catalog"."pg_class" AS "cls"
1887
+ INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid"
1888
+ WHERE
1889
+ "ns"."nspname" !~ '^pg_' -- exclude PostgreSQL internal schemas (pg_catalog, pg_toast, etc.)
1890
+ AND "ns"."nspname" != 'information_schema' -- exclude the information_schema
1891
+ AND "cls"."relkind" IN ('r', 'v') -- only tables ('r') and views ('v')
1892
+ AND "cls"."relname" !~ '^pg_' -- exclude system tables starting with pg_
1893
+ AND "cls"."relname" !~ '_prisma_migrations' -- exclude Prisma migration tracking table
1894
+ ORDER BY "ns"."nspname", "cls"."relname" ASC;
1895
+ `;
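The aliases in the query above imply roughly the following per-table result shape. This is an inferred sketch, not a published ZenStack type, and the JSON-aggregated columns/indexes values may arrive as strings depending on the driver:

```ts
// Inferred from the SQL aliases in tableIntrospectionQuery above; illustrative only.
interface IntrospectedColumn {
    name: string;
    datatype: string;              // 'enum', a pg type name, or a generated-column DDL string
    datatype_name: string | null;  // enum type name when datatype === 'enum'
    datatype_schema: string;
    length: number | null;
    precision: number | null;
    foreign_key_schema: string | null;
    foreign_key_table: string | null;
    foreign_key_column: string | null;
    foreign_key_name: string | null;
    foreign_key_on_update: 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null;
    foreign_key_on_delete: 'NO ACTION' | 'RESTRICT' | 'CASCADE' | 'SET NULL' | 'SET DEFAULT' | null;
    pk: boolean;
    unique: boolean;
    unique_name: string | null;
    computed: boolean;
    default: string | null;        // default expression text; null for generated columns
    nullable: boolean;
    options: string[];             // enum labels; empty for non-enum columns
}

interface IntrospectedIndexColumn {
    name: string;
    expression: string | null;     // non-null only for expression index columns
    order: 'ASC' | 'DESC';
    nulls: 'NULLS FIRST' | 'NULLS LAST';
}

interface IntrospectedIndex {
    name: string;
    method: string;                // 'btree', 'hash', 'gin', ...
    unique: boolean;
    primary: boolean;
    valid: boolean;
    ready: boolean;
    partial: boolean;
    predicate: string | null;      // WHERE clause of a partial index
    columns: IntrospectedIndexColumn[];
}

interface IntrospectedTable {
    schema: string;
    name: string;
    type: 'table' | 'view';
    definition: string | null;     // view SQL definition; null for tables
    columns: IntrospectedColumn[];
    indexes: IntrospectedIndex[];
}
```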
1896
+ function typeCastingConvert({ defaultValue, enums, val, services }) {
1897
+ const [value, type2] = val.replace(/'/g, "").split("::").map((s) => s.trim());
1898
+ switch (type2) {
1899
+ case "character varying":
1900
+ case "uuid":
1901
+ case "json":
1902
+ case "jsonb":
1903
+ case "text":
1904
+ if (value === "NULL") return null;
1905
+ return (ab) => ab.StringLiteral.setValue(value);
1906
+ case "real":
1907
+ return (ab) => ab.NumberLiteral.setValue(value);
1908
+ default: {
1909
+ const enumDef = enums.find((e) => getDbName(e, true) === type2);
1910
+ if (!enumDef) {
1911
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
1912
+ }
1913
+ const enumField = enumDef.fields.find((v) => getDbName(v) === value);
1914
+ if (!enumField) {
1915
+ throw new CliError(`Enum value ${value} not found in enum ${type2} for default value ${defaultValue}`);
1916
+ }
1917
+ return (ab) => ab.ReferenceExpr.setTarget(enumField);
1918
+ }
1919
+ }
1920
+ }
1921
+ __name(typeCastingConvert, "typeCastingConvert");
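typeCastingConvert handles Postgres defaults that carry an explicit cast, e.g. 'MEMBER'::user_role or 'hello'::text: the value and target type are split on "::", known scalar casts become string/number literals, a matching enum becomes a reference to its field, and anything else falls back to dbgenerated(...). A tiny, self-contained sketch of the parsing step (hypothetical helper mirroring the first line of the function):

```ts
// Split a Postgres cast default into its value and target type, as
// typeCastingConvert above does before deciding how to map it.
function splitCastDefault(raw: string): { value: string; type: string } {
    const [value = '', type = ''] = raw
        .replace(/'/g, '')
        .split('::')
        .map((s) => s.trim());
    return { value, type };
}

// splitCastDefault(`'MEMBER'::user_role`) -> { value: 'MEMBER', type: 'user_role' }  => enum reference (if user_role is a known enum)
// splitCastDefault(`'hello'::text`)       -> { value: 'hello', type: 'text' }        => string literal
// splitCastDefault(`'1.5'::real`)         -> { value: '1.5', type: 'real' }          => number literal
```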
1922
+
1923
+ // src/actions/pull/provider/sqlite.ts
1924
+ var import_factory4 = require("@zenstackhq/language/factory");
1925
+ var sqlite = {
1926
+ isSupportedFeature(feature) {
1927
+ switch (feature) {
1928
+ case "Schema":
1929
+ return false;
1930
+ case "NativeEnum":
1931
+ return false;
1932
+ default:
1933
+ return false;
1934
+ }
1935
+ },
1936
+ getBuiltinType(type2) {
1937
+ const t = (type2 || "").toLowerCase().trim().replace(/\(.*\)$/, "").trim();
1938
+ const isArray = false;
1939
+ switch (t) {
1940
+ // INTEGER types (SQLite: INT, INTEGER, TINYINT, SMALLINT, MEDIUMINT, INT2, INT8)
1941
+ case "integer":
1942
+ case "int":
1943
+ case "tinyint":
1944
+ case "smallint":
1945
+ case "mediumint":
1946
+ case "int2":
1947
+ case "int8":
1948
+ return {
1949
+ type: "Int",
1950
+ isArray
1951
+ };
1952
+ // BIGINT - map to BigInt for large integers
1953
+ case "bigint":
1954
+ case "unsigned big int":
1955
+ return {
1956
+ type: "BigInt",
1957
+ isArray
1958
+ };
1959
+ // TEXT types (SQLite: CHARACTER, VARCHAR, VARYING CHARACTER, NCHAR, NATIVE CHARACTER, NVARCHAR, TEXT, CLOB)
1960
+ case "text":
1961
+ case "varchar":
1962
+ case "char":
1963
+ case "character":
1964
+ case "varying character":
1965
+ case "nchar":
1966
+ case "native character":
1967
+ case "nvarchar":
1968
+ case "clob":
1969
+ return {
1970
+ type: "String",
1971
+ isArray
1972
+ };
1973
+ // BLOB type
1974
+ case "blob":
1975
+ return {
1976
+ type: "Bytes",
1977
+ isArray
1978
+ };
1979
+ // REAL types (SQLite: REAL, DOUBLE, DOUBLE PRECISION, FLOAT)
1980
+ case "real":
1981
+ case "float":
1982
+ case "double":
1983
+ case "double precision":
1984
+ return {
1985
+ type: "Float",
1986
+ isArray
1987
+ };
1988
+ // NUMERIC types (SQLite: NUMERIC, DECIMAL)
1989
+ case "numeric":
1990
+ case "decimal":
1991
+ return {
1992
+ type: "Decimal",
1993
+ isArray
1994
+ };
1995
+ // DateTime types
1996
+ case "datetime":
1997
+ case "date":
1998
+ case "time":
1999
+ case "timestamp":
2000
+ return {
2001
+ type: "DateTime",
2002
+ isArray
2003
+ };
2004
+ // JSON types
2005
+ case "json":
2006
+ case "jsonb":
2007
+ return {
2008
+ type: "Json",
2009
+ isArray
2010
+ };
2011
+ // Boolean types
2012
+ case "boolean":
2013
+ case "bool":
2014
+ return {
2015
+ type: "Boolean",
2016
+ isArray
2017
+ };
2018
+ default: {
2019
+ if (!t) {
2020
+ return {
2021
+ type: "Bytes",
2022
+ isArray
2023
+ };
2024
+ }
2025
+ if (t.includes("int")) {
2026
+ return {
2027
+ type: "Int",
2028
+ isArray
2029
+ };
2030
+ }
2031
+ if (t.includes("char") || t.includes("clob") || t.includes("text")) {
2032
+ return {
2033
+ type: "String",
2034
+ isArray
2035
+ };
2036
+ }
2037
+ if (t.includes("blob")) {
2038
+ return {
2039
+ type: "Bytes",
2040
+ isArray
2041
+ };
2042
+ }
2043
+ if (t.includes("real") || t.includes("floa") || t.includes("doub")) {
2044
+ return {
2045
+ type: "Float",
2046
+ isArray
2047
+ };
2048
+ }
2049
+ return {
2050
+ type: "Unsupported",
2051
+ isArray
2052
+ };
2053
+ }
2054
+ }
2055
+ },
2056
+ getDefaultDatabaseType() {
2057
+ return void 0;
2058
+ },
2059
+ async introspect(connectionString, _options) {
2060
+ const SQLite2 = (await import("better-sqlite3")).default;
2061
+ const db = new SQLite2(connectionString, {
2062
+ readonly: true
2063
+ });
2064
+ try {
2065
+ const all = /* @__PURE__ */ __name((sql) => {
2066
+ const stmt = db.prepare(sql);
2067
+ return stmt.all();
2068
+ }, "all");
2069
+ const tablesRaw = all("SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name");
2070
+ const autoIncrementTables = /* @__PURE__ */ new Set();
2071
+ for (const t of tablesRaw) {
2072
+ if (t.type === "table" && t.definition) {
2073
+ if (/\bAUTOINCREMENT\b/i.test(t.definition)) {
2074
+ autoIncrementTables.add(t.name);
2075
+ }
2076
+ }
2077
+ }
2078
+ const tables = [];
2079
+ for (const t of tablesRaw) {
2080
+ const tableName = t.name;
2081
+ const schema = "";
2082
+ const hasAutoIncrement = autoIncrementTables.has(tableName);
2083
+ const columnsInfo = all(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`);
2084
+ const tableNameEsc = tableName.replace(/'/g, "''");
2085
+ const idxList = all(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => !r.name.startsWith("sqlite_autoindex_"));
2086
+ const uniqueSingleColumn = /* @__PURE__ */ new Set();
2087
+ const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1);
2088
+ for (const idx of uniqueIndexRows) {
2089
+ const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
2090
+ if (idxCols.length === 1 && idxCols[0]?.name) {
2091
+ uniqueSingleColumn.add(idxCols[0].name);
2092
+ }
2093
+ }
2094
+ const indexes = idxList.map((idx) => {
2095
+ const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
2096
+ return {
2097
+ name: idx.name,
2098
+ method: null,
2099
+ unique: idx.unique === 1,
2100
+ primary: false,
2101
+ valid: true,
2102
+ ready: true,
2103
+ partial: idx.partial === 1,
2104
+ predicate: idx.partial === 1 ? "[partial]" : null,
2105
+ columns: idxCols.map((col) => ({
2106
+ name: col.name,
2107
+ expression: null,
2108
+ order: null,
2109
+ nulls: null
2110
+ }))
2111
+ };
2112
+ });
2113
+ const fkRows = all(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`);
2114
+ const fkConstraintNames = /* @__PURE__ */ new Map();
2115
+ if (t.definition) {
2116
+ const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(([^)]+)\)/gi;
2117
+ let match;
2118
+ while ((match = fkRegex.exec(t.definition)) !== null) {
2119
+ const constraintName = match[1] || match[2];
2120
+ const columnList = match[3];
2121
+ if (constraintName && columnList) {
2122
+ const columns2 = columnList.split(",").map((col) => col.trim().replace(/^["'`]|["'`]$/g, ""));
2123
+ for (const col of columns2) {
2124
+ if (col) {
2125
+ fkConstraintNames.set(col, constraintName);
2126
+ }
2127
+ }
2128
+ }
2129
+ }
2130
+ }
2131
+ const fkByColumn = /* @__PURE__ */ new Map();
2132
+ for (const fk of fkRows) {
2133
+ fkByColumn.set(fk.from, {
2134
+ foreign_key_schema: "",
2135
+ foreign_key_table: fk.table || null,
2136
+ foreign_key_column: fk.to || null,
2137
+ foreign_key_name: fkConstraintNames.get(fk.from) ?? null,
2138
+ foreign_key_on_update: fk.on_update ?? null,
2139
+ foreign_key_on_delete: fk.on_delete ?? null
2140
+ });
2141
+ }
2142
+ const generatedColDefs = t.definition ? extractColumnTypeDefs(t.definition) : /* @__PURE__ */ new Map();
2143
+ const columns = [];
2144
+ for (const c of columnsInfo) {
2145
+ const hidden = c.hidden ?? 0;
2146
+ if (hidden === 1) continue;
2147
+ const isGenerated = hidden === 2 || hidden === 3;
2148
+ const fk = fkByColumn.get(c.name);
2149
+ let defaultValue = c.dflt_value;
2150
+ if (hasAutoIncrement && c.pk) {
2151
+ defaultValue = "autoincrement";
2152
+ }
2153
+ let datatype = c.type || "";
2154
+ if (isGenerated) {
2155
+ const fullDef = generatedColDefs.get(c.name);
2156
+ if (fullDef) {
2157
+ datatype = fullDef;
2158
+ }
2159
+ }
2160
+ columns.push({
2161
+ name: c.name,
2162
+ datatype,
2163
+ datatype_name: null,
2164
+ length: null,
2165
+ precision: null,
2166
+ datatype_schema: schema,
2167
+ foreign_key_schema: fk?.foreign_key_schema ?? null,
2168
+ foreign_key_table: fk?.foreign_key_table ?? null,
2169
+ foreign_key_column: fk?.foreign_key_column ?? null,
2170
+ foreign_key_name: fk?.foreign_key_name ?? null,
2171
+ foreign_key_on_update: fk?.foreign_key_on_update ?? null,
2172
+ foreign_key_on_delete: fk?.foreign_key_on_delete ?? null,
2173
+ pk: !!c.pk,
2174
+ computed: isGenerated,
2175
+ nullable: c.notnull !== 1,
2176
+ default: defaultValue,
2177
+ unique: uniqueSingleColumn.has(c.name),
2178
+ unique_name: null
2179
+ });
2180
+ }
2181
+ tables.push({
2182
+ schema,
2183
+ name: tableName,
2184
+ columns,
2185
+ type: t.type,
2186
+ definition: t.definition,
2187
+ indexes
2188
+ });
2189
+ }
2190
+ const enums = [];
2191
+ return {
2192
+ tables,
2193
+ enums
2194
+ };
2195
+ } finally {
2196
+ db.close();
2197
+ }
2198
+ },
2199
+ getDefaultValue({ defaultValue, fieldType, services, enums }) {
2200
+ const val = defaultValue.trim();
2201
+ switch (fieldType) {
2202
+ case "DateTime":
2203
+ if (val === "CURRENT_TIMESTAMP" || val === "now()") {
2204
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
2205
+ }
2206
+ return (ab) => ab.StringLiteral.setValue(val);
2207
+ case "Int":
2208
+ case "BigInt":
2209
+ if (val === "autoincrement") {
2210
+ return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
2211
+ }
2212
+ return (ab) => ab.NumberLiteral.setValue(val);
2213
+ case "Float":
2214
+ return normalizeFloatDefault(val);
2215
+ case "Decimal":
2216
+ return normalizeDecimalDefault(val);
2217
+ case "Boolean":
2218
+ return (ab) => ab.BooleanLiteral.setValue(val === "true" || val === "1");
2219
+ case "String":
2220
+ if (val.startsWith("'") && val.endsWith("'")) {
2221
+ const strippedName = val.slice(1, -1);
2222
+ const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName));
2223
+ if (enumDef) {
2224
+ const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName);
2225
+ if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField);
2226
+ }
2227
+ return (ab) => ab.StringLiteral.setValue(strippedName);
2228
+ }
2229
+ return (ab) => ab.StringLiteral.setValue(val);
2230
+ }
2231
+ console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
2232
+ return null;
2233
+ },
2234
+ getFieldAttributes({ fieldName, fieldType, services }) {
2235
+ const factories = [];
2236
+ if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
2237
+ factories.push(new import_factory4.DataFieldAttributeFactory().setDecl(getAttributeRef("@updatedAt", services)));
2238
+ }
2239
+ return factories;
2240
+ }
2241
+ };
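For SQLite, declared column types are mapped to ZModel builtins by the affinity-style rules in getBuiltinType above (case-insensitive, any trailing "(n)" stripped, substring fallbacks for unknown names). Some illustrative inputs and the mappings they produce:

```ts
// Expected mappings for the SQLite getBuiltinType implementation above.
const sqliteTypeExamples: Array<[declared: string, zmodelType: string]> = [
    ['INTEGER', 'Int'],
    ['VARCHAR(255)', 'String'],          // "(255)" is stripped before matching
    ['unsigned big int', 'BigInt'],
    ['DOUBLE PRECISION', 'Float'],
    ['DECIMAL(10,2)', 'Decimal'],
    ['DATETIME', 'DateTime'],
    ['BLOB', 'Bytes'],
    ['', 'Bytes'],                       // untyped columns fall back to Bytes
    ['MYCUSTOMINT', 'Int'],              // fallback: name contains "int"
    ['GEOMETRY', 'Unsupported'],         // no affinity match at all
];
```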
2242
+ function extractColumnTypeDefs(ddl) {
2243
+ const openIdx = ddl.indexOf("(");
2244
+ if (openIdx === -1) return /* @__PURE__ */ new Map();
2245
+ let depth = 1;
2246
+ let closeIdx = -1;
2247
+ for (let i = openIdx + 1; i < ddl.length; i++) {
2248
+ if (ddl[i] === "(") depth++;
2249
+ else if (ddl[i] === ")") {
2250
+ depth--;
2251
+ if (depth === 0) {
2252
+ closeIdx = i;
2253
+ break;
2254
+ }
2255
+ }
2256
+ }
2257
+ if (closeIdx === -1) return /* @__PURE__ */ new Map();
2258
+ const content = ddl.substring(openIdx + 1, closeIdx);
2259
+ const defs = [];
2260
+ let current = "";
2261
+ depth = 0;
2262
+ for (const char of content) {
2263
+ if (char === "(") depth++;
2264
+ else if (char === ")") depth--;
2265
+ else if (char === "," && depth === 0) {
2266
+ defs.push(current.trim());
2267
+ current = "";
2268
+ continue;
2269
+ }
2270
+ current += char;
2271
+ }
2272
+ if (current.trim()) defs.push(current.trim());
2273
+ const result = /* @__PURE__ */ new Map();
2274
+ for (const def of defs) {
2275
+ const nameMatch = def.match(/^(?:["'`]([^"'`]+)["'`]|(\w+))\s+(.+)/s);
2276
+ if (nameMatch) {
2277
+ const name = nameMatch[1] || nameMatch[2];
2278
+ const typeDef = nameMatch[3];
2279
+ if (name && typeDef) {
2280
+ result.set(name, typeDef.trim());
2281
+ }
2282
+ }
2283
+ }
2284
+ return result;
2285
+ }
2286
+ __name(extractColumnTypeDefs, "extractColumnTypeDefs");
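extractColumnTypeDefs does a small balanced-parenthesis parse of the CREATE TABLE DDL so that generated columns can carry their full definition (which the introspector then renders as Unsupported("...")). For example:

```ts
// Roughly what extractColumnTypeDefs above returns for a table with a
// generated column: a map of column name -> remainder of its definition.
const ddl = `CREATE TABLE orders (
  id INTEGER PRIMARY KEY,
  subtotal REAL NOT NULL,
  tax REAL NOT NULL,
  "total" REAL GENERATED ALWAYS AS (subtotal + tax) STORED
)`;
// extractColumnTypeDefs(ddl) ->
//   'id'       => 'INTEGER PRIMARY KEY'
//   'subtotal' => 'REAL NOT NULL'
//   'tax'      => 'REAL NOT NULL'
//   'total'    => 'REAL GENERATED ALWAYS AS (subtotal + tax) STORED'
```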
2287
+
2288
+ // src/actions/pull/provider/index.ts
2289
+ var providers = {
2290
+ mysql,
2291
+ postgresql,
2292
+ sqlite
2293
+ };
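Each entry in this map implements the same informal contract. The shape below is inferred from how the providers are consumed elsewhere in this bundle and is not a published ZenStack type:

```ts
// Informal provider contract for `db pull`, inferred from call sites in this file.
interface PullProviderSketch {
    isSupportedFeature(feature: 'Schema' | 'NativeEnum'): boolean;
    getBuiltinType(dbType: string): { type: string; isArray: boolean };
    getDefaultDatabaseType(fieldType: string): { type: string; precision?: number } | undefined;
    introspect(
        connectionString: string,
        options: { schemas?: string[]; modelCasing?: string },
    ): Promise<{ tables: unknown[]; enums: unknown[] }>;
    getDefaultValue(args: unknown): unknown;       // expression-builder callback, or null to skip
    getFieldAttributes(args: unknown): unknown[];  // attribute factories for the field
}
```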
2294
+
251
2295
  // src/actions/db.ts
252
2296
  async function run2(command, options) {
253
2297
  switch (command) {
254
2298
  case "push":
255
2299
  await runPush(options);
256
2300
  break;
2301
+ case "pull":
2302
+ await runPull(options);
2303
+ break;
257
2304
  }
258
2305
  }
259
2306
  __name(run2, "run");
@@ -281,37 +2328,442 @@ async function runPush(options) {
281
2328
  }
282
2329
  }
283
2330
  __name(runPush, "runPush");
2331
+ async function runPull(options) {
2332
+ const spinner = (0, import_ora.default)();
2333
+ try {
2334
+ const schemaFile = getSchemaFile(options.schema);
2335
+ const outPath = options.output ? import_node_path2.default.resolve(options.output) : void 0;
2336
+ const treatAsFile = !!outPath && (import_node_fs2.default.existsSync(outPath) && import_node_fs2.default.lstatSync(outPath).isFile() || import_node_path2.default.extname(outPath) !== "");
2337
+ const { model, services } = await loadSchemaDocument(schemaFile, {
2338
+ returnServices: true,
2339
+ mergeImports: treatAsFile
2340
+ });
2341
+ const SUPPORTED_PROVIDERS = Object.keys(providers);
2342
+ const datasource = getDatasource(model);
2343
+ if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) {
2344
+ throw new CliError(`Unsupported datasource provider: ${datasource.provider}`);
2345
+ }
2346
+ const provider = providers[datasource.provider];
2347
+ if (!provider) {
2348
+ throw new CliError(`No introspection provider found for: ${datasource.provider}`);
2349
+ }
2350
+ spinner.start("Introspecting database...");
2351
+ const { enums, tables } = await provider.introspect(datasource.url, {
2352
+ schemas: datasource.allSchemas,
2353
+ modelCasing: options.modelCasing
2354
+ });
2355
+ spinner.succeed("Database introspected");
2356
+ console.log(import_colors4.default.blue("Syncing schema..."));
2357
+ const newModel = {
2358
+ $type: "Model",
2359
+ $container: void 0,
2360
+ $containerProperty: void 0,
2361
+ $containerIndex: void 0,
2362
+ declarations: [
2363
+ ...model.declarations.filter((d) => [
2364
+ "DataSource"
2365
+ ].includes(d.$type))
2366
+ ],
2367
+ imports: model.imports
2368
+ };
2369
+ syncEnums({
2370
+ dbEnums: enums,
2371
+ model: newModel,
2372
+ services,
2373
+ options,
2374
+ defaultSchema: datasource.defaultSchema,
2375
+ oldModel: model,
2376
+ provider
2377
+ });
2378
+ const resolvedRelations = [];
2379
+ for (const table of tables) {
2380
+ const relations = syncTable({
2381
+ table,
2382
+ model: newModel,
2383
+ provider,
2384
+ services,
2385
+ options,
2386
+ defaultSchema: datasource.defaultSchema,
2387
+ oldModel: model
2388
+ });
2389
+ resolvedRelations.push(...relations);
2390
+ }
2391
+ for (const relation of resolvedRelations) {
2392
+ const similarRelations = resolvedRelations.filter((rr) => {
2393
+ return rr !== relation && (rr.schema === relation.schema && rr.table === relation.table && rr.references.schema === relation.references.schema && rr.references.table === relation.references.table || rr.schema === relation.references.schema && rr.columns[0] === relation.references.columns[0] && rr.references.schema === relation.schema && rr.references.table === relation.table);
2394
+ }).length;
2395
+ const selfRelation = relation.references.schema === relation.schema && relation.references.table === relation.table;
2396
+ syncRelation({
2397
+ model: newModel,
2398
+ relation,
2399
+ services,
2400
+ options,
2401
+ selfRelation,
2402
+ similarRelations
2403
+ });
2404
+ }
2405
+ console.log(import_colors4.default.blue("Schema synced"));
2406
+ const baseDir = import_node_path2.default.dirname(import_node_path2.default.resolve(schemaFile));
2407
+ const baseDirUrlPath = new URL(`file://${baseDir}`).pathname;
2408
+ const docs = services.shared.workspace.LangiumDocuments.all.filter(({ uri }) => uri.path.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())).toArray();
2409
+ const docsSet = new Set(docs.map((d) => d.uri.toString()));
2410
+ console.log(import_colors4.default.bold("\nApplying changes to ZModel..."));
2411
+ const deletedModels = [];
2412
+ const deletedEnums = [];
2413
+ const addedModels = [];
2414
+ const addedEnums = [];
2415
+ const modelChanges = /* @__PURE__ */ new Map();
2416
+ const getModelChanges = /* @__PURE__ */ __name((modelName) => {
2417
+ if (!modelChanges.has(modelName)) {
2418
+ modelChanges.set(modelName, {
2419
+ addedFields: [],
2420
+ deletedFields: [],
2421
+ updatedFields: [],
2422
+ addedAttributes: [],
2423
+ deletedAttributes: [],
2424
+ updatedAttributes: []
2425
+ });
2426
+ }
2427
+ return modelChanges.get(modelName);
2428
+ }, "getModelChanges");
2429
+ services.shared.workspace.IndexManager.allElements("DataModel", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
2430
+ const model2 = decl.node.$container;
2431
+ const index = model2.declarations.findIndex((d) => d === decl.node);
2432
+ model2.declarations.splice(index, 1);
2433
+ deletedModels.push(import_colors4.default.red(`- Model ${decl.name} deleted`));
2434
+ });
2435
+ if (provider.isSupportedFeature("NativeEnum")) services.shared.workspace.IndexManager.allElements("Enum", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
2436
+ const model2 = decl.node.$container;
2437
+ const index = model2.declarations.findIndex((d) => d === decl.node);
2438
+ model2.declarations.splice(index, 1);
2439
+ deletedEnums.push(import_colors4.default.red(`- Enum ${decl.name} deleted`));
2440
+ });
2441
+ newModel.declarations.filter((d) => [
2442
+ import_ast4.DataModel,
2443
+ import_ast4.Enum
2444
+ ].includes(d.$type)).forEach((_declaration) => {
2445
+ const newDataModel = _declaration;
2446
+ const declarations = services.shared.workspace.IndexManager.allElements(newDataModel.$type, docsSet).toArray();
2447
+ const originalDataModel = declarations.find((d) => getDbName(d.node) === getDbName(newDataModel))?.node;
2448
+ if (!originalDataModel) {
2449
+ if (newDataModel.$type === "DataModel") {
2450
+ addedModels.push(import_colors4.default.green(`+ Model ${newDataModel.name} added`));
2451
+ } else if (newDataModel.$type === "Enum") {
2452
+ addedEnums.push(import_colors4.default.green(`+ Enum ${newDataModel.name} added`));
2453
+ }
2454
+ model.declarations.push(newDataModel);
2455
+ newDataModel.$container = model;
2456
+ newDataModel.fields.forEach((f) => {
2457
+ if (f.$type === "DataField" && f.type.reference?.ref) {
2458
+ const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
2459
+ if (ref && f.type.reference) {
2460
+ f.type.reference = {
2461
+ ref,
2462
+ $refText: ref.name ?? f.type.reference.$refText
2463
+ };
2464
+ }
2465
+ }
2466
+ });
2467
+ return;
2468
+ }
2469
+ newDataModel.fields.forEach((f) => {
2470
+ let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f));
2471
+ const isRelationField = f.$type === "DataField" && !!f.attributes?.some((a) => a?.decl?.ref?.name === "@relation");
2472
+ if (originalFields.length === 0 && isRelationField && !getRelationFieldsKey(f)) {
2473
+ return;
2474
+ }
2475
+ if (originalFields.length === 0) {
2476
+ const newFieldsKey = getRelationFieldsKey(f);
2477
+ if (newFieldsKey) {
2478
+ originalFields = originalDataModel.fields.filter((d) => getRelationFieldsKey(d) === newFieldsKey);
2479
+ }
2480
+ }
2481
+ if (originalFields.length === 0) {
2482
+ originalFields = originalDataModel.fields.filter((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
2483
+ }
2484
+ if (originalFields.length === 0) {
2485
+ originalFields = originalDataModel.fields.filter((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
2486
+ }
2487
+ if (originalFields.length > 1) {
2488
+ const isBackReferenceField = !getRelationFieldsKey(f);
2489
+ if (!isBackReferenceField) {
2490
+ console.warn(import_colors4.default.yellow(`Found multiple matching original fields; the field matching heuristic needs refinement. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(", ")}](${f.name})`));
2491
+ }
2492
+ return;
2493
+ }
2494
+ const originalField = originalFields.at(0);
2495
+ if (originalField && f.$type === "DataField" && originalField.$type === "DataField") {
2496
+ const newType = f.type;
2497
+ const oldType = originalField.type;
2498
+ const fieldUpdates = [];
2499
+ const isOldTypeEnumWithoutNativeSupport = oldType.reference?.ref?.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
2500
+ if (newType.type && oldType.type !== newType.type && !isOldTypeEnumWithoutNativeSupport) {
2501
+ fieldUpdates.push(`type: ${oldType.type} -> ${newType.type}`);
2502
+ oldType.type = newType.type;
2503
+ }
2504
+ if (newType.reference?.ref && oldType.reference?.ref) {
2505
+ const newRefName = getDbName(newType.reference.ref);
2506
+ const oldRefName = getDbName(oldType.reference.ref);
2507
+ if (newRefName !== oldRefName) {
2508
+ fieldUpdates.push(`reference: ${oldType.reference.$refText} -> ${newType.reference.$refText}`);
2509
+ oldType.reference = {
2510
+ ref: newType.reference.ref,
2511
+ $refText: newType.reference.$refText
2512
+ };
2513
+ }
2514
+ } else if (newType.reference?.ref && !oldType.reference) {
2515
+ fieldUpdates.push(`type: ${oldType.type} -> ${newType.reference.$refText}`);
2516
+ oldType.reference = newType.reference;
2517
+ oldType.type = void 0;
2518
+ } else if (!newType.reference && oldType.reference?.ref && newType.type) {
2519
+ const isEnumWithoutNativeSupport = oldType.reference.ref.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
2520
+ if (!isEnumWithoutNativeSupport) {
2521
+ fieldUpdates.push(`type: ${oldType.reference.$refText} -> ${newType.type}`);
2522
+ oldType.type = newType.type;
2523
+ oldType.reference = void 0;
2524
+ }
2525
+ }
2526
+ if (!!newType.optional !== !!oldType.optional) {
2527
+ fieldUpdates.push(`optional: ${!!oldType.optional} -> ${!!newType.optional}`);
2528
+ oldType.optional = newType.optional;
2529
+ }
2530
+ if (!!newType.array !== !!oldType.array) {
2531
+ fieldUpdates.push(`array: ${!!oldType.array} -> ${!!newType.array}`);
2532
+ oldType.array = newType.array;
2533
+ }
2534
+ if (fieldUpdates.length > 0) {
2535
+ getModelChanges(originalDataModel.name).updatedFields.push(import_colors4.default.yellow(`~ ${originalField.name} (${fieldUpdates.join(", ")})`));
2536
+ }
2537
+ const newDefaultAttr = f.attributes.find((a) => a.decl.$refText === "@default");
2538
+ const oldDefaultAttr = originalField.attributes.find((a) => a.decl.$refText === "@default");
2539
+ if (newDefaultAttr && oldDefaultAttr) {
2540
+ const serializeArgs = /* @__PURE__ */ __name((args) => args.map((arg) => {
2541
+ if (arg.value?.$type === "StringLiteral") return `"${arg.value.value}"`;
2542
+ if (arg.value?.$type === "NumberLiteral") return String(arg.value.value);
2543
+ if (arg.value?.$type === "BooleanLiteral") return String(arg.value.value);
2544
+ if (arg.value?.$type === "InvocationExpr") return arg.value.function?.$refText ?? "";
2545
+ if (arg.value?.$type === "ReferenceExpr") return arg.value.target?.$refText ?? "";
2546
+ if (arg.value?.$type === "ArrayExpr") {
2547
+ return `[${(arg.value.items ?? []).map((item) => {
2548
+ if (item.$type === "ReferenceExpr") return item.target?.$refText ?? "";
2549
+ return item.$type ?? "unknown";
2550
+ }).join(",")}]`;
2551
+ }
2552
+ return arg.value?.$type ?? "unknown";
2553
+ }).join(","), "serializeArgs");
2554
+ const newArgsStr = serializeArgs(newDefaultAttr.args ?? []);
2555
+ const oldArgsStr = serializeArgs(oldDefaultAttr.args ?? []);
2556
+ if (newArgsStr !== oldArgsStr) {
2557
+ oldDefaultAttr.args = newDefaultAttr.args.map((arg) => ({
2558
+ ...arg,
2559
+ $container: oldDefaultAttr
2560
+ }));
2561
+ getModelChanges(originalDataModel.name).updatedAttributes.push(import_colors4.default.yellow(`~ @default on ${originalDataModel.name}.${originalField.name}`));
2562
+ }
2563
+ }
2564
+ }
2565
+ if (!originalField) {
2566
+ getModelChanges(originalDataModel.name).addedFields.push(import_colors4.default.green(`+ ${f.name}`));
2567
+ f.$container = originalDataModel;
2568
+ originalDataModel.fields.push(f);
2569
+ if (f.$type === "DataField" && f.type.reference?.ref) {
2570
+ const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
2571
+ if (ref) {
2572
+ f.type.reference = {
2573
+ ref,
2574
+ $refText: ref.name ?? f.type.reference.$refText
2575
+ };
2576
+ }
2577
+ }
2578
+ return;
2579
+ }
2580
+ originalField.attributes.filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && ![
2581
+ "@map",
2582
+ "@@map",
2583
+ "@default",
2584
+ "@updatedAt"
2585
+ ].includes(attr.decl.$refText)).forEach((attr) => {
2586
+ const field = attr.$container;
2587
+ const index = field.attributes.findIndex((d) => d === attr);
2588
+ field.attributes.splice(index, 1);
2589
+ getModelChanges(originalDataModel.name).deletedAttributes.push(import_colors4.default.yellow(`- ${attr.decl.$refText} from field: ${originalDataModel.name}.${field.name}`));
2590
+ });
2591
+ f.attributes.filter((attr) => !originalField.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && ![
2592
+ "@map",
2593
+ "@@map",
2594
+ "@default",
2595
+ "@updatedAt"
2596
+ ].includes(attr.decl.$refText)).forEach((attr) => {
2597
+ const cloned = {
2598
+ ...attr,
2599
+ $container: originalField
2600
+ };
2601
+ originalField.attributes.push(cloned);
2602
+ getModelChanges(originalDataModel.name).addedAttributes.push(import_colors4.default.green(`+ ${attr.decl.$refText} to field: ${originalDataModel.name}.${f.name}`));
2603
+ });
2604
+ });
2605
+ originalDataModel.fields.filter((f) => {
2606
+ const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f));
2607
+ if (matchByDbName) return false;
2608
+ const originalFieldsKey = getRelationFieldsKey(f);
2609
+ if (originalFieldsKey) {
2610
+ const matchByFieldsKey = newDataModel.fields.find((d) => getRelationFieldsKey(d) === originalFieldsKey);
2611
+ if (matchByFieldsKey) return false;
2612
+ }
2613
+ const matchByFkName = newDataModel.fields.find((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
2614
+ if (matchByFkName) return false;
2615
+ const matchByTypeRef = newDataModel.fields.find((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
2616
+ return !matchByTypeRef;
2617
+ }).forEach((f) => {
2618
+ const _model = f.$container;
2619
+ const index = _model.fields.findIndex((d) => d === f);
2620
+ _model.fields.splice(index, 1);
2621
+ getModelChanges(_model.name).deletedFields.push(import_colors4.default.red(`- ${f.name}`));
2622
+ });
2623
+ });
2624
+ if (deletedModels.length > 0) {
2625
+ console.log(import_colors4.default.bold("\nDeleted Models:"));
2626
+ deletedModels.forEach((msg) => {
2627
+ console.log(msg);
2628
+ });
2629
+ }
2630
+ if (deletedEnums.length > 0) {
2631
+ console.log(import_colors4.default.bold("\nDeleted Enums:"));
2632
+ deletedEnums.forEach((msg) => {
2633
+ console.log(msg);
2634
+ });
2635
+ }
2636
+ if (addedModels.length > 0) {
2637
+ console.log(import_colors4.default.bold("\nAdded Models:"));
2638
+ addedModels.forEach((msg) => {
2639
+ console.log(msg);
2640
+ });
2641
+ }
2642
+ if (addedEnums.length > 0) {
2643
+ console.log(import_colors4.default.bold("\nAdded Enums:"));
2644
+ addedEnums.forEach((msg) => {
2645
+ console.log(msg);
2646
+ });
2647
+ }
2648
+ if (modelChanges.size > 0) {
2649
+ console.log(import_colors4.default.bold("\nModel Changes:"));
2650
+ modelChanges.forEach((changes, modelName) => {
2651
+ const hasChanges = changes.addedFields.length > 0 || changes.deletedFields.length > 0 || changes.updatedFields.length > 0 || changes.addedAttributes.length > 0 || changes.deletedAttributes.length > 0 || changes.updatedAttributes.length > 0;
2652
+ if (hasChanges) {
2653
+ console.log(import_colors4.default.cyan(` ${modelName}:`));
2654
+ if (changes.addedFields.length > 0) {
2655
+ console.log(import_colors4.default.gray(" Added Fields:"));
2656
+ changes.addedFields.forEach((msg) => {
2657
+ console.log(` ${msg}`);
2658
+ });
2659
+ }
2660
+ if (changes.deletedFields.length > 0) {
2661
+ console.log(import_colors4.default.gray(" Deleted Fields:"));
2662
+ changes.deletedFields.forEach((msg) => {
2663
+ console.log(` ${msg}`);
2664
+ });
2665
+ }
2666
+ if (changes.updatedFields.length > 0) {
2667
+ console.log(import_colors4.default.gray(" Updated Fields:"));
2668
+ changes.updatedFields.forEach((msg) => {
2669
+ console.log(` ${msg}`);
2670
+ });
2671
+ }
2672
+ if (changes.addedAttributes.length > 0) {
2673
+ console.log(import_colors4.default.gray(" Added Attributes:"));
2674
+ changes.addedAttributes.forEach((msg) => {
2675
+ console.log(` ${msg}`);
2676
+ });
2677
+ }
2678
+ if (changes.deletedAttributes.length > 0) {
2679
+ console.log(import_colors4.default.gray(" Deleted Attributes:"));
2680
+ changes.deletedAttributes.forEach((msg) => {
2681
+ console.log(` ${msg}`);
2682
+ });
2683
+ }
2684
+ if (changes.updatedAttributes.length > 0) {
2685
+ console.log(import_colors4.default.gray(" Updated Attributes:"));
2686
+ changes.updatedAttributes.forEach((msg) => {
2687
+ console.log(` ${msg}`);
2688
+ });
2689
+ }
2690
+ }
2691
+ });
2692
+ }
2693
+ const generator = new import_language2.ZModelCodeGenerator({
2694
+ quote: options.quote,
2695
+ indent: options.indent
2696
+ });
2697
+ if (options.output) {
2698
+ if (treatAsFile) {
2699
+ const zmodelSchema = await (0, import_language2.formatDocument)(generator.generate(newModel));
2700
+ console.log(import_colors4.default.blue(`Writing to ${outPath}`));
2701
+ import_node_fs2.default.mkdirSync(import_node_path2.default.dirname(outPath), {
2702
+ recursive: true
2703
+ });
2704
+ import_node_fs2.default.writeFileSync(outPath, zmodelSchema);
2705
+ } else {
2706
+ import_node_fs2.default.mkdirSync(outPath, {
2707
+ recursive: true
2708
+ });
2709
+ const baseDir2 = import_node_path2.default.dirname(import_node_path2.default.resolve(schemaFile));
2710
+ for (const { uri, parseResult: { value: documentModel } } of docs) {
2711
+ const zmodelSchema = await (0, import_language2.formatDocument)(generator.generate(documentModel));
2712
+ const relPath = import_node_path2.default.relative(baseDir2, uri.fsPath);
2713
+ const targetFile = import_node_path2.default.join(outPath, relPath);
2714
+ import_node_fs2.default.mkdirSync(import_node_path2.default.dirname(targetFile), {
2715
+ recursive: true
2716
+ });
2717
+ console.log(import_colors4.default.blue(`Writing to ${targetFile}`));
2718
+ import_node_fs2.default.writeFileSync(targetFile, zmodelSchema);
2719
+ }
2720
+ }
2721
+ } else {
2722
+ for (const { uri, parseResult: { value: documentModel } } of docs) {
2723
+ const zmodelSchema = await (0, import_language2.formatDocument)(generator.generate(documentModel));
2724
+ console.log(import_colors4.default.blue(`Writing to ${import_node_path2.default.relative(process.cwd(), uri.fsPath).replace(/\\/g, "/")}`));
2725
+ import_node_fs2.default.writeFileSync(uri.fsPath, zmodelSchema);
2726
+ }
2727
+ }
2728
+ console.log(import_colors4.default.green.bold("\nPull completed successfully!"));
2729
+ } catch (error) {
2730
+ spinner.fail("Pull failed");
2731
+ console.error(error);
2732
+ throw error;
2733
+ }
2734
+ }
2735
+ __name(runPull, "runPull");
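A note on the output handling at the end of runPull: when the output option (options.output) points at a file (it already exists as a file or has an extension), imports are merged and a single schema is written; otherwise it is treated as a directory and every loaded .zmodel document is regenerated under it, preserving relative paths. A minimal sketch of that decision, mirroring the lines above and assuming only Node's fs/path:

```ts
import fs from 'node:fs';
import path from 'node:path';

// Does the output option denote a single merged schema file or a directory?
function outputIsSingleFile(output: string | undefined): boolean {
    if (!output) return false; // no output: rewrite the source .zmodel files in place
    const outPath = path.resolve(output);
    return (
        (fs.existsSync(outPath) && fs.lstatSync(outPath).isFile()) ||
        path.extname(outPath) !== ''
    );
}
```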
284
2736
 
285
2737
  // src/actions/format.ts
286
- var import_language2 = require("@zenstackhq/language");
287
- var import_colors3 = __toESM(require("colors"), 1);
2738
+ var import_language3 = require("@zenstackhq/language");
2739
+ var import_colors5 = __toESM(require("colors"), 1);
288
2740
  var import_node_fs3 = __toESM(require("fs"), 1);
289
2741
  async function run3(options) {
290
2742
  const schemaFile = getSchemaFile(options.schema);
291
2743
  let formattedContent;
292
2744
  try {
293
- formattedContent = await (0, import_language2.formatDocument)(import_node_fs3.default.readFileSync(schemaFile, "utf-8"));
2745
+ formattedContent = await (0, import_language3.formatDocument)(import_node_fs3.default.readFileSync(schemaFile, "utf-8"));
294
2746
  } catch (error) {
295
- console.error(import_colors3.default.red("\u2717 Schema formatting failed."));
2747
+ console.error(import_colors5.default.red("\u2717 Schema formatting failed."));
296
2748
  throw error;
297
2749
  }
298
2750
  import_node_fs3.default.writeFileSync(schemaFile, formattedContent, "utf-8");
299
- console.log(import_colors3.default.green("\u2713 Schema formatting completed successfully."));
2751
+ console.log(import_colors5.default.green("\u2713 Schema formatting completed successfully."));
300
2752
  }
301
2753
  __name(run3, "run");
302
2754
 
303
2755
  // src/actions/generate.ts
304
- var import_common_helpers = require("@zenstackhq/common-helpers");
305
- var import_language3 = require("@zenstackhq/language");
306
- var import_ast2 = require("@zenstackhq/language/ast");
307
- var import_utils = require("@zenstackhq/language/utils");
308
- var import_colors4 = __toESM(require("colors"), 1);
2756
+ var import_common_helpers2 = require("@zenstackhq/common-helpers");
2757
+ var import_language4 = require("@zenstackhq/language");
2758
+ var import_ast5 = require("@zenstackhq/language/ast");
2759
+ var import_utils7 = require("@zenstackhq/language/utils");
2760
+ var import_colors6 = __toESM(require("colors"), 1);
309
2761
  var import_jiti = require("jiti");
310
2762
  var import_node_fs6 = __toESM(require("fs"), 1);
311
- var import_node_path4 = __toESM(require("path"), 1);
2763
+ var import_node_path5 = __toESM(require("path"), 1);
312
2764
  var import_node_url = require("url");
313
2765
  var import_chokidar = require("chokidar");
314
- var import_ora = __toESM(require("ora"), 1);
2766
+ var import_ora2 = __toESM(require("ora"), 1);
315
2767
 
316
2768
  // src/plugins/index.ts
317
2769
  var plugins_exports = {};
@@ -323,16 +2775,16 @@ __export(plugins_exports, {
323
2775
  // src/plugins/prisma.ts
324
2776
  var import_sdk2 = require("@zenstackhq/sdk");
325
2777
  var import_node_fs4 = __toESM(require("fs"), 1);
326
- var import_node_path2 = __toESM(require("path"), 1);
2778
+ var import_node_path3 = __toESM(require("path"), 1);
327
2779
  var plugin = {
328
2780
  name: "Prisma Schema Generator",
329
2781
  statusText: "Generating Prisma schema",
330
2782
  async generate({ model, defaultOutputPath, pluginOptions }) {
331
- let outFile = import_node_path2.default.join(defaultOutputPath, "schema.prisma");
2783
+ let outFile = import_node_path3.default.join(defaultOutputPath, "schema.prisma");
332
2784
  if (typeof pluginOptions["output"] === "string") {
333
- outFile = import_node_path2.default.resolve(defaultOutputPath, pluginOptions["output"]);
334
- if (!import_node_fs4.default.existsSync(import_node_path2.default.dirname(outFile))) {
335
- import_node_fs4.default.mkdirSync(import_node_path2.default.dirname(outFile), {
2785
+ outFile = import_node_path3.default.resolve(defaultOutputPath, pluginOptions["output"]);
2786
+ if (!import_node_fs4.default.existsSync(import_node_path3.default.dirname(outFile))) {
2787
+ import_node_fs4.default.mkdirSync(import_node_path3.default.dirname(outFile), {
336
2788
  recursive: true
337
2789
  });
338
2790
  }
@@ -346,14 +2798,14 @@ var prisma_default = plugin;
346
2798
  // src/plugins/typescript.ts
347
2799
  var import_sdk3 = require("@zenstackhq/sdk");
348
2800
  var import_node_fs5 = __toESM(require("fs"), 1);
349
- var import_node_path3 = __toESM(require("path"), 1);
2801
+ var import_node_path4 = __toESM(require("path"), 1);
350
2802
  var plugin2 = {
351
2803
  name: "TypeScript Schema Generator",
352
2804
  statusText: "Generating TypeScript schema",
353
2805
  async generate({ model, defaultOutputPath, pluginOptions }) {
354
2806
  let outDir = defaultOutputPath;
355
2807
  if (typeof pluginOptions["output"] === "string") {
356
- outDir = import_node_path3.default.resolve(defaultOutputPath, pluginOptions["output"]);
2808
+ outDir = import_node_path4.default.resolve(defaultOutputPath, pluginOptions["output"]);
357
2809
  if (!import_node_fs5.default.existsSync(outDir)) {
358
2810
  import_node_fs5.default.mkdirSync(outDir, {
359
2811
  recursive: true
@@ -382,10 +2834,10 @@ async function run4(options) {
382
2834
  if (options.watch) {
383
2835
  const logsEnabled = !options.silent;
384
2836
  if (logsEnabled) {
385
- console.log(import_colors4.default.green(`
2837
+ console.log(import_colors6.default.green(`
386
2838
  Enabled watch mode!`));
387
2839
  }
388
- const schemaExtensions = import_language3.ZModelLanguageMetaData.fileExtensions;
2840
+ const schemaExtensions = import_language4.ZModelLanguageMetaData.fileExtensions;
389
2841
  const getRootModelWatchPaths = /* @__PURE__ */ __name((model2) => new Set(model2.declarations.filter((v) => v.$cstNode?.parent?.element.$type === "Model" && !!v.$cstNode.parent.element.$document?.uri?.fsPath).map((v) => v.$cstNode.parent.element.$document.uri.fsPath)), "getRootModelWatchPaths");
390
2842
  const watchedPaths = getRootModelWatchPaths(model);
391
2843
  if (logsEnabled) {
@@ -403,7 +2855,7 @@ ${logPaths}`);
403
2855
  ignorePermissionErrors: true,
404
2856
  ignored: /* @__PURE__ */ __name((at) => !schemaExtensions.some((ext) => at.endsWith(ext)), "ignored")
405
2857
  });
406
- const reGenerateSchema = (0, import_common_helpers.singleDebounce)(async () => {
2858
+ const reGenerateSchema = (0, import_common_helpers2.singleDebounce)(async () => {
407
2859
  if (logsEnabled) {
408
2860
  console.log("Got changes, run generation!");
409
2861
  }
@@ -459,14 +2911,14 @@ async function pureGenerate(options, fromWatch) {
459
2911
  const outputPath = getOutputPath(options, schemaFile);
460
2912
  await runPlugins(schemaFile, model, outputPath, options);
461
2913
  if (!options.silent) {
462
- console.log(import_colors4.default.green(`Generation completed successfully in ${Date.now() - start}ms.
2914
+ console.log(import_colors6.default.green(`Generation completed successfully in ${Date.now() - start}ms.
463
2915
  `));
464
2916
  if (!fromWatch) {
465
2917
  console.log(`You can now create a ZenStack client with it.
466
2918
 
467
2919
  \`\`\`ts
468
2920
  import { ZenStackClient } from '@zenstackhq/orm';
469
- import { schema } from '${import_node_path4.default.relative(".", outputPath)}/schema';
2921
+ import { schema } from '${import_node_path5.default.relative(".", outputPath)}/schema';
470
2922
 
471
2923
  const client = new ZenStackClient(schema, {
472
2924
  dialect: { ... }
@@ -480,7 +2932,7 @@ Check documentation: https://zenstack.dev/docs/`);
480
2932
  }
481
2933
  __name(pureGenerate, "pureGenerate");
482
2934
  async function runPlugins(schemaFile, model, outputPath, options) {
483
- const plugins = model.declarations.filter(import_ast2.isPlugin);
2935
+ const plugins = model.declarations.filter(import_ast5.isPlugin);
484
2936
  const processedPlugins = [];
485
2937
  for (const plugin3 of plugins) {
486
2938
  const provider = getPluginProvider(plugin3);
@@ -491,7 +2943,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
491
2943
  throw new CliError(`Unknown core plugin: ${provider}`);
492
2944
  }
493
2945
  } else {
494
- cliPlugin = await loadPluginModule(provider, import_node_path4.default.dirname(schemaFile));
2946
+ cliPlugin = await loadPluginModule(provider, import_node_path5.default.dirname(schemaFile));
495
2947
  }
496
2948
  if (cliPlugin) {
497
2949
  const pluginOptions = getPluginOptions(plugin3);
@@ -527,10 +2979,10 @@ async function runPlugins(schemaFile, model, outputPath, options) {
527
2979
  }
528
2980
  });
529
2981
  for (const { cliPlugin, pluginOptions } of processedPlugins) {
530
- (0, import_common_helpers.invariant)(typeof cliPlugin.generate === "function", `Plugin ${cliPlugin.name} does not have a generate function`);
2982
+ (0, import_common_helpers2.invariant)(typeof cliPlugin.generate === "function", `Plugin ${cliPlugin.name} does not have a generate function`);
531
2983
  let spinner;
532
2984
  if (!options.silent) {
533
- spinner = (0, import_ora.default)(cliPlugin.statusText ?? `Running plugin ${cliPlugin.name}`).start();
2985
+ spinner = (0, import_ora2.default)(cliPlugin.statusText ?? `Running plugin ${cliPlugin.name}`).start();
534
2986
  }
535
2987
  try {
536
2988
  await cliPlugin.generate({
@@ -549,7 +3001,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
549
3001
  __name(runPlugins, "runPlugins");
550
3002
  function getPluginProvider(plugin3) {
551
3003
  const providerField = plugin3.fields.find((f) => f.name === "provider");
552
- (0, import_common_helpers.invariant)(providerField, `Plugin ${plugin3.name} does not have a provider field`);
3004
+ (0, import_common_helpers2.invariant)(providerField, `Plugin ${plugin3.name} does not have a provider field`);
553
3005
  const provider = providerField.value.value;
554
3006
  return provider;
555
3007
  }
@@ -560,7 +3012,7 @@ function getPluginOptions(plugin3) {
560
3012
  if (field.name === "provider") {
561
3013
  continue;
562
3014
  }
563
- const value = (0, import_utils.getLiteral)(field.value) ?? (0, import_utils.getLiteralArray)(field.value);
3015
+ const value = (0, import_utils7.getLiteral)(field.value) ?? (0, import_utils7.getLiteralArray)(field.value);
564
3016
  if (value === void 0) {
565
3017
  console.warn(`Plugin "${plugin3.name}" option "${field.name}" has unsupported value, skipping`);
566
3018
  continue;
@@ -573,7 +3025,7 @@ __name(getPluginOptions, "getPluginOptions");
573
3025
  async function loadPluginModule(provider, basePath) {
574
3026
  let moduleSpec = provider;
575
3027
  if (moduleSpec.startsWith(".")) {
576
- moduleSpec = import_node_path4.default.resolve(basePath, moduleSpec);
3028
+ moduleSpec = import_node_path5.default.resolve(basePath, moduleSpec);
577
3029
  }
578
3030
  const importAsEsm = /* @__PURE__ */ __name(async (spec) => {
579
3031
  try {
@@ -611,13 +3063,13 @@ async function loadPluginModule(provider, basePath) {
  }
  }
  for (const suffix of esmSuffixes) {
- const indexPath = import_node_path4.default.join(moduleSpec, `index${suffix}`);
+ const indexPath = import_node_path5.default.join(moduleSpec, `index${suffix}`);
  if (import_node_fs6.default.existsSync(indexPath)) {
  return await importAsEsm((0, import_node_url.pathToFileURL)(indexPath).toString());
  }
  }
  for (const suffix of tsSuffixes) {
- const indexPath = import_node_path4.default.join(moduleSpec, `index${suffix}`);
+ const indexPath = import_node_path5.default.join(moduleSpec, `index${suffix}`);
  if (import_node_fs6.default.existsSync(indexPath)) {
  return await importAsTs(indexPath);
  }
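loadPluginModule resolves a relative provider against the schema directory and then probes index files with ESM and TypeScript suffixes. A rough sketch of the object shape runPlugins expects from the loaded module, based only on the properties this bundle references (name, optional statusText, generate); how the module actually exports that object is not visible in this diff:

// Illustrative CLI plugin module object (names are examples, not the documented API):
const examplePlugin = {
    name: 'my-plugin',
    // optional; runPlugins falls back to `Running plugin <name>` for the spinner text
    statusText: 'Generating my-plugin artifacts',
    // runPlugins asserts this is a function before awaiting it; the exact
    // argument shape is not shown in this hunk
    async generate(context) {
        // write generated artifacts here
    },
};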
@@ -631,8 +3083,8 @@ async function loadPluginModule(provider, basePath) {
  __name(loadPluginModule, "loadPluginModule");

  // src/actions/info.ts
- var import_colors5 = __toESM(require("colors"), 1);
- var import_node_path5 = __toESM(require("path"), 1);
+ var import_colors7 = __toESM(require("colors"), 1);
+ var import_node_path6 = __toESM(require("path"), 1);
  async function run5(projectPath) {
  const packages = await getZenStackPackages(projectPath);
  if (!packages) {
@@ -645,18 +3097,18 @@ async function run5(projectPath) {
  if (version2) {
  versions.add(version2);
  }
- console.log(` ${import_colors5.default.green(pkg.padEnd(20))} ${version2}`);
+ console.log(` ${import_colors7.default.green(pkg.padEnd(20))} ${version2}`);
  }
  if (versions.size > 1) {
- console.warn(import_colors5.default.yellow("WARNING: Multiple versions of Zenstack packages detected. This may cause issues."));
+ console.warn(import_colors7.default.yellow("WARNING: Multiple versions of Zenstack packages detected. This may cause issues."));
  }
  }
  __name(run5, "run");
  async function getZenStackPackages(projectPath) {
  let pkgJson;
- const resolvedPath = import_node_path5.default.resolve(projectPath);
+ const resolvedPath = import_node_path6.default.resolve(projectPath);
  try {
- pkgJson = (await import(import_node_path5.default.join(resolvedPath, "package.json"), {
+ pkgJson = (await import(import_node_path6.default.join(resolvedPath, "package.json"), {
  with: {
  type: "json"
  }
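getZenStackPackages reads the target project's package.json through a dynamic import with JSON import attributes rather than via fs. A standalone sketch of that pattern, assuming a Node version that supports the `with` attribute (the path is a placeholder):

// Illustrative only; mirrors the import style used above.
const pkgJson = (await import('/path/to/project/package.json', { with: { type: 'json' } })).default;
console.log(Object.keys(pkgJson.dependencies ?? {}));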
@@ -694,10 +3146,10 @@ async function getZenStackPackages(projectPath) {
  __name(getZenStackPackages, "getZenStackPackages");

  // src/actions/init.ts
- var import_colors6 = __toESM(require("colors"), 1);
+ var import_colors8 = __toESM(require("colors"), 1);
  var import_node_fs7 = __toESM(require("fs"), 1);
- var import_node_path6 = __toESM(require("path"), 1);
- var import_ora2 = __toESM(require("ora"), 1);
+ var import_node_path7 = __toESM(require("path"), 1);
+ var import_ora3 = __toESM(require("ora"), 1);
  var import_package_manager_detector = require("package-manager-detector");

  // src/actions/templates.ts
@@ -748,7 +3200,7 @@ async function run6(projectPath) {
  name: "npm"
  };
  }
- console.log(import_colors6.default.gray(`Using package manager: ${pm.agent}`));
+ console.log(import_colors8.default.gray(`Using package manager: ${pm.agent}`));
  for (const pkg of packages) {
  const resolved = (0, import_package_manager_detector.resolveCommand)(pm.agent, "add", [
  pkg.name,
@@ -759,7 +3211,7 @@ async function run6(projectPath) {
  if (!resolved) {
  throw new CliError(`Unable to determine how to install package "${pkg.name}". Please install it manually.`);
  }
- const spinner = (0, import_ora2.default)(`Installing "${pkg.name}"`).start();
+ const spinner = (0, import_ora3.default)(`Installing "${pkg.name}"`).start();
  try {
  execSync(`${resolved.command} ${resolved.args.join(" ")}`, {
  cwd: projectPath
@@ -771,38 +3223,38 @@ async function run6(projectPath) {
  }
  }
  const generationFolder = "zenstack";
- if (!import_node_fs7.default.existsSync(import_node_path6.default.join(projectPath, generationFolder))) {
- import_node_fs7.default.mkdirSync(import_node_path6.default.join(projectPath, generationFolder));
+ if (!import_node_fs7.default.existsSync(import_node_path7.default.join(projectPath, generationFolder))) {
+ import_node_fs7.default.mkdirSync(import_node_path7.default.join(projectPath, generationFolder));
  }
- if (!import_node_fs7.default.existsSync(import_node_path6.default.join(projectPath, generationFolder, "schema.zmodel"))) {
- import_node_fs7.default.writeFileSync(import_node_path6.default.join(projectPath, generationFolder, "schema.zmodel"), STARTER_ZMODEL);
+ if (!import_node_fs7.default.existsSync(import_node_path7.default.join(projectPath, generationFolder, "schema.zmodel"))) {
+ import_node_fs7.default.writeFileSync(import_node_path7.default.join(projectPath, generationFolder, "schema.zmodel"), STARTER_ZMODEL);
  } else {
- console.log(import_colors6.default.yellow("Schema file already exists. Skipping generation of sample."));
+ console.log(import_colors8.default.yellow("Schema file already exists. Skipping generation of sample."));
  }
- console.log(import_colors6.default.green("ZenStack project initialized successfully!"));
- console.log(import_colors6.default.gray(`See "${generationFolder}/schema.zmodel" for your database schema.`));
- console.log(import_colors6.default.gray("Run `zenstack generate` to compile the the schema into a TypeScript file."));
+ console.log(import_colors8.default.green("ZenStack project initialized successfully!"));
+ console.log(import_colors8.default.gray(`See "${generationFolder}/schema.zmodel" for your database schema.`));
+ console.log(import_colors8.default.gray("Run `zenstack generate` to compile the the schema into a TypeScript file."));
  }
  __name(run6, "run");

  // src/actions/migrate.ts
  var import_node_fs8 = __toESM(require("fs"), 1);
- var import_node_path7 = __toESM(require("path"), 1);
+ var import_node_path8 = __toESM(require("path"), 1);

  // src/actions/seed.ts
- var import_colors7 = __toESM(require("colors"), 1);
+ var import_colors9 = __toESM(require("colors"), 1);
  var import_execa = require("execa");
  async function run7(options, args) {
  const pkgJsonConfig = getPkgJsonConfig(process.cwd());
  if (!pkgJsonConfig.seed) {
  if (!options.noWarnings) {
- console.warn(import_colors7.default.yellow("No seed script defined in package.json. Skipping seeding."));
+ console.warn(import_colors9.default.yellow("No seed script defined in package.json. Skipping seeding."));
  }
  return;
  }
  const command = `${pkgJsonConfig.seed}${args.length > 0 ? " " + args.join(" ") : ""}`;
  if (options.printStatus) {
- console.log(import_colors7.default.gray(`Running seed script "${command}"...`));
+ console.log(import_colors9.default.gray(`Running seed script "${command}"...`));
  }
  try {
  await (0, import_execa.execaCommand)(command, {
@@ -810,7 +3262,7 @@ async function run7(options, args) {
  stderr: "inherit"
  });
  } catch (err) {
- console.error(import_colors7.default.red(err instanceof Error ? err.message : String(err)));
+ console.error(import_colors9.default.red(err instanceof Error ? err.message : String(err)));
  throw new CliError("Failed to seed the database. Please check the error message above for details.");
  }
  }
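run7 only executes a seed script when one is configured; per the `db seed` help text registered later in this file, the script lives under the "zenstack.seed" key of package.json. A hypothetical configuration (the command itself is just an example):

{
    "zenstack": {
        "seed": "node ./db/seed.js"
    }
}

Any extra arguments passed on the command line are appended to that script before it is executed via execa.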
@@ -820,7 +3272,7 @@ __name(run7, "run");
  async function run8(command, options) {
  const schemaFile = getSchemaFile(options.schema);
  await requireDataSourceUrl(schemaFile);
- const prismaSchemaDir = options.migrations ? import_node_path7.default.dirname(options.migrations) : void 0;
+ const prismaSchemaDir = options.migrations ? import_node_path8.default.dirname(options.migrations) : void 0;
  const prismaSchemaFile = await generateTempPrismaSchema(schemaFile, prismaSchemaDir);
  try {
  switch (command) {
@@ -931,27 +3383,27 @@ function handleSubProcessError2(err) {
  __name(handleSubProcessError2, "handleSubProcessError");

  // src/actions/proxy.ts
- var import_ast3 = require("@zenstackhq/language/ast");
- var import_utils2 = require("@zenstackhq/language/utils");
+ var import_ast6 = require("@zenstackhq/language/ast");
+ var import_utils8 = require("@zenstackhq/language/utils");
  var import_orm = require("@zenstackhq/orm");
- var import_mysql = require("@zenstackhq/orm/dialects/mysql");
+ var import_mysql2 = require("@zenstackhq/orm/dialects/mysql");
  var import_postgres = require("@zenstackhq/orm/dialects/postgres");
- var import_sqlite = require("@zenstackhq/orm/dialects/sqlite");
+ var import_sqlite2 = require("@zenstackhq/orm/dialects/sqlite");
  var import_api = require("@zenstackhq/server/api");
  var import_express = require("@zenstackhq/server/express");
  var import_better_sqlite3 = __toESM(require("better-sqlite3"), 1);
- var import_colors9 = __toESM(require("colors"), 1);
+ var import_colors11 = __toESM(require("colors"), 1);
  var import_cors = __toESM(require("cors"), 1);
  var import_express2 = __toESM(require("express"), 1);
  var import_jiti2 = require("jiti");
- var import_mysql2 = require("mysql2");
- var import_node_path9 = __toESM(require("path"), 1);
- var import_pg = require("pg");
+ var import_mysql22 = require("mysql2");
+ var import_node_path10 = __toESM(require("path"), 1);
+ var import_pg2 = require("pg");

  // src/utils/version-utils.ts
- var import_colors8 = __toESM(require("colors"), 1);
+ var import_colors10 = __toESM(require("colors"), 1);
  var import_node_fs9 = __toESM(require("fs"), 1);
- var import_node_path8 = __toESM(require("path"), 1);
+ var import_node_path9 = __toESM(require("path"), 1);
  var import_node_url2 = require("url");
  var import_semver = __toESM(require("semver"), 1);
  var import_meta2 = {};
@@ -959,8 +3411,8 @@ var CHECK_VERSION_TIMEOUT = 2e3;
  var VERSION_CHECK_TAG = "next";
  function getVersion() {
  try {
- const _dirname = typeof __dirname !== "undefined" ? __dirname : import_node_path8.default.dirname((0, import_node_url2.fileURLToPath)(import_meta2.url));
- return JSON.parse(import_node_fs9.default.readFileSync(import_node_path8.default.join(_dirname, "../package.json"), "utf8")).version;
+ const _dirname = typeof __dirname !== "undefined" ? __dirname : import_node_path9.default.dirname((0, import_node_url2.fileURLToPath)(import_meta2.url));
+ return JSON.parse(import_node_fs9.default.readFileSync(import_node_path9.default.join(_dirname, "../package.json"), "utf8")).version;
  } catch {
  return void 0;
  }
@@ -975,7 +3427,7 @@ async function checkNewVersion() {
  return;
  }
  if (latestVersion && currVersion && import_semver.default.gt(latestVersion, currVersion)) {
- console.log(`A newer version ${import_colors8.default.cyan(latestVersion)} is available.`);
+ console.log(`A newer version ${import_colors10.default.cyan(latestVersion)} is available.`);
  }
  }
  __name(checkNewVersion, "checkNewVersion");
@@ -1006,13 +3458,13 @@ async function run9(options) {
  ];
  const log = options.logLevel?.filter((level) => allowedLogLevels.includes(level));
  const schemaFile = getSchemaFile(options.schema);
- console.log(import_colors9.default.gray(`Loading ZModel schema from: ${schemaFile}`));
+ console.log(import_colors11.default.gray(`Loading ZModel schema from: ${schemaFile}`));
  let outputPath = getOutputPath(options, schemaFile);
- if (!import_node_path9.default.isAbsolute(outputPath)) {
- outputPath = import_node_path9.default.resolve(process.cwd(), outputPath);
+ if (!import_node_path10.default.isAbsolute(outputPath)) {
+ outputPath = import_node_path10.default.resolve(process.cwd(), outputPath);
  }
  const model = await loadSchemaDocument(schemaFile);
- const dataSource = model.declarations.find(import_ast3.isDataSource);
+ const dataSource = model.declarations.find(import_ast6.isDataSource);
  let databaseUrl = options.databaseUrl;
  if (!databaseUrl) {
  const schemaUrl = dataSource?.fields.find((f) => f.name === "url")?.value;
@@ -1021,13 +3473,27 @@ async function run9(options) {
  }
  databaseUrl = evaluateUrl(schemaUrl);
  }
- const provider = (0, import_utils2.getStringLiteral)(dataSource?.fields.find((f) => f.name === "provider")?.value);
+ const provider = (0, import_utils8.getStringLiteral)(dataSource?.fields.find((f) => f.name === "provider")?.value);
  const dialect = createDialect(provider, databaseUrl, outputPath);
  const jiti = (0, import_jiti2.createJiti)(import_meta3.url);
- const schemaModule = await jiti.import(import_node_path9.default.join(outputPath, "schema"));
- const db = new import_orm.ZenStackClient(schemaModule.schema, {
+ const schemaModule = await jiti.import(import_node_path10.default.join(outputPath, "schema"));
+ const schema = schemaModule.schema;
+ const omit = {};
+ for (const [modelName, modelDef] of Object.entries(schema.models)) {
+ const computedFields = {};
+ for (const [fieldName, fieldDef] of Object.entries(modelDef.fields)) {
+ if (fieldDef.computed === true) {
+ computedFields[fieldName] = true;
+ }
+ }
+ if (Object.keys(computedFields).length > 0) {
+ omit[modelName] = computedFields;
+ }
+ }
+ const db = new import_orm.ZenStackClient(schema, {
  dialect,
- log: log && log.length > 0 ? log : void 0
+ log: log && log.length > 0 ? log : void 0,
+ omit: Object.keys(omit).length > 0 ? omit : void 0
  });
  try {
  await db.$connect();
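The block added above is new in this version: before constructing the client, run9 walks the compiled schema and collects every field flagged as computed into a per-model map, which is then passed as the client's omit option so computed fields are excluded by default. A condensed worked example of the same idea (the model and field names are made up):

// Given a compiled schema shaped roughly like:
//   { models: { User: { fields: { fullName: { computed: true }, email: {} } } } }
// the loop above produces:
//   omit === { User: { fullName: true } }
// and this map is only forwarded to ZenStackClient when it is non-empty.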
@@ -1038,11 +3504,11 @@ async function run9(options) {
  }
  __name(run9, "run");
  function evaluateUrl(schemaUrl) {
- if ((0, import_ast3.isLiteralExpr)(schemaUrl)) {
- return (0, import_utils2.getStringLiteral)(schemaUrl);
- } else if ((0, import_ast3.isInvocationExpr)(schemaUrl)) {
+ if ((0, import_ast6.isLiteralExpr)(schemaUrl)) {
+ return (0, import_utils8.getStringLiteral)(schemaUrl);
+ } else if ((0, import_ast6.isInvocationExpr)(schemaUrl)) {
  const envFunction = schemaUrl;
- const envName = (0, import_utils2.getStringLiteral)(envFunction.args[0]?.value);
+ const envName = (0, import_utils8.getStringLiteral)(envFunction.args[0]?.value);
  const envValue = process.env[envName];
  if (!envValue) {
  throw new CliError(`Environment variable ${envName} is not set`);
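evaluateUrl handles the two forms a datasource url can take in the ZModel schema: a plain string literal, or an env() invocation whose value is resolved from the process environment at run time. A small sketch (the variable name and value are examples):

// In schema.zmodel the url is either a literal or env():
//
//   datasource db {
//     provider = "postgresql"
//     url      = env("DATABASE_URL")
//   }
//
// For the env() form the CLI effectively does:
const envName = 'DATABASE_URL';
const envValue = process.env[envName];
if (!envValue) {
    // surfaces as a CliError in the real code path
    throw new Error(`Environment variable ${envName} is not set`);
}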
@@ -1074,26 +3540,26 @@ function createDialect(provider, databaseUrl, outputPath) {
  let resolvedUrl = databaseUrl.trim();
  if (resolvedUrl.startsWith("file:")) {
  const filePath = resolvedUrl.substring("file:".length);
- if (!import_node_path9.default.isAbsolute(filePath)) {
- resolvedUrl = import_node_path9.default.join(outputPath, filePath);
+ if (!import_node_path10.default.isAbsolute(filePath)) {
+ resolvedUrl = import_node_path10.default.join(outputPath, filePath);
  }
  }
- console.log(import_colors9.default.gray(`Connecting to SQLite database at: ${resolvedUrl}`));
- return new import_sqlite.SqliteDialect({
+ console.log(import_colors11.default.gray(`Connecting to SQLite database at: ${resolvedUrl}`));
+ return new import_sqlite2.SqliteDialect({
  database: new import_better_sqlite3.default(resolvedUrl)
  });
  }
  case "postgresql":
- console.log(import_colors9.default.gray(`Connecting to PostgreSQL database at: ${redactDatabaseUrl(databaseUrl)}`));
+ console.log(import_colors11.default.gray(`Connecting to PostgreSQL database at: ${redactDatabaseUrl(databaseUrl)}`));
  return new import_postgres.PostgresDialect({
- pool: new import_pg.Pool({
+ pool: new import_pg2.Pool({
  connectionString: databaseUrl
  })
  });
  case "mysql":
- console.log(import_colors9.default.gray(`Connecting to MySQL database at: ${redactDatabaseUrl(databaseUrl)}`));
- return new import_mysql.MysqlDialect({
- pool: (0, import_mysql2.createPool)(databaseUrl)
+ console.log(import_colors11.default.gray(`Connecting to MySQL database at: ${redactDatabaseUrl(databaseUrl)}`));
+ return new import_mysql2.MysqlDialect({
+ pool: (0, import_mysql22.createPool)(databaseUrl)
  });
  default:
  throw new CliError(`Unsupported database provider: ${provider}`);
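createDialect maps the datasource provider onto an ORM dialect plus the matching driver: better-sqlite3 for file-based URLs, a pg Pool for postgresql, and a mysql2 pool for mysql. A self-contained sketch of the same wiring, with placeholder connection strings (the "sqlite" case label is assumed; it is not visible in this hunk):

// Illustrative only — mirrors the provider switch above.
const { SqliteDialect } = require('@zenstackhq/orm/dialects/sqlite');
const { PostgresDialect } = require('@zenstackhq/orm/dialects/postgres');
const { MysqlDialect } = require('@zenstackhq/orm/dialects/mysql');
const Database = require('better-sqlite3');
const { Pool } = require('pg');
const { createPool } = require('mysql2');

function pickDialect(provider, databaseUrl) {
    switch (provider) {
        case 'sqlite':
            // in the real code, "file:" URLs are resolved to a path before this point
            return new SqliteDialect({ database: new Database(databaseUrl) });
        case 'postgresql':
            return new PostgresDialect({ pool: new Pool({ connectionString: databaseUrl }) });
        case 'mysql':
            return new MysqlDialect({ pool: createPool(databaseUrl) });
        default:
            throw new Error(`Unsupported database provider: ${provider}`);
    }
}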
@@ -1124,11 +3590,11 @@ function startServer(client, schema, options) {
  });
  const server = app.listen(options.port, () => {
  console.log(`ZenStack proxy server is running on port: ${options.port}`);
- console.log(`You can visit ZenStack Studio at: ${import_colors9.default.blue("https://studio.zenstack.dev")}`);
+ console.log(`You can visit ZenStack Studio at: ${import_colors11.default.blue("https://studio.zenstack.dev")}`);
  });
  server.on("error", (err) => {
  if (err.code === "EADDRINUSE") {
- console.error(import_colors9.default.red(`Port ${options.port} is already in use. Please choose a different port using -p option.`));
+ console.error(import_colors11.default.red(`Port ${options.port} is already in use. Please choose a different port using -p option.`));
  } else {
  throw new CliError(`Failed to start the server: ${err.message}`);
  }
@@ -1158,7 +3624,7 @@ var import_node_fs13 = __toESM(require("fs"), 1);
  var os2 = __toESM(require("os"), 1);

  // src/constants.ts
- var TELEMETRY_TRACKING_TOKEN = "74944eb779d7d3b4ce185be843fde9fc";
+ var TELEMETRY_TRACKING_TOKEN = "<TELEMETRY_TRACKING_TOKEN>";

  // src/utils/is-ci.ts
  var import_node_process = require("process");
@@ -1421,8 +3887,8 @@ var proxyAction = /* @__PURE__ */ __name(async (options) => {
  }, "proxyAction");
  function createProgram() {
  const program = new import_commander.Command("zen").alias("zenstack").helpOption("-h, --help", "Show this help message").version(getVersion(), "-v --version", "Show CLI version");
- const schemaExtensions = import_language4.ZModelLanguageMetaData.fileExtensions.join(", ");
- program.description(`${import_colors10.default.bold.blue("\u03B6")} ZenStack is the modern data layer for TypeScript apps.
+ const schemaExtensions = import_language5.ZModelLanguageMetaData.fileExtensions.join(", ");
+ program.description(`${import_colors12.default.bold.blue("\u03B6")} ZenStack is the modern data layer for TypeScript apps.

  Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAfterError();
  const schemaOption = new import_commander.Option("--schema <file>", `schema file (with extension ${schemaExtensions}). Defaults to "zenstack/schema.zmodel" unless specified in package.json.`);
@@ -1437,6 +3903,7 @@ Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAf
  migrateCommand.command("resolve").addOption(schemaOption).addOption(noVersionCheckOption).addOption(migrationsOption).addOption(new import_commander.Option("--applied <migration>", "record a specific migration as applied")).addOption(new import_commander.Option("--rolled-back <migration>", "record a specific migration as rolled back")).description("Resolve issues with database migrations in deployment databases").action((options) => migrateAction("resolve", options));
  const dbCommand = program.command("db").description("Manage your database schema during development");
  dbCommand.command("push").description("Push the state from your schema to your database").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new import_commander.Option("--accept-data-loss", "ignore data loss warnings")).addOption(new import_commander.Option("--force-reset", "force a reset of the database before push")).action((options) => dbAction("push", options));
+ dbCommand.command("pull").description("Introspect your database.").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new import_commander.Option("-o, --output <path>", "set custom output path for the introspected schema. If a file path is provided, all schemas are merged into that single file. If a directory path is provided, files are written to the directory and imports are kept.")).addOption(new import_commander.Option("--model-casing <pascal|camel|snake|none>", "set the casing of generated models").default("pascal")).addOption(new import_commander.Option("--field-casing <pascal|camel|snake|none>", "set the casing of generated fields").default("camel")).addOption(new import_commander.Option("--always-map", "always add @map and @@map attributes to models and fields").default(false)).addOption(new import_commander.Option("--quote <double|single>", "set the quote style of generated schema files").default("single")).addOption(new import_commander.Option("--indent <number>", "set the indentation of the generated schema files").default(4).argParser(parseInt)).action((options) => dbAction("pull", options));
  dbCommand.command("seed").description("Seed the database").allowExcessArguments(true).addHelpText("after", `
  Seed script is configured under the "zenstack.seed" field in package.json.
  E.g.:
@@ -1473,10 +3940,10 @@ async function main() {
  if (e instanceof import_commander.CommanderError) {
  exitCode = e.exitCode;
  } else if (e instanceof CliError) {
- console.error(import_colors10.default.red(e.message));
+ console.error(import_colors12.default.red(e.message));
  exitCode = 1;
  } else {
- console.error(import_colors10.default.red(`Unhandled error: ${e}`));
+ console.error(import_colors12.default.red(`Unhandled error: ${e}`));
  exitCode = 1;
  }
  }
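The most user-visible addition in this release is the `db pull` subcommand registered above, which introspects an existing database into the ZModel schema. Going only by the options in that registration, an invocation might look like `zen db pull --schema zenstack/schema.zmodel --output zenstack/schema.zmodel --field-casing camel`: `--model-casing` and `--field-casing` control identifier casing (defaulting to pascal and camel), `--always-map` forces @map/@@map attributes onto models and fields, and `--quote`/`--indent` set the style of the emitted files. When `--output` points to a directory rather than a single file, the introspected schemas are written as separate files with imports preserved.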