@zenstackhq/cli 3.3.3 → 3.4.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -14,7 +14,7 @@ var __export = (target, all) => {
14
14
  // src/index.ts
15
15
  import "dotenv/config";
16
16
  import { ZModelLanguageMetaData as ZModelLanguageMetaData2 } from "@zenstackhq/language";
17
- import colors10 from "colors";
17
+ import colors12 from "colors";
18
18
  import { Command, CommanderError, Option } from "commander";
19
19
 
20
20
  // src/actions/check.ts
@@ -26,6 +26,7 @@ import { isDataSource } from "@zenstackhq/language/ast";
26
26
  import { PrismaSchemaGenerator } from "@zenstackhq/sdk";
27
27
  import colors from "colors";
28
28
  import fs from "fs";
29
+ import { createRequire } from "module";
29
30
  import path from "path";
30
31
 
31
32
  // src/cli-error.ts
@@ -67,8 +68,10 @@ function getSchemaFile(file) {
67
68
  }
68
69
  }
69
70
  __name(getSchemaFile, "getSchemaFile");
70
- async function loadSchemaDocument(schemaFile) {
71
- const loadResult = await loadDocument(schemaFile);
71
+ async function loadSchemaDocument(schemaFile, opts = {}) {
72
+ const returnServices = opts.returnServices ?? false;
73
+ const mergeImports = opts.mergeImports ?? true;
74
+ const loadResult = await loadDocument(schemaFile, [], mergeImports);
72
75
  if (!loadResult.success) {
73
76
  loadResult.errors.forEach((err) => {
74
77
  console.error(colors.red(err));
@@ -78,6 +81,10 @@ async function loadSchemaDocument(schemaFile) {
78
81
  loadResult.warnings.forEach((warn) => {
79
82
  console.warn(colors.yellow(warn));
80
83
  });
84
+ if (returnServices) return {
85
+ model: loadResult.model,
86
+ services: loadResult.services
87
+ };
81
88
  return loadResult.model;
82
89
  }
83
90
  __name(loadSchemaDocument, "loadSchemaDocument");
@@ -135,10 +142,10 @@ function findUp(names, cwd = process.cwd(), multiple = false, result = []) {
135
142
  }
136
143
  const target = names.find((name) => fs.existsSync(path.join(cwd, name)));
137
144
  if (multiple === false && target) {
138
- return path.join(cwd, target);
145
+ return path.resolve(cwd, target);
139
146
  }
140
147
  if (target) {
141
- result.push(path.join(cwd, target));
148
+ result.push(path.resolve(cwd, target));
142
149
  }
143
150
  const up = path.resolve(cwd, "..");
144
151
  if (up === cwd) {
@@ -167,6 +174,44 @@ function getOutputPath(options, schemaFile) {
167
174
  }
168
175
  }
169
176
  __name(getOutputPath, "getOutputPath");
177
+ async function getZenStackPackages(searchPath) {
178
+ const pkgJsonFile = findUp([
179
+ "package.json"
180
+ ], searchPath, false);
181
+ if (!pkgJsonFile) {
182
+ return [];
183
+ }
184
+ let pkgJson;
185
+ try {
186
+ pkgJson = JSON.parse(fs.readFileSync(pkgJsonFile, "utf8"));
187
+ } catch {
188
+ return [];
189
+ }
190
+ const packages = Array.from(new Set([
191
+ ...Object.keys(pkgJson.dependencies ?? {}),
192
+ ...Object.keys(pkgJson.devDependencies ?? {})
193
+ ].filter((p) => p.startsWith("@zenstackhq/")))).sort();
194
+ const require2 = createRequire(pkgJsonFile);
195
+ const result = packages.map((pkg) => {
196
+ try {
197
+ const depPkgJson = require2(`${pkg}/package.json`);
198
+ if (depPkgJson.private) {
199
+ return void 0;
200
+ }
201
+ return {
202
+ pkg,
203
+ version: depPkgJson.version
204
+ };
205
+ } catch {
206
+ return {
207
+ pkg,
208
+ version: void 0
209
+ };
210
+ }
211
+ });
212
+ return result.filter((p) => !!p);
213
+ }
214
+ __name(getZenStackPackages, "getZenStackPackages");
170
215
 
171
216
  // src/actions/check.ts
172
217
  async function run(options) {
@@ -182,7 +227,12 @@ async function run(options) {
182
227
  __name(run, "run");
183
228
 
184
229
  // src/actions/db.ts
230
+ import { formatDocument, ZModelCodeGenerator } from "@zenstackhq/language";
231
+ import { DataModel, Enum } from "@zenstackhq/language/ast";
232
+ import colors4 from "colors";
185
233
  import fs2 from "fs";
234
+ import path2 from "path";
235
+ import ora from "ora";
186
236
 
187
237
  // src/utils/exec-utils.ts
188
238
  import { execSync as _exec } from "child_process";
@@ -231,12 +281,2156 @@ function execPrisma(args, options) {
231
281
  }
232
282
  __name(execPrisma, "execPrisma");
233
283
 
284
+ // src/actions/pull/index.ts
285
+ import colors3 from "colors";
286
+ import { isEnum } from "@zenstackhq/language/ast";
287
+ import { DataFieldAttributeFactory, DataFieldFactory, DataModelFactory, EnumFactory } from "@zenstackhq/language/factory";
288
+ import { AstUtils } from "langium";
289
+ import { lowerCaseFirst } from "@zenstackhq/common-helpers";
290
+
291
+ // src/actions/pull/utils.ts
292
+ import { isInvocationExpr } from "@zenstackhq/language/ast";
293
+ import { getLiteralArray, getStringLiteral } from "@zenstackhq/language/utils";
294
+ function isDatabaseManagedAttribute(name) {
295
+ return [
296
+ "@relation",
297
+ "@id",
298
+ "@unique"
299
+ ].includes(name) || name.startsWith("@db.");
300
+ }
301
+ __name(isDatabaseManagedAttribute, "isDatabaseManagedAttribute");
302
+ function getDatasource(model) {
303
+ const datasource = model.declarations.find((d) => d.$type === "DataSource");
304
+ if (!datasource) {
305
+ throw new CliError("No datasource declaration found in the schema.");
306
+ }
307
+ const urlField = datasource.fields.find((f) => f.name === "url");
308
+ if (!urlField) throw new CliError(`No url field found in the datasource declaration.`);
309
+ let url = getStringLiteral(urlField.value);
310
+ if (!url && isInvocationExpr(urlField.value)) {
311
+ const envName = getStringLiteral(urlField.value.args[0]?.value);
312
+ if (!envName) {
313
+ throw new CliError("The url field must be a string literal or an env().");
314
+ }
315
+ if (!process.env[envName]) {
316
+ throw new CliError(`Environment variable ${envName} is not set, please set it to the database connection string.`);
317
+ }
318
+ url = process.env[envName];
319
+ }
320
+ if (!url) {
321
+ throw new CliError("The url field must be a string literal or an env().");
322
+ }
323
+ if (url.startsWith("file:")) {
324
+ url = new URL(url, `file:${model.$document.uri.path}`).pathname;
325
+ if (process.platform === "win32" && url[0] === "/") url = url.slice(1);
326
+ }
327
+ const defaultSchemaField = datasource.fields.find((f) => f.name === "defaultSchema");
328
+ const defaultSchema = defaultSchemaField && getStringLiteral(defaultSchemaField.value) || "public";
329
+ const schemasField = datasource.fields.find((f) => f.name === "schemas");
330
+ const schemas = schemasField && getLiteralArray(schemasField.value)?.filter((s) => s !== void 0) || [];
331
+ const provider = getStringLiteral(datasource.fields.find((f) => f.name === "provider")?.value);
332
+ if (!provider) {
333
+ throw new CliError(`Datasource "${datasource.name}" is missing a "provider" field.`);
334
+ }
335
+ return {
336
+ name: datasource.name,
337
+ provider,
338
+ url,
339
+ defaultSchema,
340
+ schemas,
341
+ allSchemas: [
342
+ defaultSchema,
343
+ ...schemas
344
+ ]
345
+ };
346
+ }
347
+ __name(getDatasource, "getDatasource");
348
+ function getDbName(decl, includeSchema = false) {
349
+ if (!("attributes" in decl)) return decl.name;
350
+ const schemaAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@schema");
351
+ let schema = "public";
352
+ if (schemaAttr) {
353
+ const schemaAttrValue = schemaAttr.args[0]?.value;
354
+ if (schemaAttrValue?.$type === "StringLiteral") {
355
+ schema = schemaAttrValue.value;
356
+ }
357
+ }
358
+ const formatName = /* @__PURE__ */ __name((name) => `${schema && includeSchema ? `${schema}.` : ""}${name}`, "formatName");
359
+ const nameAttr = decl.attributes.find((a) => a.decl.ref?.name === "@@map" || a.decl.ref?.name === "@map");
360
+ if (!nameAttr) return formatName(decl.name);
361
+ const attrValue = nameAttr.args[0]?.value;
362
+ if (attrValue?.$type !== "StringLiteral") return formatName(decl.name);
363
+ return formatName(attrValue.value);
364
+ }
365
+ __name(getDbName, "getDbName");
366
+ function getRelationFkName(decl) {
367
+ const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
368
+ const schemaAttrValue = relationAttr?.args.find((a) => a.name === "map")?.value;
369
+ return schemaAttrValue?.value;
370
+ }
371
+ __name(getRelationFkName, "getRelationFkName");
372
+ function getRelationFieldsKey(decl) {
373
+ const relationAttr = decl?.attributes.find((a) => a.decl.ref?.name === "@relation");
374
+ if (!relationAttr) return void 0;
375
+ const fieldsArg = relationAttr.args.find((a) => a.name === "fields")?.value;
376
+ if (!fieldsArg || fieldsArg.$type !== "ArrayExpr") return void 0;
377
+ const fieldNames = fieldsArg.items.filter((item) => item.$type === "ReferenceExpr").map((item) => item.target?.$refText || item.target?.ref?.name).filter((name) => !!name).sort();
378
+ return fieldNames.length > 0 ? fieldNames.join(",") : void 0;
379
+ }
380
+ __name(getRelationFieldsKey, "getRelationFieldsKey");
381
+ function getDeclarationRef(type2, name, services) {
382
+ const node = services.shared.workspace.IndexManager.allElements(type2).find((m) => m.node && getDbName(m.node) === name)?.node;
383
+ if (!node) throw new CliError(`Declaration not found: ${name}`);
384
+ return node;
385
+ }
386
+ __name(getDeclarationRef, "getDeclarationRef");
387
+ function getEnumRef(name, services) {
388
+ return getDeclarationRef("Enum", name, services);
389
+ }
390
+ __name(getEnumRef, "getEnumRef");
391
+ function getAttributeRef(name, services) {
392
+ return getDeclarationRef("Attribute", name, services);
393
+ }
394
+ __name(getAttributeRef, "getAttributeRef");
395
+ function getFunctionRef(name, services) {
396
+ return getDeclarationRef("FunctionDecl", name, services);
397
+ }
398
+ __name(getFunctionRef, "getFunctionRef");
399
+ function normalizeFloatDefault(val) {
400
+ if (/^-?\d+$/.test(val)) {
401
+ return (ab) => ab.NumberLiteral.setValue(val + ".0");
402
+ }
403
+ if (/^-?\d+\.\d+$/.test(val)) {
404
+ return (ab) => ab.NumberLiteral.setValue(val);
405
+ }
406
+ return (ab) => ab.NumberLiteral.setValue(val);
407
+ }
408
+ __name(normalizeFloatDefault, "normalizeFloatDefault");
409
+ function normalizeDecimalDefault(val) {
410
+ if (/^-?\d+$/.test(val)) {
411
+ return (ab) => ab.NumberLiteral.setValue(val + ".00");
412
+ }
413
+ if (/^-?\d+\.\d+$/.test(val)) {
414
+ const [integerPart, fractionalPart] = val.split(".");
415
+ let normalized = fractionalPart.replace(/0+$/, "");
416
+ if (normalized.length < 2) {
417
+ normalized = normalized.padEnd(2, "0");
418
+ }
419
+ return (ab) => ab.NumberLiteral.setValue(`${integerPart}.${normalized}`);
420
+ }
421
+ return (ab) => ab.NumberLiteral.setValue(val);
422
+ }
423
+ __name(normalizeDecimalDefault, "normalizeDecimalDefault");
424
+
425
+ // src/actions/pull/casing.ts
426
+ function resolveNameCasing(casing, originalName) {
427
+ let name = originalName;
428
+ const fieldPrefix = /[0-9]/g.test(name.charAt(0)) ? "_" : "";
429
+ switch (casing) {
430
+ case "pascal":
431
+ name = toPascalCase(originalName);
432
+ break;
433
+ case "camel":
434
+ name = toCamelCase(originalName);
435
+ break;
436
+ case "snake":
437
+ name = toSnakeCase(originalName);
438
+ break;
439
+ }
440
+ return {
441
+ modified: name !== originalName || fieldPrefix !== "",
442
+ name: `${fieldPrefix}${name}`
443
+ };
444
+ }
445
+ __name(resolveNameCasing, "resolveNameCasing");
446
+ function isAllUpperCase(str) {
447
+ return str === str.toUpperCase();
448
+ }
449
+ __name(isAllUpperCase, "isAllUpperCase");
450
+ function toPascalCase(str) {
451
+ if (isAllUpperCase(str)) return str;
452
+ return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toUpperCase());
453
+ }
454
+ __name(toPascalCase, "toPascalCase");
455
+ function toCamelCase(str) {
456
+ if (isAllUpperCase(str)) return str;
457
+ return str.replace(/[_\- ]+(\w)/g, (_, c) => c.toUpperCase()).replace(/^\w/, (c) => c.toLowerCase());
458
+ }
459
+ __name(toCamelCase, "toCamelCase");
460
+ function toSnakeCase(str) {
461
+ if (isAllUpperCase(str)) return str;
462
+ return str.replace(/[- ]+/g, "_").replace(/([a-z0-9])([A-Z])/g, "$1_$2").toLowerCase();
463
+ }
464
+ __name(toSnakeCase, "toSnakeCase");
465
+
466
+ // src/actions/pull/index.ts
467
+ function syncEnums({ dbEnums, model, oldModel, provider, options, services, defaultSchema }) {
468
+ if (provider.isSupportedFeature("NativeEnum")) {
469
+ for (const dbEnum of dbEnums) {
470
+ const { modified, name } = resolveNameCasing(options.modelCasing, dbEnum.enum_type);
471
+ if (modified) console.log(colors3.gray(`Mapping enum ${dbEnum.enum_type} to ${name}`));
472
+ const factory = new EnumFactory().setName(name);
473
+ if (modified || options.alwaysMap) factory.addAttribute((builder) => builder.setDecl(getAttributeRef("@@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(dbEnum.enum_type)));
474
+ dbEnum.values.forEach((v) => {
475
+ const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, v);
476
+ factory.addField((builder) => {
477
+ builder.setName(name2);
478
+ if (modified2 || options.alwaysMap) builder.addAttribute((builder2) => builder2.setDecl(getAttributeRef("@map", services)).addArg((argBuilder) => argBuilder.StringLiteral.setValue(v)));
479
+ return builder;
480
+ });
481
+ });
482
+ if (dbEnum.schema_name && dbEnum.schema_name !== "" && dbEnum.schema_name !== defaultSchema) {
483
+ factory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(dbEnum.schema_name)));
484
+ }
485
+ model.declarations.push(factory.get({
486
+ $container: model
487
+ }));
488
+ }
489
+ } else {
490
+ const dummyBuildReference = /* @__PURE__ */ __name((_node, _property, _refNode, refText) => ({
491
+ $refText: refText
492
+ }), "dummyBuildReference");
493
+ oldModel.declarations.filter((d) => isEnum(d)).forEach((d) => {
494
+ const copy = AstUtils.copyAstNode(d, dummyBuildReference);
495
+ copy.$container = model;
496
+ model.declarations.push(copy);
497
+ });
498
+ }
499
+ }
500
+ __name(syncEnums, "syncEnums");
501
+ function syncTable({ model, provider, table, services, options, defaultSchema }) {
502
+ const idAttribute = getAttributeRef("@id", services);
503
+ const modelIdAttribute = getAttributeRef("@@id", services);
504
+ const uniqueAttribute = getAttributeRef("@unique", services);
505
+ const modelUniqueAttribute = getAttributeRef("@@unique", services);
506
+ const fieldMapAttribute = getAttributeRef("@map", services);
507
+ const tableMapAttribute = getAttributeRef("@@map", services);
508
+ const modelindexAttribute = getAttributeRef("@@index", services);
509
+ const relations = [];
510
+ const { name, modified } = resolveNameCasing(options.modelCasing, table.name);
511
+ const multiPk = table.columns.filter((c) => c.pk).length > 1;
512
+ const modelFactory = new DataModelFactory().setName(name).setIsView(table.type === "view");
513
+ modelFactory.setContainer(model);
514
+ if (modified || options.alwaysMap) {
515
+ modelFactory.addAttribute((builder) => builder.setDecl(tableMapAttribute).addArg((argBuilder) => argBuilder.StringLiteral.setValue(table.name)));
516
+ }
517
+ const fkGroups = /* @__PURE__ */ new Map();
518
+ table.columns.forEach((column) => {
519
+ if (column.foreign_key_table && column.foreign_key_name) {
520
+ const group = fkGroups.get(column.foreign_key_name) ?? [];
521
+ group.push(column);
522
+ fkGroups.set(column.foreign_key_name, group);
523
+ }
524
+ });
525
+ for (const [fkName, fkColumns] of fkGroups) {
526
+ const firstCol = fkColumns[0];
527
+ const isSingleColumnPk = fkColumns.length === 1 && !multiPk && firstCol.pk;
528
+ const isUniqueRelation = fkColumns.length === 1 && firstCol.unique || isSingleColumnPk;
529
+ relations.push({
530
+ schema: table.schema,
531
+ table: table.name,
532
+ columns: fkColumns.map((c) => c.name),
533
+ type: "one",
534
+ fk_name: fkName,
535
+ foreign_key_on_delete: firstCol.foreign_key_on_delete,
536
+ foreign_key_on_update: firstCol.foreign_key_on_update,
537
+ nullable: firstCol.nullable,
538
+ references: {
539
+ schema: firstCol.foreign_key_schema,
540
+ table: firstCol.foreign_key_table,
541
+ columns: fkColumns.map((c) => c.foreign_key_column),
542
+ type: isUniqueRelation ? "one" : "many"
543
+ }
544
+ });
545
+ }
546
+ table.columns.forEach((column) => {
547
+ const { name: name2, modified: modified2 } = resolveNameCasing(options.fieldCasing, column.name);
548
+ const builtinType = provider.getBuiltinType(column.datatype);
549
+ modelFactory.addField((builder) => {
550
+ builder.setName(name2);
551
+ builder.setType((typeBuilder) => {
552
+ typeBuilder.setArray(builtinType.isArray);
553
+ typeBuilder.setOptional(builtinType.isArray ? false : column.nullable);
554
+ if (column.computed) {
555
+ typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
556
+ } else if (column.datatype === "enum") {
557
+ const ref = model.declarations.find((d) => isEnum(d) && getDbName(d) === column.datatype_name);
558
+ if (!ref) {
559
+ throw new CliError(`Enum ${column.datatype_name} not found`);
560
+ }
561
+ typeBuilder.setReference(ref);
562
+ } else {
563
+ if (builtinType.type !== "Unsupported") {
564
+ typeBuilder.setType(builtinType.type);
565
+ } else {
566
+ typeBuilder.setUnsupported((unsupportedBuilder) => unsupportedBuilder.setValue((lt) => lt.StringLiteral.setValue(column.datatype)));
567
+ }
568
+ }
569
+ return typeBuilder;
570
+ });
571
+ if (column.pk && !multiPk) {
572
+ builder.addAttribute((b) => b.setDecl(idAttribute));
573
+ }
574
+ const fieldAttrs = provider.getFieldAttributes({
575
+ fieldName: column.name,
576
+ fieldType: builtinType.type,
577
+ datatype: column.datatype,
578
+ length: column.length,
579
+ precision: column.precision,
580
+ services
581
+ });
582
+ fieldAttrs.forEach(builder.addAttribute.bind(builder));
583
+ if (column.default && !column.computed) {
584
+ const defaultExprBuilder = provider.getDefaultValue({
585
+ fieldType: builtinType.type,
586
+ datatype: column.datatype,
587
+ datatype_name: column.datatype_name,
588
+ defaultValue: column.default,
589
+ services,
590
+ enums: model.declarations.filter((d) => d.$type === "Enum")
591
+ });
592
+ if (defaultExprBuilder) {
593
+ const defaultAttr = new DataFieldAttributeFactory().setDecl(getAttributeRef("@default", services)).addArg(defaultExprBuilder);
594
+ builder.addAttribute(defaultAttr);
595
+ }
596
+ }
597
+ if (column.unique && !column.pk) {
598
+ builder.addAttribute((b) => {
599
+ b.setDecl(uniqueAttribute);
600
+ const isDefaultName = !column.unique_name || column.unique_name === `${table.name}_${column.name}_key` || column.unique_name === column.name;
601
+ if (!isDefaultName) {
602
+ b.addArg((ab) => ab.StringLiteral.setValue(column.unique_name), "map");
603
+ }
604
+ return b;
605
+ });
606
+ }
607
+ if (modified2 || options.alwaysMap) {
608
+ builder.addAttribute((ab) => ab.setDecl(fieldMapAttribute).addArg((ab2) => ab2.StringLiteral.setValue(column.name)));
609
+ }
610
+ return builder;
611
+ });
612
+ });
613
+ const pkColumns = table.columns.filter((c) => c.pk).map((c) => c.name);
614
+ if (multiPk) {
615
+ modelFactory.addAttribute((builder) => builder.setDecl(modelIdAttribute).addArg((argBuilder) => {
616
+ const arrayExpr = argBuilder.ArrayExpr;
617
+ pkColumns.forEach((c) => {
618
+ const ref = modelFactory.node.fields.find((f) => getDbName(f) === c);
619
+ if (!ref) {
620
+ throw new CliError(`Field ${c} not found`);
621
+ }
622
+ arrayExpr.addItem((itemBuilder) => itemBuilder.ReferenceExpr.setTarget(ref));
623
+ });
624
+ return arrayExpr;
625
+ }));
626
+ }
627
+ const hasUniqueConstraint = table.columns.some((c) => c.unique || c.pk) || table.indexes.some((i) => i.unique);
628
+ if (!hasUniqueConstraint) {
629
+ modelFactory.addAttribute((a) => a.setDecl(getAttributeRef("@@ignore", services)));
630
+ modelFactory.addComment("/// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by Zenstack Client.");
631
+ }
632
+ const sortedIndexes = table.indexes.reverse().sort((a, b) => {
633
+ if (a.unique && !b.unique) return -1;
634
+ if (!a.unique && b.unique) return 1;
635
+ return 0;
636
+ });
637
+ sortedIndexes.forEach((index) => {
638
+ if (index.predicate) {
639
+ console.warn(colors3.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
640
+ - Model: "${table.name}", constraint: "${index.name}"`));
641
+ return;
642
+ }
643
+ if (index.columns.find((c) => c.expression)) {
644
+ console.warn(colors3.yellow(`These constraints are not supported by Zenstack. Read more: https://pris.ly/d/check-constraints
645
+ - Model: "${table.name}", constraint: "${index.name}"`));
646
+ return;
647
+ }
648
+ if (index.primary) {
649
+ return;
650
+ }
651
+ if (index.columns.length === 1 && (index.columns.find((c) => pkColumns.includes(c.name)) || index.unique)) {
652
+ return;
653
+ }
654
+ modelFactory.addAttribute((builder) => {
655
+ const attr = builder.setDecl(index.unique ? modelUniqueAttribute : modelindexAttribute).addArg((argBuilder) => {
656
+ const arrayExpr = argBuilder.ArrayExpr;
657
+ index.columns.forEach((c) => {
658
+ const ref = modelFactory.node.fields.find((f) => getDbName(f) === c.name);
659
+ if (!ref) {
660
+ throw new CliError(`Column ${c.name} not found in model ${table.name}`);
661
+ }
662
+ arrayExpr.addItem((itemBuilder) => {
663
+ const refExpr = itemBuilder.ReferenceExpr.setTarget(ref);
664
+ if (c.order && c.order !== "ASC") refExpr.addArg((ab) => ab.StringLiteral.setValue("DESC"), "sort");
665
+ return refExpr;
666
+ });
667
+ });
668
+ return arrayExpr;
669
+ });
670
+ const suffix = index.unique ? "_key" : "_idx";
671
+ if (index.name !== `${table.name}_${index.columns.map((c) => c.name).join("_")}${suffix}`) {
672
+ attr.addArg((argBuilder) => argBuilder.StringLiteral.setValue(index.name), "map");
673
+ }
674
+ return attr;
675
+ });
676
+ });
677
+ if (table.schema && table.schema !== "" && table.schema !== defaultSchema) {
678
+ modelFactory.addAttribute((b) => b.setDecl(getAttributeRef("@@schema", services)).addArg((a) => a.StringLiteral.setValue(table.schema)));
679
+ }
680
+ model.declarations.push(modelFactory.node);
681
+ return relations;
682
+ }
683
+ __name(syncTable, "syncTable");
684
+ function syncRelation({ model, relation, services, options, selfRelation, similarRelations }) {
685
+ const idAttribute = getAttributeRef("@id", services);
686
+ const uniqueAttribute = getAttributeRef("@unique", services);
687
+ const relationAttribute = getAttributeRef("@relation", services);
688
+ const fieldMapAttribute = getAttributeRef("@map", services);
689
+ const tableMapAttribute = getAttributeRef("@@map", services);
690
+ const includeRelationName = selfRelation || similarRelations > 0;
691
+ if (!idAttribute || !uniqueAttribute || !relationAttribute || !fieldMapAttribute || !tableMapAttribute) {
692
+ throw new CliError("Cannot find required attributes in the model.");
693
+ }
694
+ const sourceModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.table);
695
+ if (!sourceModel) return;
696
+ const sourceFields = [];
697
+ for (const colName of relation.columns) {
698
+ const idx = sourceModel.fields.findIndex((f) => getDbName(f) === colName);
699
+ const field = sourceModel.fields[idx];
700
+ if (!field) return;
701
+ sourceFields.push({
702
+ field,
703
+ index: idx
704
+ });
705
+ }
706
+ const targetModel = model.declarations.find((d) => d.$type === "DataModel" && getDbName(d) === relation.references.table);
707
+ if (!targetModel) return;
708
+ const targetFields = [];
709
+ for (const colName of relation.references.columns) {
710
+ const field = targetModel.fields.find((f) => getDbName(f) === colName);
711
+ if (!field) return;
712
+ targetFields.push(field);
713
+ }
714
+ const firstSourceField = sourceFields[0].field;
715
+ const firstSourceFieldId = sourceFields[0].index;
716
+ const firstColumn = relation.columns[0];
717
+ const fieldPrefix = /[0-9]/g.test(sourceModel.name.charAt(0)) ? "_" : "";
718
+ const relationName = `${relation.table}${similarRelations > 0 ? `_${firstColumn}` : ""}To${relation.references.table}`;
719
+ const sourceNameFromReference = firstSourceField.name.toLowerCase().endsWith("id") ? `${resolveNameCasing(options.fieldCasing, firstSourceField.name.slice(0, -2)).name}${relation.type === "many" ? "s" : ""}` : void 0;
720
+ const sourceFieldFromReference = sourceModel.fields.find((f) => f.name === sourceNameFromReference);
721
+ let { name: sourceFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${fieldPrefix}${lowerCaseFirst(sourceModel.name)}_${firstColumn}` : `${(!sourceFieldFromReference ? sourceNameFromReference : void 0) || lowerCaseFirst(resolveNameCasing(options.fieldCasing, targetModel.name).name)}${relation.type === "many" ? "s" : ""}`);
722
+ if (sourceModel.fields.find((f) => f.name === sourceFieldName)) {
723
+ sourceFieldName = `${sourceFieldName}To${lowerCaseFirst(targetModel.name)}_${relation.references.columns[0]}`;
724
+ }
725
+ const sourceFieldFactory = new DataFieldFactory().setContainer(sourceModel).setName(sourceFieldName).setType((tb) => tb.setOptional(relation.nullable).setArray(relation.type === "many").setReference(targetModel));
726
+ sourceFieldFactory.addAttribute((ab) => {
727
+ ab.setDecl(relationAttribute);
728
+ if (includeRelationName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relationName));
729
+ ab.addArg((ab2) => {
730
+ const arrayExpr = ab2.ArrayExpr;
731
+ for (const { field } of sourceFields) {
732
+ arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
733
+ }
734
+ return arrayExpr;
735
+ }, "fields");
736
+ ab.addArg((ab2) => {
737
+ const arrayExpr = ab2.ArrayExpr;
738
+ for (const field of targetFields) {
739
+ arrayExpr.addItem((aeb) => aeb.ReferenceExpr.setTarget(field));
740
+ }
741
+ return arrayExpr;
742
+ }, "references");
743
+ const onDeleteDefault = relation.nullable ? "SET NULL" : "RESTRICT";
744
+ if (relation.foreign_key_on_delete && relation.foreign_key_on_delete !== onDeleteDefault) {
745
+ const enumRef = getEnumRef("ReferentialAction", services);
746
+ if (!enumRef) throw new CliError("ReferentialAction enum not found");
747
+ const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_delete.replace(/ /g, "").toLowerCase());
748
+ if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_delete} not found`);
749
+ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onDelete");
750
+ }
751
+ if (relation.foreign_key_on_update && relation.foreign_key_on_update !== "CASCADE") {
752
+ const enumRef = getEnumRef("ReferentialAction", services);
753
+ if (!enumRef) throw new CliError("ReferentialAction enum not found");
754
+ const enumFieldRef = enumRef.fields.find((f) => f.name.toLowerCase() === relation.foreign_key_on_update.replace(/ /g, "").toLowerCase());
755
+ if (!enumFieldRef) throw new CliError(`ReferentialAction ${relation.foreign_key_on_update} not found`);
756
+ ab.addArg((a) => a.ReferenceExpr.setTarget(enumFieldRef), "onUpdate");
757
+ }
758
+ const defaultFkName = `${relation.table}_${relation.columns.join("_")}_fkey`;
759
+ if (relation.fk_name && relation.fk_name !== defaultFkName) ab.addArg((ab2) => ab2.StringLiteral.setValue(relation.fk_name), "map");
760
+ return ab;
761
+ });
762
+ sourceModel.fields.splice(firstSourceFieldId, 0, sourceFieldFactory.node);
763
+ const oppositeFieldPrefix = /[0-9]/g.test(targetModel.name.charAt(0)) ? "_" : "";
764
+ let { name: oppositeFieldName } = resolveNameCasing(options.fieldCasing, similarRelations > 0 ? `${oppositeFieldPrefix}${lowerCaseFirst(sourceModel.name)}_${firstColumn}` : `${lowerCaseFirst(resolveNameCasing(options.fieldCasing, sourceModel.name).name)}${relation.references.type === "many" ? "s" : ""}`);
765
+ if (targetModel.fields.find((f) => f.name === oppositeFieldName)) {
766
+ ({ name: oppositeFieldName } = resolveNameCasing(options.fieldCasing, `${lowerCaseFirst(sourceModel.name)}_${firstColumn}To${relation.references.table}_${relation.references.columns[0]}`));
767
+ }
768
+ const targetFieldFactory = new DataFieldFactory().setContainer(targetModel).setName(oppositeFieldName).setType((tb) => tb.setOptional(relation.references.type === "one").setArray(relation.references.type === "many").setReference(sourceModel));
769
+ if (includeRelationName) targetFieldFactory.addAttribute((ab) => ab.setDecl(relationAttribute).addArg((ab2) => ab2.StringLiteral.setValue(relationName)));
770
+ targetModel.fields.push(targetFieldFactory.node);
771
+ }
772
+ __name(syncRelation, "syncRelation");
773
+ function consolidateEnums({ newModel, oldModel }) {
774
+ const newEnums = newModel.declarations.filter((d) => isEnum(d));
775
+ const newDataModels = newModel.declarations.filter((d) => d.$type === "DataModel");
776
+ const oldDataModels = oldModel.declarations.filter((d) => d.$type === "DataModel");
777
+ const enumMapping = /* @__PURE__ */ new Map();
778
+ for (const newEnum of newEnums) {
779
+ for (const newDM of newDataModels) {
780
+ for (const field of newDM.fields) {
781
+ if (field.$type !== "DataField" || field.type.reference?.ref !== newEnum) continue;
782
+ const oldDM = oldDataModels.find((d) => getDbName(d) === getDbName(newDM));
783
+ if (!oldDM) continue;
784
+ const oldField = oldDM.fields.find((f) => getDbName(f) === getDbName(field));
785
+ if (!oldField || oldField.$type !== "DataField" || !oldField.type.reference?.ref) continue;
786
+ const oldEnum = oldField.type.reference.ref;
787
+ if (!isEnum(oldEnum)) continue;
788
+ enumMapping.set(newEnum, oldEnum);
789
+ break;
790
+ }
791
+ if (enumMapping.has(newEnum)) break;
792
+ }
793
+ }
794
+ const reverseMapping = /* @__PURE__ */ new Map();
795
+ for (const [newEnum, oldEnum] of enumMapping) {
796
+ if (!reverseMapping.has(oldEnum)) {
797
+ reverseMapping.set(oldEnum, []);
798
+ }
799
+ reverseMapping.get(oldEnum).push(newEnum);
800
+ }
801
+ for (const [oldEnum, newEnumsGroup] of reverseMapping) {
802
+ const keepEnum = newEnumsGroup[0];
803
+ if (newEnumsGroup.length === 1 && keepEnum.name === oldEnum.name) continue;
804
+ const oldValues = new Set(oldEnum.fields.map((f) => getDbName(f)));
805
+ const allMatch = newEnumsGroup.every((ne) => {
806
+ const newValues = new Set(ne.fields.map((f) => getDbName(f)));
807
+ return oldValues.size === newValues.size && [
808
+ ...oldValues
809
+ ].every((v) => newValues.has(v));
810
+ });
811
+ if (!allMatch) continue;
812
+ keepEnum.name = oldEnum.name;
813
+ keepEnum.attributes = oldEnum.attributes.map((attr) => {
814
+ const copy = {
815
+ ...attr,
816
+ $container: keepEnum
817
+ };
818
+ return copy;
819
+ });
820
+ for (let i = 1; i < newEnumsGroup.length; i++) {
821
+ const idx = newModel.declarations.indexOf(newEnumsGroup[i]);
822
+ if (idx >= 0) {
823
+ newModel.declarations.splice(idx, 1);
824
+ }
825
+ }
826
+ for (const newDM of newDataModels) {
827
+ for (const field of newDM.fields) {
828
+ if (field.$type !== "DataField") continue;
829
+ const ref = field.type.reference?.ref;
830
+ if (ref && newEnumsGroup.includes(ref)) {
831
+ field.type.reference = {
832
+ ref: keepEnum,
833
+ $refText: keepEnum.name
834
+ };
835
+ }
836
+ }
837
+ }
838
+ console.log(colors3.gray(`Consolidated enum${newEnumsGroup.length > 1 ? "s" : ""} ${newEnumsGroup.map((e) => e.name).join(", ")} \u2192 ${oldEnum.name}`));
839
+ }
840
+ }
841
+ __name(consolidateEnums, "consolidateEnums");
842
+
843
+ // src/actions/pull/provider/mysql.ts
844
+ import { DataFieldAttributeFactory as DataFieldAttributeFactory2 } from "@zenstackhq/language/factory";
845
// Cleans up a MySQL generated-column expression for display: drops charset
// introducers (e.g. `_utf8mb4\'`) and unescapes backslash-escaped single
// quotes, so `concat(_utf8mb4\'a\', name)` becomes `concat('a', name)`.
function normalizeGenerationExpression(typeDef) {
  const withoutIntroducers = typeDef.replace(/_([0-9A-Za-z_]+)\\?'/g, "'");
  return withoutIntroducers.replace(/\\'/g, "'");
}
848
+ __name(normalizeGenerationExpression, "normalizeGenerationExpression");
849
// MySQL introspection provider for `zenstack db pull`. Maps MySQL column
// metadata onto ZenStack builtin types and emits attribute factories for
// the generated ZModel schema.
var mysql = {
  // Capability flags: MySQL columns can carry native enums (synthesized from
  // the column type), but MySQL has no multi-schema support here.
  isSupportedFeature(feature) {
    switch (feature) {
      case "NativeEnum":
        return true;
      case "Schema":
      default:
        return false;
    }
  },
  // Maps a raw MySQL data type name to a ZenStack builtin type.
  // MySQL has no array column types, so `isArray` is always false.
  getBuiltinType(type2) {
    const t = (type2 || "").toLowerCase().trim();
    const isArray = false;
    switch (t) {
      // integers
      case "tinyint":
      case "smallint":
      case "mediumint":
      case "int":
      case "integer":
        return { type: "Int", isArray };
      case "bigint":
        return { type: "BigInt", isArray };
      // decimals and floats
      case "decimal":
      case "numeric":
        return { type: "Decimal", isArray };
      case "float":
      case "double":
      case "real":
        return { type: "Float", isArray };
      // boolean (MySQL uses TINYINT(1) for boolean)
      case "boolean":
      case "bool":
        return { type: "Boolean", isArray };
      // strings
      case "char":
      case "varchar":
      case "tinytext":
      case "text":
      case "mediumtext":
      case "longtext":
        return { type: "String", isArray };
      // dates/times
      case "date":
      case "time":
      case "datetime":
      case "timestamp":
      case "year":
        return { type: "DateTime", isArray };
      // binary
      case "binary":
      case "varbinary":
      case "tinyblob":
      case "blob":
      case "mediumblob":
      case "longblob":
        return { type: "Bytes", isArray };
      // json
      case "json":
        return { type: "Json", isArray };
      default:
        // inline enum/set definitions (e.g. "enum('a','b')") fall back to String
        if (t.startsWith("enum(")) {
          return { type: "String", isArray };
        }
        if (t.startsWith("set(")) {
          return { type: "String", isArray };
        }
        return { type: "Unsupported", isArray };
    }
  },
  // The database type MySQL (via Prisma conventions) would pick by default for
  // each builtin type; used to decide whether an explicit @db.* attribute is
  // needed. Returns undefined for types with no default mapping.
  getDefaultDatabaseType(type2) {
    switch (type2) {
      case "String":
        return { type: "varchar", precision: 191 };
      case "Boolean":
        return { type: "boolean" };
      case "Int":
        return { type: "int" };
      case "BigInt":
        return { type: "bigint" };
      case "Float":
        return { type: "double" };
      case "Decimal":
        return { type: "decimal", precision: 65 };
      case "DateTime":
        return { type: "datetime", precision: 3 };
      case "Json":
        return { type: "json" };
      case "Bytes":
        return { type: "longblob" };
    }
  },
  // Connects with mysql2 (lazily imported so it is only required when a MySQL
  // datasource is pulled) and returns { tables, enums } metadata for the
  // database named in the connection string's path component.
  async introspect(connectionString, options) {
    const mysql2 = await import("mysql2/promise");
    const connection = await mysql2.createConnection(connectionString);
    try {
      const url = new URL(connectionString);
      const databaseName = url.pathname.replace("/", "");
      if (!databaseName) {
        throw new CliError("Database name not found in connection string");
      }
      const [tableRows] = await connection.execute(getTableIntrospectionQuery(), [databaseName]);
      const tables = [];
      for (const row of tableRows) {
        // nested JSON may arrive as a string or pre-parsed depending on driver settings
        const columns = typeof row.columns === "string" ? JSON.parse(row.columns) : row.columns;
        const indexes = typeof row.indexes === "string" ? JSON.parse(row.indexes) : row.indexes;
        const sortedColumns = (columns || []).sort((a, b) => (a.ordinal_position ?? 0) - (b.ordinal_position ?? 0)).map((col) => {
          if (col.datatype === "enum" && col.datatype_name) {
            // apply the configured casing to the synthesized enum type name
            return { ...col, datatype_name: resolveNameCasing(options.modelCasing, col.datatype_name).name };
          }
          if (col.computed && typeof col.datatype === "string") {
            // strip charset introducers/escapes from generated-column expressions
            return { ...col, datatype: normalizeGenerationExpression(col.datatype) };
          }
          return col;
        });
        // drop the implicit single-column FK indexes Prisma creates ("<table>_<col>_fkey")
        const filteredIndexes = (indexes || []).filter((idx) => !(idx.columns.length === 1 && idx.name === `${row.name}_${idx.columns[0]?.name}_fkey`));
        tables.push({ schema: "", name: row.name, type: row.type, definition: row.definition, columns: sortedColumns, indexes: filteredIndexes });
      }
      const [enumRows] = await connection.execute(getEnumIntrospectionQuery(), [databaseName]);
      const enums = enumRows.map((row) => {
        const values = parseEnumValues(row.column_type);
        // MySQL enums are anonymous; synthesize "<table>_<column>" as the type name
        const syntheticName = `${row.table_name}_${row.column_name}`;
        const { name } = resolveNameCasing(options.modelCasing, syntheticName);
        return { schema_name: "", enum_type: name, values };
      });
      return { tables, enums };
    } finally {
      await connection.end();
    }
  },
  // Converts a raw MySQL column default into an AST-builder callback for the
  // @default attribute value, or null when no default should be emitted.
  getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
    const val = defaultValue.trim();
    if (val.toUpperCase() === "NULL") {
      return null;
    }
    // enum defaults become references to the matching enum field
    if (datatype === "enum" && datatype_name) {
      const enumDef = enums.find((e) => getDbName(e) === datatype_name);
      if (enumDef) {
        const enumValue = val.startsWith("'") && val.endsWith("'") ? val.slice(1, -1) : val;
        const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
        if (enumField) {
          return (ab) => ab.ReferenceExpr.setTarget(enumField);
        }
      }
    }
    switch (fieldType) {
      case "DateTime":
        // CURRENT_TIMESTAMP / CURRENT_TIMESTAMP(n) / now() map to now()
        if (/^CURRENT_TIMESTAMP(\(\d*\))?$/i.test(val) || val.toLowerCase() === "current_timestamp()" || val.toLowerCase() === "now()") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Int":
      case "BigInt":
        if (val.toLowerCase() === "auto_increment") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
        }
        return (ab) => ab.NumberLiteral.setValue(val);
      case "Float":
        return normalizeFloatDefault(val);
      case "Decimal":
        return normalizeDecimalDefault(val);
      case "Boolean":
        // MySQL may report boolean defaults as 'true', '1', or bit literal b'1'
        return (ab) => ab.BooleanLiteral.setValue(val.toLowerCase() === "true" || val === "1" || val === "b'1'");
      case "String":
        if (val.toLowerCase() === "uuid()") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("uuid", services));
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Json":
        return (ab) => ab.StringLiteral.setValue(val);
      case "Bytes":
        return (ab) => ab.StringLiteral.setValue(val);
    }
    // anything that looks like a function call is preserved via dbgenerated(...)
    if (val.includes("(") && val.includes(")")) {
      return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
    }
    console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
    return null;
  },
  // Produces extra field attribute factories: @updatedAt (heuristic on the
  // field name) and @db.* when the column's native type or size differs from
  // the default mapping for its builtin type.
  getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
    const factories = [];
    if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
      factories.push(new DataFieldAttributeFactory2().setDecl(getAttributeRef("@updatedAt", services)));
    }
    // look up the @db.<datatype> attribute declaration in the language index
    const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${datatype.toLowerCase()}`)?.node;
    const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
    // emit @db.* only when the actual type/size deviates from the default mapping
    if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== datatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
      const dbAttrFactory = new DataFieldAttributeFactory2().setDecl(dbAttr);
      const sizeValue = length ?? precision;
      if (sizeValue !== void 0 && sizeValue !== null) {
        dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(sizeValue));
      }
      factories.push(dbAttrFactory);
    }
    return factories;
  }
};
1128
/**
 * Builds the MySQL table/view introspection SQL. Produces one row per table
 * or view in the target database (bound via the single `?` placeholder),
 * with `columns` and `indexes` as nested JSON arrays built with
 * JSON_ARRAYAGG/JSON_OBJECT over INFORMATION_SCHEMA.
 */
function getTableIntrospectionQuery() {
  return `
    -- Main query: one row per table/view with columns and indexes as nested JSON arrays.
    -- Uses INFORMATION_SCHEMA which is MySQL's standard metadata catalog.
    SELECT
        t.TABLE_NAME AS \`name\`, -- table or view name
        CASE t.TABLE_TYPE -- map MySQL table type strings to our internal types
            WHEN 'BASE TABLE' THEN 'table'
            WHEN 'VIEW' THEN 'view'
            ELSE NULL
        END AS \`type\`,
        CASE -- for views, retrieve the SQL definition
            WHEN t.TABLE_TYPE = 'VIEW' THEN v.VIEW_DEFINITION
            ELSE NULL
        END AS \`definition\`,

        -- ===== COLUMNS subquery =====
        -- Wraps an ordered subquery in JSON_ARRAYAGG to produce a JSON array of column objects.
        (
            SELECT JSON_ARRAYAGG(col_json)
            FROM (
                SELECT JSON_OBJECT(
                    'ordinal_position', c.ORDINAL_POSITION, -- column position (used for sorting)
                    'name', c.COLUMN_NAME, -- column name

                    -- datatype: for generated/computed columns, construct the full DDL-like type definition
                    -- (e.g., "int GENERATED ALWAYS AS (col1 + col2) STORED") so it can be rendered as
                    -- Unsupported("..."); special-case tinyint(1) as 'boolean' (MySQL's boolean convention);
                    -- otherwise use the DATA_TYPE (e.g., 'int', 'varchar', 'datetime').
                    'datatype', CASE
                        WHEN c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '' THEN
                            CONCAT(
                                c.COLUMN_TYPE,
                                ' GENERATED ALWAYS AS (',
                                c.GENERATION_EXPRESSION,
                                ') ',
                                CASE
                                    WHEN c.EXTRA LIKE '%STORED GENERATED%' THEN 'STORED'
                                    ELSE 'VIRTUAL'
                                END
                            )
                        WHEN c.DATA_TYPE = 'tinyint' AND c.COLUMN_TYPE = 'tinyint(1)' THEN 'boolean'
                        ELSE c.DATA_TYPE
                    END,

                    -- datatype_name: for enum columns, generate a synthetic name "TableName_ColumnName"
                    -- (MySQL doesn't have named enum types like PostgreSQL)
                    'datatype_name', CASE
                        WHEN c.DATA_TYPE = 'enum' THEN CONCAT(t.TABLE_NAME, '_', c.COLUMN_NAME)
                        ELSE NULL
                    END,

                    'datatype_schema', '', -- MySQL doesn't support multi-schema
                    'length', c.CHARACTER_MAXIMUM_LENGTH, -- max length for string types (e.g., VARCHAR(255) -> 255)
                    'precision', COALESCE(c.NUMERIC_PRECISION, c.DATETIME_PRECISION), -- numeric or datetime precision

                    'nullable', c.IS_NULLABLE = 'YES', -- true if column allows NULL

                    -- default: for auto_increment columns, report 'auto_increment' instead of NULL;
                    -- otherwise use the COLUMN_DEFAULT value
                    'default', CASE
                        WHEN c.EXTRA LIKE '%auto_increment%' THEN 'auto_increment'
                        ELSE c.COLUMN_DEFAULT
                    END,

                    'pk', c.COLUMN_KEY = 'PRI', -- true if column is part of the primary key

                    -- unique: true if the column has a single-column unique index.
                    -- COLUMN_KEY = 'UNI' covers most cases, but may not be set when the column
                    -- also participates in other indexes (showing 'MUL' instead on some MySQL versions).
                    -- Also check INFORMATION_SCHEMA.STATISTICS for single-column unique indexes
                    -- (NON_UNIQUE = 0) to match the PostgreSQL introspection behavior.
                    'unique', (
                        c.COLUMN_KEY = 'UNI'
                        OR EXISTS (
                            SELECT 1
                            FROM INFORMATION_SCHEMA.STATISTICS s_uni
                            WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
                              AND s_uni.TABLE_NAME = c.TABLE_NAME
                              AND s_uni.COLUMN_NAME = c.COLUMN_NAME
                              AND s_uni.NON_UNIQUE = 0
                              AND s_uni.INDEX_NAME != 'PRIMARY'
                              AND (
                                  SELECT COUNT(*)
                                  FROM INFORMATION_SCHEMA.STATISTICS s_cnt
                                  WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
                                    AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
                                    AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
                              ) = 1
                        )
                    ),
                    'unique_name', (
                        SELECT COALESCE(
                            CASE WHEN c.COLUMN_KEY = 'UNI' THEN c.COLUMN_NAME ELSE NULL END,
                            (
                                SELECT s_uni.INDEX_NAME
                                FROM INFORMATION_SCHEMA.STATISTICS s_uni
                                WHERE s_uni.TABLE_SCHEMA = c.TABLE_SCHEMA
                                  AND s_uni.TABLE_NAME = c.TABLE_NAME
                                  AND s_uni.COLUMN_NAME = c.COLUMN_NAME
                                  AND s_uni.NON_UNIQUE = 0
                                  AND s_uni.INDEX_NAME != 'PRIMARY'
                                  AND (
                                      SELECT COUNT(*)
                                      FROM INFORMATION_SCHEMA.STATISTICS s_cnt
                                      WHERE s_cnt.TABLE_SCHEMA = s_uni.TABLE_SCHEMA
                                        AND s_cnt.TABLE_NAME = s_uni.TABLE_NAME
                                        AND s_cnt.INDEX_NAME = s_uni.INDEX_NAME
                                  ) = 1
                                LIMIT 1
                            )
                        )
                    ),

                    -- computed: true if column has a generation expression (virtual or stored)
                    'computed', c.GENERATION_EXPRESSION IS NOT NULL AND c.GENERATION_EXPRESSION != '',

                    -- options: for enum columns, the full COLUMN_TYPE string (e.g., "enum('a','b','c')")
                    -- which gets parsed into individual values later
                    'options', CASE
                        WHEN c.DATA_TYPE = 'enum' THEN c.COLUMN_TYPE
                        ELSE NULL
                    END,

                    -- Foreign key info (NULL if column is not part of a FK)
                    'foreign_key_schema', NULL, -- MySQL doesn't support cross-schema FKs here
                    'foreign_key_table', kcu_fk.REFERENCED_TABLE_NAME, -- referenced table
                    'foreign_key_column', kcu_fk.REFERENCED_COLUMN_NAME, -- referenced column
                    'foreign_key_name', kcu_fk.CONSTRAINT_NAME, -- FK constraint name
                    'foreign_key_on_update', rc.UPDATE_RULE, -- referential action on update (CASCADE, SET NULL, etc.)
                    'foreign_key_on_delete', rc.DELETE_RULE -- referential action on delete
                ) AS col_json

                FROM INFORMATION_SCHEMA.COLUMNS c -- one row per column in the database

                -- Join KEY_COLUMN_USAGE to find foreign key references for this column.
                -- Filter to only FK entries (REFERENCED_TABLE_NAME IS NOT NULL).
                LEFT JOIN INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu_fk
                    ON c.TABLE_SCHEMA = kcu_fk.TABLE_SCHEMA
                    AND c.TABLE_NAME = kcu_fk.TABLE_NAME
                    AND c.COLUMN_NAME = kcu_fk.COLUMN_NAME
                    AND kcu_fk.REFERENCED_TABLE_NAME IS NOT NULL

                -- Join REFERENTIAL_CONSTRAINTS to get ON UPDATE / ON DELETE rules for the FK.
                LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc
                    ON kcu_fk.CONSTRAINT_SCHEMA = rc.CONSTRAINT_SCHEMA
                    AND kcu_fk.CONSTRAINT_NAME = rc.CONSTRAINT_NAME

                WHERE c.TABLE_SCHEMA = t.TABLE_SCHEMA
                  AND c.TABLE_NAME = t.TABLE_NAME
                ORDER BY c.ORDINAL_POSITION -- preserve original column order
            ) AS cols_ordered
        ) AS \`columns\`,

        -- ===== INDEXES subquery =====
        -- Aggregates all indexes for this table into a JSON array.
        (
            SELECT JSON_ARRAYAGG(idx_json)
            FROM (
                SELECT JSON_OBJECT(
                    'name', s.INDEX_NAME, -- index name (e.g., 'PRIMARY', 'idx_email')
                    'method', s.INDEX_TYPE, -- index type (e.g., 'BTREE', 'HASH', 'FULLTEXT')
                    'unique', s.NON_UNIQUE = 0, -- NON_UNIQUE=0 means it IS unique
                    'primary', s.INDEX_NAME = 'PRIMARY', -- MySQL names the PK index 'PRIMARY'
                    'valid', TRUE, -- MySQL doesn't expose index validity status
                    'ready', TRUE, -- MySQL doesn't expose index readiness status
                    'partial', FALSE, -- MySQL doesn't support partial indexes
                    'predicate', NULL, -- no WHERE clause on indexes in MySQL

                    -- Index columns: nested subquery for columns in this index
                    'columns', (
                        SELECT JSON_ARRAYAGG(idx_col_json)
                        FROM (
                            SELECT JSON_OBJECT(
                                'name', s2.COLUMN_NAME, -- column name in the index
                                'expression', NULL, -- MySQL doesn't expose expression indexes via STATISTICS
                                -- COLLATION: 'A' = ascending, 'D' = descending, NULL = not sorted
                                'order', CASE s2.COLLATION WHEN 'A' THEN 'ASC' WHEN 'D' THEN 'DESC' ELSE NULL END,
                                'nulls', NULL -- MySQL doesn't expose NULLS FIRST/LAST
                            ) AS idx_col_json
                            FROM INFORMATION_SCHEMA.STATISTICS s2 -- one row per column per index
                            WHERE s2.TABLE_SCHEMA = s.TABLE_SCHEMA
                              AND s2.TABLE_NAME = s.TABLE_NAME
                              AND s2.INDEX_NAME = s.INDEX_NAME
                            ORDER BY s2.SEQ_IN_INDEX -- preserve column order within the index
                        ) AS idx_cols_ordered
                    )
                ) AS idx_json
                FROM (
                    -- Deduplicate: STATISTICS has one row per (index, column), but we need one row per index.
                    -- DISTINCT on INDEX_NAME gives us one entry per index with its metadata.
                    SELECT DISTINCT INDEX_NAME, INDEX_TYPE, NON_UNIQUE, TABLE_SCHEMA, TABLE_NAME
                    FROM INFORMATION_SCHEMA.STATISTICS
                    WHERE TABLE_SCHEMA = t.TABLE_SCHEMA AND TABLE_NAME = t.TABLE_NAME
                ) s
            ) AS idxs_ordered
        ) AS \`indexes\`

    -- === Main FROM: INFORMATION_SCHEMA.TABLES lists all tables and views ===
    FROM INFORMATION_SCHEMA.TABLES t
    -- Join VIEWS to get VIEW_DEFINITION for view tables
    LEFT JOIN INFORMATION_SCHEMA.VIEWS v
        ON t.TABLE_SCHEMA = v.TABLE_SCHEMA AND t.TABLE_NAME = v.TABLE_NAME
    WHERE t.TABLE_SCHEMA = ? -- only the target database
      AND t.TABLE_TYPE IN ('BASE TABLE', 'VIEW') -- exclude system tables like SYSTEM VIEW
      AND t.TABLE_NAME <> '_prisma_migrations' -- exclude Prisma migration tracking table
    ORDER BY t.TABLE_NAME;
  `;
}
__name(getTableIntrospectionQuery, "getTableIntrospectionQuery");
1338
/**
 * Builds the MySQL enum introspection SQL: one row per enum-typed column in
 * the target database (bound via the `?` placeholder), with the raw
 * COLUMN_TYPE string (e.g. "enum('a','b')") to be parsed by the caller.
 */
function getEnumIntrospectionQuery() {
  return `
    SELECT
        c.TABLE_NAME AS table_name, -- table containing the enum column
        c.COLUMN_NAME AS column_name, -- column name
        c.COLUMN_TYPE AS column_type -- full type string including values (e.g., "enum('val1','val2')")
    FROM INFORMATION_SCHEMA.COLUMNS c
    WHERE c.TABLE_SCHEMA = ? -- only the target database
      AND c.DATA_TYPE = 'enum' -- only enum columns
    ORDER BY c.TABLE_NAME, c.COLUMN_NAME;
  `;
}
__name(getEnumIntrospectionQuery, "getEnumIntrospectionQuery");
1351
// Extracts the value list from a MySQL enum column type string, e.g.
// "enum('a','b')" -> ["a", "b"]. A doubled quote ('') inside a value decodes
// to a single quote; separators (commas/spaces) between values are skipped.
// Returns an empty array when the input is not an enum type string.
function parseEnumValues(columnType) {
  const match = columnType.match(/^enum\((.+)\)$/i);
  if (!match || !match[1]) return [];
  const body = match[1];
  const result = [];
  let buf = "";
  let inside = false;
  let pos = 0;
  while (pos < body.length) {
    const ch = body[pos];
    if (ch === "'") {
      if (!inside) {
        // opening quote: start collecting a value
        inside = true;
        pos += 1;
      } else if (body[pos + 1] === "'") {
        // '' escape sequence -> literal quote inside the value
        buf += "'";
        pos += 2;
      } else {
        // closing quote: emit the value, then skip separators
        result.push(buf);
        buf = "";
        inside = false;
        pos += 1;
        while (pos < body.length && (body[pos] === "," || body[pos] === " ")) {
          pos += 1;
        }
      }
    } else {
      if (inside) {
        buf += ch;
      }
      pos += 1;
    }
  }
  return result;
}
1388
+ __name(parseEnumValues, "parseEnumValues");
1389
+
1390
+ // src/actions/pull/provider/postgresql.ts
1391
+ import { DataFieldAttributeFactory as DataFieldAttributeFactory3 } from "@zenstackhq/language/factory";
1392
+ import { Client } from "pg";
1393
// Maps internal pg_type names (as reported by introspection) to the standard
// SQL spellings used by getDefaultDatabaseType comparisons.
var pgTypnameToStandard = {
  int2: "smallint",
  int4: "integer",
  int8: "bigint",
  float4: "real",
  float8: "double precision",
  bool: "boolean",
  bpchar: "character",
  numeric: "decimal"
};
// Implicit bit-precision PostgreSQL reports for fixed-size numeric types;
// used to suppress redundant precision arguments on @db.* attributes.
var standardTypePrecisions = {
  int2: 16,
  smallint: 16,
  int4: 32,
  integer: 32,
  int8: 64,
  bigint: 64,
  float4: 24,
  real: 24,
  float8: 53,
  "double precision": 53
};
// Maps pg_type names to ZenStack's native-type attribute names (the suffix of
// the corresponding @db.* attribute).
var pgTypnameToZenStackNativeType = {
  // integers
  int2: "SmallInt",
  smallint: "SmallInt",
  int4: "Integer",
  integer: "Integer",
  int8: "BigInt",
  bigint: "BigInt",
  // decimals and floats
  numeric: "Decimal",
  decimal: "Decimal",
  float4: "Real",
  real: "Real",
  float8: "DoublePrecision",
  "double precision": "DoublePrecision",
  // boolean
  bool: "Boolean",
  boolean: "Boolean",
  // strings
  text: "Text",
  varchar: "VarChar",
  "character varying": "VarChar",
  bpchar: "Char",
  character: "Char",
  // uuid
  uuid: "Uuid",
  // dates/times
  date: "Date",
  time: "Time",
  timetz: "Timetz",
  timestamp: "Timestamp",
  timestamptz: "Timestamptz",
  // binary
  bytea: "ByteA",
  // json
  json: "Json",
  jsonb: "JsonB",
  // xml
  xml: "Xml",
  // network types
  inet: "Inet",
  // bit strings
  bit: "Bit",
  varbit: "VarBit",
  // oid
  oid: "Oid",
  // money
  money: "Money",
  // citext extension
  citext: "Citext"
};
1466
// PostgreSQL introspection provider for `zenstack db pull`. Maps pg_catalog /
// information_schema metadata onto ZenStack builtin types and attribute
// factories for the generated ZModel schema.
var postgresql = {
  // PostgreSQL supports both multi-schema and named native enum types.
  isSupportedFeature(feature) {
    const supportedFeatures = ["Schema", "NativeEnum"];
    return supportedFeatures.includes(feature);
  },
  // Maps an internal pg_type name to a ZenStack builtin type. Array types are
  // reported with a leading underscore (e.g. "_int4"), which is stripped
  // before matching and reflected in `isArray`.
  getBuiltinType(type2) {
    const t = (type2 || "").toLowerCase();
    const isArray = t.startsWith("_");
    switch (t.replace(/^_/, "")) {
      // integers
      case "int2":
      case "smallint":
      case "int4":
      case "integer":
        return { type: "Int", isArray };
      case "int8":
      case "bigint":
        return { type: "BigInt", isArray };
      // decimals and floats
      case "numeric":
      case "decimal":
        return { type: "Decimal", isArray };
      case "float4":
      case "real":
      case "float8":
      case "double precision":
        return { type: "Float", isArray };
      // boolean
      case "bool":
      case "boolean":
        return { type: "Boolean", isArray };
      // strings
      case "text":
      case "varchar":
      case "bpchar":
      case "character varying":
      case "character":
        return { type: "String", isArray };
      // uuid
      case "uuid":
        return { type: "String", isArray };
      // dates/times
      case "date":
      case "time":
      case "timetz":
      case "timestamp":
      case "timestamptz":
        return { type: "DateTime", isArray };
      // binary
      case "bytea":
        return { type: "Bytes", isArray };
      // json
      case "json":
      case "jsonb":
        return { type: "Json", isArray };
      default:
        return { type: "Unsupported", isArray };
    }
  },
  // Connects with `pg` and returns { tables, enums }, filtered down to the
  // schemas listed in `options.schemas`.
  async introspect(connectionString, options) {
    const client = new Client({ connectionString });
    await client.connect();
    try {
      const { rows: tables } = await client.query(tableIntrospectionQuery);
      const { rows: enums } = await client.query(enumIntrospectionQuery);
      const filteredTables = tables.filter((t) => options.schemas.includes(t.schema));
      const filteredEnums = enums.filter((e) => options.schemas.includes(e.schema_name));
      return { enums: filteredEnums, tables: filteredTables };
    } finally {
      await client.end();
    }
  },
  // The database type PostgreSQL (via Prisma conventions) would pick by
  // default for each builtin type; used to decide whether an explicit @db.*
  // attribute is needed. Returns undefined for types with no default mapping.
  getDefaultDatabaseType(type2) {
    switch (type2) {
      case "String":
        return { type: "text" };
      case "Boolean":
        return { type: "boolean" };
      case "Int":
        return { type: "integer" };
      case "BigInt":
        return { type: "bigint" };
      case "Float":
        return { type: "double precision" };
      case "Decimal":
        return { type: "decimal" };
      case "DateTime":
        return { type: "timestamp", precision: 3 };
      case "Json":
        return { type: "jsonb" };
      case "Bytes":
        return { type: "bytea" };
    }
  },
  // Converts a raw PostgreSQL column default into an AST-builder callback for
  // the @default attribute value, or null when no default should be emitted.
  // Defaults containing a '::' type cast are delegated to typeCastingConvert.
  getDefaultValue({ defaultValue, fieldType, datatype, datatype_name, services, enums }) {
    const val = defaultValue.trim();
    // enum defaults become references to the matching enum field
    if (datatype === "enum" && datatype_name) {
      const enumDef = enums.find((e) => getDbName(e) === datatype_name);
      if (enumDef) {
        // e.g. "'active'::status" -> "active"
        const enumValue = val.replace(/'/g, "").split("::")[0]?.trim();
        const enumField = enumDef.fields.find((f) => getDbName(f) === enumValue);
        if (enumField) {
          return (ab) => ab.ReferenceExpr.setTarget(enumField);
        }
      }
      return typeCastingConvert({ defaultValue, enums, val, services });
    }
    switch (fieldType) {
      case "DateTime":
        if (val === "CURRENT_TIMESTAMP" || val === "now()") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
        }
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Int":
      case "BigInt":
        // serial/bigserial columns default to nextval('...') -> autoincrement()
        if (val.startsWith("nextval(")) {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
        }
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        return (ab) => ab.NumberLiteral.setValue(val);
      case "Float":
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        return normalizeFloatDefault(val);
      case "Decimal":
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        return normalizeDecimalDefault(val);
      case "Boolean":
        return (ab) => ab.BooleanLiteral.setValue(val === "true");
      case "String":
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        // strip outer quotes and un-double embedded '' escapes
        if (val.startsWith("'") && val.endsWith("'")) {
          return (ab) => ab.StringLiteral.setValue(val.slice(1, -1).replace(/''/g, "'"));
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Json":
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Bytes":
        if (val.includes("::")) {
          return typeCastingConvert({ defaultValue, enums, val, services });
        }
        return (ab) => ab.StringLiteral.setValue(val);
    }
    // anything that looks like a function call is preserved via dbgenerated(...)
    if (val.includes("(") && val.includes(")")) {
      return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
    }
    console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
    return null;
  },
  // Produces extra field attribute factories: @updatedAt (heuristic on the
  // field name) and @db.* when the column's native type or size differs from
  // the default mapping for its builtin type. Precision arguments matching
  // PostgreSQL's implicit standard precision are omitted.
  getFieldAttributes({ fieldName, fieldType, datatype, length, precision, services }) {
    const factories = [];
    if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
      factories.push(new DataFieldAttributeFactory3().setDecl(getAttributeRef("@updatedAt", services)));
    }
    // translate the pg_type name to the @db.* attribute naming, then look it up
    const nativeTypeName = pgTypnameToZenStackNativeType[datatype.toLowerCase()] ?? datatype;
    const dbAttr = services.shared.workspace.IndexManager.allElements("Attribute").find((d) => d.name.toLowerCase() === `@db.${nativeTypeName.toLowerCase()}`)?.node;
    const defaultDatabaseType = this.getDefaultDatabaseType(fieldType);
    const normalizedDatatype = pgTypnameToStandard[datatype.toLowerCase()] ?? datatype.toLowerCase();
    const standardPrecision = standardTypePrecisions[datatype.toLowerCase()];
    const isStandardPrecision = standardPrecision !== void 0 && precision === standardPrecision;
    // emit @db.* only when the actual type/size deviates from the default mapping
    if (dbAttr && defaultDatabaseType && (defaultDatabaseType.type !== normalizedDatatype || defaultDatabaseType.precision && defaultDatabaseType.precision !== (length ?? precision))) {
      const dbAttrFactory = new DataFieldAttributeFactory3().setDecl(dbAttr);
      if ((length || precision) && !isStandardPrecision) {
        dbAttrFactory.addArg((a) => a.NumberLiteral.setValue(length || precision));
      }
      factories.push(dbAttrFactory);
    }
    return factories;
  }
};
1749
// PostgreSQL enum introspection SQL: one row per enum type, with its labels
// aggregated into a JSON array in declaration order.
var enumIntrospectionQuery = `
SELECT
    n.nspname AS schema_name, -- schema the enum belongs to (e.g., 'public')
    t.typname AS enum_type, -- enum type name as defined in CREATE TYPE
    coalesce(json_agg(e.enumlabel ORDER BY e.enumsortorder), '[]') AS values -- ordered list of enum labels as JSON array
FROM pg_type t -- pg_type: catalog of all data types
JOIN pg_enum e ON t.oid = e.enumtypid -- pg_enum: one row per enum label; join to get labels for this enum type
JOIN pg_namespace n ON n.oid = t.typnamespace -- pg_namespace: schema info; join to get the schema name
GROUP BY schema_name, enum_type -- one row per enum type, with all labels aggregated
ORDER BY schema_name, enum_type;`;
1759
// PostgreSQL introspection query used by `db pull`. Returns one row per
// table/view; the "columns" and "indexes" result fields are JSON-aggregated
// arrays, so the entire schema shape is fetched in a single round trip.
// The `--` comments below are part of the SQL string sent to the server
// (PostgreSQL ignores them), so they must not be converted to JS comments.
var tableIntrospectionQuery = `
-- Main query: one row per table/view with columns and indexes as nested JSON arrays.
-- Joins pg_class (tables/views) with pg_namespace (schemas).
SELECT
    "ns"."nspname" AS "schema", -- schema name (e.g., 'public')
    "cls"."relname" AS "name", -- table or view name
    CASE "cls"."relkind" -- relkind: 'r' = ordinary table, 'v' = view
        WHEN 'r' THEN 'table'
        WHEN 'v' THEN 'view'
        ELSE NULL
    END AS "type",
    CASE -- for views, retrieve the SQL definition
        WHEN "cls"."relkind" = 'v' THEN pg_get_viewdef("cls"."oid", true)
        ELSE NULL
    END AS "definition",

    -- ===== COLUMNS subquery =====
    -- Aggregates all columns for this table into a JSON array.
    (
        SELECT coalesce(json_agg(agg), '[]')
        FROM (
            SELECT
                "att"."attname" AS "name", -- column name

                -- datatype: if the type is an enum, report 'enum';
                -- if the column is generated/computed, construct the full DDL-like type definition
                -- (e.g., "text GENERATED ALWAYS AS (expr) STORED") so it can be rendered as Unsupported("...");
                -- otherwise use the pg_type name.
                CASE
                    WHEN EXISTS (
                        SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
                        WHERE "e"."enumtypid" = "typ"."oid"
                    ) THEN 'enum'
                    WHEN "att"."attgenerated" != '' THEN
                        format_type("att"."atttypid", "att"."atttypmod")
                        || ' GENERATED ALWAYS AS ('
                        || pg_get_expr("def"."adbin", "def"."adrelid")
                        || ') '
                        || CASE "att"."attgenerated"
                            WHEN 's' THEN 'STORED'
                            WHEN 'v' THEN 'VIRTUAL'
                            ELSE 'STORED'
                        END
                    ELSE "typ"."typname"::text -- internal type name (e.g., 'int4', 'varchar', 'text'); cast to text to prevent CASE from coercing result to name type (max 63 chars)
                END AS "datatype",

                -- datatype_name: for enums only, the actual enum type name (used to look up the enum definition)
                CASE
                    WHEN EXISTS (
                        SELECT 1 FROM "pg_catalog"."pg_enum" AS "e"
                        WHERE "e"."enumtypid" = "typ"."oid"
                    ) THEN "typ"."typname"
                    ELSE NULL
                END AS "datatype_name",

                "tns"."nspname" AS "datatype_schema", -- schema where the data type is defined
                "c"."character_maximum_length" AS "length", -- max length for char/varchar types (from information_schema)
                COALESCE("c"."numeric_precision", "c"."datetime_precision") AS "precision", -- numeric or datetime precision

                -- Foreign key info (NULL if column is not part of a FK constraint)
                "fk_ns"."nspname" AS "foreign_key_schema", -- schema of the referenced table
                "fk_cls"."relname" AS "foreign_key_table", -- referenced table name
                "fk_att"."attname" AS "foreign_key_column", -- referenced column name
                "fk_con"."conname" AS "foreign_key_name", -- FK constraint name

                -- FK referential actions: decode single-char codes to human-readable strings
                CASE "fk_con"."confupdtype"
                    WHEN 'a' THEN 'NO ACTION'
                    WHEN 'r' THEN 'RESTRICT'
                    WHEN 'c' THEN 'CASCADE'
                    WHEN 'n' THEN 'SET NULL'
                    WHEN 'd' THEN 'SET DEFAULT'
                    ELSE NULL
                END AS "foreign_key_on_update",
                CASE "fk_con"."confdeltype"
                    WHEN 'a' THEN 'NO ACTION'
                    WHEN 'r' THEN 'RESTRICT'
                    WHEN 'c' THEN 'CASCADE'
                    WHEN 'n' THEN 'SET NULL'
                    WHEN 'd' THEN 'SET DEFAULT'
                    ELSE NULL
                END AS "foreign_key_on_delete",

                -- pk: true if this column is part of the table's primary key constraint
                "pk_con"."conkey" IS NOT NULL AS "pk",

                -- unique: true if the column has a single-column UNIQUE constraint OR a single-column unique index
                (
                    -- Check for a single-column UNIQUE constraint (contype = 'u')
                    EXISTS (
                        SELECT 1
                        FROM "pg_catalog"."pg_constraint" AS "u_con"
                        WHERE "u_con"."contype" = 'u' -- 'u' = unique constraint
                        AND "u_con"."conrelid" = "cls"."oid" -- on this table
                        AND array_length("u_con"."conkey", 1) = 1 -- single-column only
                        AND "att"."attnum" = ANY ("u_con"."conkey") -- this column is in the constraint
                    )
                    OR
                    -- Check for a single-column unique index (may exist without an explicit constraint)
                    EXISTS (
                        SELECT 1
                        FROM "pg_catalog"."pg_index" AS "u_idx"
                        WHERE "u_idx"."indrelid" = "cls"."oid" -- on this table
                        AND "u_idx"."indisunique" = TRUE -- it's a unique index
                        AND "u_idx"."indnkeyatts" = 1 -- single key column
                        AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[]) -- this column is the key
                    )
                ) AS "unique",

                -- unique_name: the name of the unique constraint or index (whichever exists first)
                (
                    SELECT COALESCE(
                        -- Try constraint name first
                        (
                            SELECT "u_con"."conname"
                            FROM "pg_catalog"."pg_constraint" AS "u_con"
                            WHERE "u_con"."contype" = 'u'
                            AND "u_con"."conrelid" = "cls"."oid"
                            AND array_length("u_con"."conkey", 1) = 1
                            AND "att"."attnum" = ANY ("u_con"."conkey")
                            LIMIT 1
                        ),
                        -- Fall back to unique index name
                        (
                            SELECT "u_idx_cls"."relname"
                            FROM "pg_catalog"."pg_index" AS "u_idx"
                            JOIN "pg_catalog"."pg_class" AS "u_idx_cls" ON "u_idx"."indexrelid" = "u_idx_cls"."oid"
                            WHERE "u_idx"."indrelid" = "cls"."oid"
                            AND "u_idx"."indisunique" = TRUE
                            AND "u_idx"."indnkeyatts" = 1
                            AND "att"."attnum" = ANY ("u_idx"."indkey"::int2[])
                            LIMIT 1
                        )
                    )
                ) AS "unique_name",

                "att"."attgenerated" != '' AS "computed", -- true if column is a generated/computed column
                -- For generated columns, pg_attrdef stores the generation expression (not a default),
                -- so we must null it out to avoid emitting a spurious @default(dbgenerated(...)) attribute.
                CASE
                    WHEN "att"."attgenerated" != '' THEN NULL
                    ELSE pg_get_expr("def"."adbin", "def"."adrelid")
                END AS "default", -- column default expression as text (e.g., 'nextval(...)', '0', 'now()')
                "att"."attnotnull" != TRUE AS "nullable", -- true if column allows NULL values

                -- options: for enum columns, aggregates all allowed enum labels into a JSON array
                coalesce(
                    (
                        SELECT json_agg("enm"."enumlabel") AS "o"
                        FROM "pg_catalog"."pg_enum" AS "enm"
                        WHERE "enm"."enumtypid" = "typ"."oid"
                    ),
                    '[]'
                ) AS "options"

            -- === FROM / JOINs for the columns subquery ===

            -- pg_attribute: one row per table column (attnum >= 0 excludes system columns)
            FROM "pg_catalog"."pg_attribute" AS "att"

            -- pg_type: data type of the column (e.g., int4, text, custom_enum)
            INNER JOIN "pg_catalog"."pg_type" AS "typ" ON "typ"."oid" = "att"."atttypid"

            -- pg_namespace for the type: needed to determine which schema the type lives in
            INNER JOIN "pg_catalog"."pg_namespace" AS "tns" ON "tns"."oid" = "typ"."typnamespace"

            -- information_schema.columns: provides length/precision info not easily available from pg_catalog
            LEFT JOIN "information_schema"."columns" AS "c" ON "c"."table_schema" = "ns"."nspname"
                AND "c"."table_name" = "cls"."relname"
                AND "c"."column_name" = "att"."attname"

            -- pg_constraint (primary key): join on contype='p' to detect if column is part of PK
            LEFT JOIN "pg_catalog"."pg_constraint" AS "pk_con" ON "pk_con"."contype" = 'p'
                AND "pk_con"."conrelid" = "cls"."oid"
                AND "att"."attnum" = ANY ("pk_con"."conkey")

            -- pg_constraint (foreign key): join on contype='f' to get FK details for this column
            LEFT JOIN "pg_catalog"."pg_constraint" AS "fk_con" ON "fk_con"."contype" = 'f'
                AND "fk_con"."conrelid" = "cls"."oid"
                AND "att"."attnum" = ANY ("fk_con"."conkey")

            -- pg_class for FK target table: resolve the referenced table's OID to its name
            LEFT JOIN "pg_catalog"."pg_class" AS "fk_cls" ON "fk_cls"."oid" = "fk_con"."confrelid"

            -- pg_namespace for FK target: get the schema of the referenced table
            LEFT JOIN "pg_catalog"."pg_namespace" AS "fk_ns" ON "fk_ns"."oid" = "fk_cls"."relnamespace"

            -- pg_attribute for FK target column: resolve the referenced column number to its name.
            -- Use array_position to correlate by position: find this source column's index in conkey,
            -- then pick the referenced attnum at that same index from confkey.
            -- This ensures composite FKs correctly map each source column to its corresponding target column.
            LEFT JOIN "pg_catalog"."pg_attribute" AS "fk_att" ON "fk_att"."attrelid" = "fk_cls"."oid"
                AND "fk_att"."attnum" = "fk_con"."confkey"[array_position("fk_con"."conkey", "att"."attnum")]

            -- pg_attrdef: column defaults; adbin contains the internal expression, decoded via pg_get_expr()
            LEFT JOIN "pg_catalog"."pg_attrdef" AS "def" ON "def"."adrelid" = "cls"."oid" AND "def"."adnum" = "att"."attnum"

            WHERE
                "att"."attrelid" = "cls"."oid" -- only columns belonging to this table
                AND "att"."attnum" >= 0 -- exclude system columns (ctid, xmin, etc. have attnum < 0)
                AND "att"."attisdropped" != TRUE -- exclude dropped (deleted) columns
            ORDER BY "att"."attnum" -- preserve original column order
        ) AS agg
    ) AS "columns",

    -- ===== INDEXES subquery =====
    -- Aggregates all indexes for this table into a JSON array.
    (
        SELECT coalesce(json_agg(agg), '[]')
        FROM (
            SELECT
                "idx_cls"."relname" AS "name", -- index name
                "am"."amname" AS "method", -- access method (e.g., 'btree', 'hash', 'gin', 'gist')
                "idx"."indisunique" AS "unique", -- true if unique index
                "idx"."indisprimary" AS "primary", -- true if this is the PK index
                "idx"."indisvalid" AS "valid", -- false during concurrent index builds
                "idx"."indisready" AS "ready", -- true when index is ready for inserts
                ("idx"."indpred" IS NOT NULL) AS "partial", -- true if index has a WHERE clause (partial index)
                pg_get_expr("idx"."indpred", "idx"."indrelid") AS "predicate", -- the WHERE clause expression for partial indexes

                -- Index columns: iterate over each position in the index key array
                (
                    SELECT json_agg(
                        json_build_object(
                            -- 'name': column name, or for expression indexes the expression text
                            'name', COALESCE("att"."attname", pg_get_indexdef("idx"."indexrelid", "s"."i", true)),
                            -- 'expression': non-null only for expression-based index columns (e.g., lower(name))
                            'expression', CASE WHEN "att"."attname" IS NULL THEN pg_get_indexdef("idx"."indexrelid", "s"."i", true) ELSE NULL END,
                            -- 'order': sort direction; bit 0 of indoption = 1 means DESC
                            'order', CASE ((( "idx"."indoption"::int2[] )["s"."i"] & 1)) WHEN 1 THEN 'DESC' ELSE 'ASC' END,
                            -- 'nulls': null ordering; bit 1 of indoption = 1 means NULLS FIRST
                            'nulls', CASE (((( "idx"."indoption"::int2[] )["s"."i"] >> 1) & 1)) WHEN 1 THEN 'NULLS FIRST' ELSE 'NULLS LAST' END
                        )
                        ORDER BY "s"."i" -- preserve column order within the index
                    )
                    -- generate_subscripts creates one row per index key position (1-based)
                    FROM generate_subscripts("idx"."indkey"::int2[], 1) AS "s"("i")
                    -- Join to pg_attribute to resolve column numbers to names
                    -- NULL attname means it's an expression index column
                    LEFT JOIN "pg_catalog"."pg_attribute" AS "att"
                        ON "att"."attrelid" = "cls"."oid"
                        AND "att"."attnum" = ("idx"."indkey"::int2[])["s"."i"]
                ) AS "columns"

            FROM "pg_catalog"."pg_index" AS "idx" -- pg_index: one row per index
            JOIN "pg_catalog"."pg_class" AS "idx_cls" ON "idx"."indexrelid" = "idx_cls"."oid" -- index's own pg_class entry (for the name)
            JOIN "pg_catalog"."pg_am" AS "am" ON "idx_cls"."relam" = "am"."oid" -- access method catalog
            WHERE "idx"."indrelid" = "cls"."oid" -- only indexes on this table
            ORDER BY "idx_cls"."relname"
        ) AS agg
    ) AS "indexes"

-- === Main FROM: pg_class (tables and views) joined with pg_namespace (schemas) ===
FROM "pg_catalog"."pg_class" AS "cls"
INNER JOIN "pg_catalog"."pg_namespace" AS "ns" ON "cls"."relnamespace" = "ns"."oid"
WHERE
    "ns"."nspname" !~ '^pg_' -- exclude PostgreSQL internal schemas (pg_catalog, pg_toast, etc.)
    AND "ns"."nspname" != 'information_schema' -- exclude the information_schema
    AND "cls"."relkind" IN ('r', 'v') -- only tables ('r') and views ('v')
    AND "cls"."relname" !~ '^pg_' -- exclude system tables starting with pg_
    AND "cls"."relname" !~ '_prisma_migrations' -- exclude Prisma migration tracking table
ORDER BY "ns"."nspname", "cls"."relname" ASC;
`;
2022
// Converts a PostgreSQL type-cast default value (e.g. `'foo'::text`,
// `1.5::real`, `'A'::my_enum`) into an AST-builder callback.
// Returns null when the default is an explicit NULL of a string-ish type;
// unknown cast targets are preserved verbatim via dbgenerated("...").
function typeCastingConvert({ defaultValue, enums, val, services }) {
  // Strip quotes and split "value::type" into its two halves.
  const [literal, castType] = val.replace(/'/g, "").split("::").map((part) => part.trim());
  const stringishTypes = ["character varying", "uuid", "json", "jsonb", "text"];
  if (stringishTypes.includes(castType)) {
    return literal === "NULL" ? null : (ab) => ab.StringLiteral.setValue(literal);
  }
  if (castType === "real") {
    return (ab) => ab.NumberLiteral.setValue(literal);
  }
  // Otherwise the cast may target an enum type declared in the schema.
  const enumDecl = enums.find((e) => getDbName(e, true) === castType);
  if (!enumDecl) {
    // Not a known enum: keep the raw default expression as dbgenerated("...").
    return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("dbgenerated", services)).addArg((a) => a.setValue((v) => v.StringLiteral.setValue(val)));
  }
  const matchingField = enumDecl.fields.find((f) => getDbName(f) === literal);
  if (!matchingField) {
    throw new CliError(`Enum value ${literal} not found in enum ${castType} for default value ${defaultValue}`);
  }
  return (ab) => ab.ReferenceExpr.setTarget(matchingField);
}
__name(typeCastingConvert, "typeCastingConvert");
2048
+
2049
+ // src/actions/pull/provider/sqlite.ts
2050
+ import { DataFieldAttributeFactory as DataFieldAttributeFactory4 } from "@zenstackhq/language/factory";
2051
// Introspection provider for SQLite, used by `db pull`.
var sqlite = {
  // Feature flags: SQLite supports neither multiple schemas nor native enums,
  // so every feature (including unrecognized ones) is reported as unsupported.
  isSupportedFeature(feature) {
    switch (feature) {
      case "Schema":
        return false;
      case "NativeEnum":
        return false;
      default:
        return false;
    }
  },
  // Maps a raw SQLite declared column type to a ZModel builtin type.
  // The declared type is lowercased and any trailing "(...)" length/precision
  // suffix is stripped before matching. Unmatched types fall back to
  // affinity-style substring checks, then to Unsupported.
  getBuiltinType(type2) {
    const t = (type2 || "").toLowerCase().trim().replace(/\(.*\)$/, "").trim();
    const isArray = false; // SQLite has no array column types
    switch (t) {
      // INTEGER types (SQLite: INT, INTEGER, TINYINT, SMALLINT, MEDIUMINT, INT2, INT8)
      case "integer":
      case "int":
      case "tinyint":
      case "smallint":
      case "mediumint":
      case "int2":
      case "int8":
        return { type: "Int", isArray };
      // BIGINT - map to BigInt for large integers
      case "bigint":
      case "unsigned big int":
        return { type: "BigInt", isArray };
      // TEXT types (SQLite: CHARACTER, VARCHAR, VARYING CHARACTER, NCHAR, NATIVE CHARACTER, NVARCHAR, TEXT, CLOB)
      case "text":
      case "varchar":
      case "char":
      case "character":
      case "varying character":
      case "nchar":
      case "native character":
      case "nvarchar":
      case "clob":
        return { type: "String", isArray };
      // BLOB type
      case "blob":
        return { type: "Bytes", isArray };
      // REAL types (SQLite: REAL, DOUBLE, DOUBLE PRECISION, FLOAT)
      case "real":
      case "float":
      case "double":
      case "double precision":
        return { type: "Float", isArray };
      // NUMERIC types (SQLite: NUMERIC, DECIMAL)
      case "numeric":
      case "decimal":
        return { type: "Decimal", isArray };
      // DateTime types
      case "datetime":
      case "date":
      case "time":
      case "timestamp":
        return { type: "DateTime", isArray };
      // JSON types
      case "json":
      case "jsonb":
        return { type: "Json", isArray };
      // Boolean types
      case "boolean":
      case "bool":
        return { type: "Boolean", isArray };
      default: {
        // Empty declared type (allowed in SQLite): treat as BLOB affinity.
        if (!t) {
          return { type: "Bytes", isArray };
        }
        // Affinity-style substring fallbacks, mirroring SQLite's own
        // type-affinity rules (INT -> Int, CHAR/CLOB/TEXT -> String, ...).
        if (t.includes("int")) {
          return { type: "Int", isArray };
        }
        if (t.includes("char") || t.includes("clob") || t.includes("text")) {
          return { type: "String", isArray };
        }
        if (t.includes("blob")) {
          return { type: "Bytes", isArray };
        }
        if (t.includes("real") || t.includes("floa") || t.includes("doub")) {
          return { type: "Float", isArray };
        }
        return { type: "Unsupported", isArray };
      }
    }
  },
  // SQLite columns get no native-type (@db.*) annotation.
  getDefaultDatabaseType() {
    return void 0;
  },
  // Opens the database file read-only via better-sqlite3 and reads
  // table/column/index/FK metadata from sqlite_schema and PRAGMA statements.
  // Returns { tables, enums }; enums is always empty for SQLite.
  async introspect(connectionString, _options) {
    const SQLite2 = (await import("better-sqlite3")).default;
    const db = new SQLite2(connectionString, {
      readonly: true
    });
    try {
      // Helper: run a query and return all rows.
      const all = /* @__PURE__ */ __name((sql) => {
        const stmt = db.prepare(sql);
        return stmt.all();
      }, "all");
      // All user tables and views, with their CREATE statements (system
      // tables named sqlite_* are excluded).
      const tablesRaw = all("SELECT name, type, sql AS definition FROM sqlite_schema WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%' ORDER BY name");
      // Tables whose DDL uses AUTOINCREMENT; their pk columns get an
      // "autoincrement" pseudo-default below.
      const autoIncrementTables = /* @__PURE__ */ new Set();
      for (const t of tablesRaw) {
        if (t.type === "table" && t.definition) {
          if (/\bAUTOINCREMENT\b/i.test(t.definition)) {
            autoIncrementTables.add(t.name);
          }
        }
      }
      const tables = [];
      for (const t of tablesRaw) {
        const tableName = t.name;
        const schema = ""; // SQLite has no schema concept; use empty string
        const hasAutoIncrement = autoIncrementTables.has(tableName);
        // table_xinfo includes hidden/generated columns (unlike table_info).
        // Single quotes are doubled to escape them inside the PRAGMA string.
        const columnsInfo = all(`PRAGMA table_xinfo('${tableName.replace(/'/g, "''")}')`);
        const tableNameEsc = tableName.replace(/'/g, "''");
        // Skip implicit indexes SQLite creates for PK/UNIQUE constraints.
        const idxList = all(`PRAGMA index_list('${tableNameEsc}')`).filter((r) => !r.name.startsWith("sqlite_autoindex_"));
        // Column names covered by a single-column, non-partial unique index.
        const uniqueSingleColumn = /* @__PURE__ */ new Set();
        const uniqueIndexRows = idxList.filter((r) => r.unique === 1 && r.partial !== 1);
        for (const idx of uniqueIndexRows) {
          const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
          if (idxCols.length === 1 && idxCols[0]?.name) {
            uniqueSingleColumn.add(idxCols[0].name);
          }
        }
        const indexes = idxList.map((idx) => {
          const idxCols = all(`PRAGMA index_info('${idx.name.replace(/'/g, "''")}')`);
          return {
            name: idx.name,
            method: null, // SQLite exposes no access-method info
            unique: idx.unique === 1,
            primary: false,
            valid: true,
            ready: true,
            partial: idx.partial === 1,
            // Placeholder marker: the actual WHERE clause is not read here.
            predicate: idx.partial === 1 ? "[partial]" : null,
            columns: idxCols.map((col) => ({
              name: col.name,
              expression: null,
              order: null,
              nulls: null
            }))
          };
        });
        const fkRows = all(`PRAGMA foreign_key_list('${tableName.replace(/'/g, "''")}')`);
        // FK constraint names are not available from the PRAGMA output, so
        // recover them by scanning the table's CREATE statement.
        const fkConstraintNames = /* @__PURE__ */ new Map();
        if (t.definition) {
          const fkRegex = /CONSTRAINT\s+(?:["'`]([^"'`]+)["'`]|(\w+))\s+FOREIGN\s+KEY\s*\(([^)]+)\)/gi;
          let match;
          while ((match = fkRegex.exec(t.definition)) !== null) {
            const constraintName = match[1] || match[2];
            const columnList = match[3];
            if (constraintName && columnList) {
              const columns2 = columnList.split(",").map((col) => col.trim().replace(/^["'`]|["'`]$/g, ""));
              for (const col of columns2) {
                if (col) {
                  fkConstraintNames.set(col, constraintName);
                }
              }
            }
          }
        }
        // Map source column name -> FK metadata in the shape shared with the
        // other providers.
        const fkByColumn = /* @__PURE__ */ new Map();
        for (const fk of fkRows) {
          fkByColumn.set(fk.from, {
            foreign_key_schema: "",
            foreign_key_table: fk.table || null,
            foreign_key_column: fk.to || null,
            foreign_key_name: fkConstraintNames.get(fk.from) ?? null,
            foreign_key_on_update: fk.on_update ?? null,
            foreign_key_on_delete: fk.on_delete ?? null
          });
        }
        // Full "name TYPE ..." definitions parsed from the DDL, used to
        // report generated columns with their complete type definition.
        const generatedColDefs = t.definition ? extractColumnTypeDefs(t.definition) : /* @__PURE__ */ new Map();
        const columns = [];
        for (const c of columnsInfo) {
          // table_xinfo "hidden": skip 1 (hidden); 2/3 mark generated columns.
          const hidden = c.hidden ?? 0;
          if (hidden === 1) continue;
          const isGenerated = hidden === 2 || hidden === 3;
          const fk = fkByColumn.get(c.name);
          let defaultValue = c.dflt_value;
          if (hasAutoIncrement && c.pk) {
            // Pseudo-default consumed by getDefaultValue() below.
            defaultValue = "autoincrement";
          }
          let datatype = c.type || "";
          if (isGenerated) {
            // Prefer the full DDL definition so the generated expression is kept.
            const fullDef = generatedColDefs.get(c.name);
            if (fullDef) {
              datatype = fullDef;
            }
          }
          columns.push({
            name: c.name,
            datatype,
            datatype_name: null,
            length: null,
            precision: null,
            datatype_schema: schema,
            foreign_key_schema: fk?.foreign_key_schema ?? null,
            foreign_key_table: fk?.foreign_key_table ?? null,
            foreign_key_column: fk?.foreign_key_column ?? null,
            foreign_key_name: fk?.foreign_key_name ?? null,
            foreign_key_on_update: fk?.foreign_key_on_update ?? null,
            foreign_key_on_delete: fk?.foreign_key_on_delete ?? null,
            pk: !!c.pk,
            computed: isGenerated,
            nullable: c.notnull !== 1,
            default: defaultValue,
            unique: uniqueSingleColumn.has(c.name),
            unique_name: null
          });
        }
        tables.push({
          schema,
          name: tableName,
          columns,
          type: t.type,
          definition: t.definition,
          indexes
        });
      }
      const enums = []; // SQLite has no enum types
      return {
        tables,
        enums
      };
    } finally {
      db.close();
    }
  },
  // Converts a raw column default into an AST-builder callback, or null when
  // the default cannot be represented (a warning is logged in that case).
  getDefaultValue({ defaultValue, fieldType, services, enums }) {
    const val = defaultValue.trim();
    switch (fieldType) {
      case "DateTime":
        if (val === "CURRENT_TIMESTAMP" || val === "now()") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("now", services));
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Int":
      case "BigInt":
        // "autoincrement" is the pseudo-default injected by introspect().
        if (val === "autoincrement") {
          return (ab) => ab.InvocationExpr.setFunction(getFunctionRef("autoincrement", services));
        }
        return (ab) => ab.NumberLiteral.setValue(val);
      case "Float":
        return normalizeFloatDefault(val);
      case "Decimal":
        return normalizeDecimalDefault(val);
      case "Boolean":
        // SQLite stores booleans as 0/1; also accept a literal "true".
        return (ab) => ab.BooleanLiteral.setValue(val === "true" || val === "1");
      case "String":
        if (val.startsWith("'") && val.endsWith("'")) {
          const strippedName = val.slice(1, -1);
          // If the quoted value matches a declared enum member, emit a
          // reference to it instead of a plain string literal.
          const enumDef = enums.find((e) => e.fields.find((v) => getDbName(v) === strippedName));
          if (enumDef) {
            const enumField = enumDef.fields.find((v) => getDbName(v) === strippedName);
            if (enumField) return (ab) => ab.ReferenceExpr.setTarget(enumField);
          }
          return (ab) => ab.StringLiteral.setValue(strippedName);
        }
        return (ab) => ab.StringLiteral.setValue(val);
      case "Json":
        return (ab) => ab.StringLiteral.setValue(val);
      case "Bytes":
        return (ab) => ab.StringLiteral.setValue(val);
    }
    console.warn(`Unsupported default value type: "${defaultValue}" for field type "${fieldType}". Skipping default value.`);
    return null;
  },
  // Extra field attributes inferred from naming conventions: DateTime fields
  // named "updatedAt"/"updated_at" (case-insensitive) get @updatedAt.
  getFieldAttributes({ fieldName, fieldType, services }) {
    const factories = [];
    if (fieldType === "DateTime" && (fieldName.toLowerCase() === "updatedat" || fieldName.toLowerCase() === "updated_at")) {
      factories.push(new DataFieldAttributeFactory4().setDecl(getAttributeRef("@updatedAt", services)));
    }
    return factories;
  }
};
2372
// Parses a CREATE TABLE statement and returns a Map of column name ->
// remainder of its definition (type, constraints, generated expression, ...).
// Used to recover the full type definition of generated columns, which
// PRAGMA table_xinfo does not expose.
//
// The scan is quote-aware: commas and parentheses inside '...', "..." or
// `...` spans (e.g. DEFAULT 'a,b' or CHECK (x IN (')'))) no longer corrupt
// the top-level split or the outer-paren matching. SQL's doubled-quote
// escaping ('') is handled naturally by toggling in and out of the quote.
//
// @param {string} ddl - the CREATE TABLE/VIEW SQL text
// @returns {Map<string, string>} column name -> trimmed definition tail;
//   empty Map when the DDL has no (or unbalanced) column list.
function extractColumnTypeDefs(ddl) {
  const openIdx = ddl.indexOf("(");
  if (openIdx === -1) return /* @__PURE__ */ new Map();
  // Find the matching close paren of the column list, ignoring parens that
  // appear inside quoted spans.
  let depth = 1;
  let closeIdx = -1;
  let quote = null; // current quote char, or null when outside a quoted span
  for (let i = openIdx + 1; i < ddl.length; i++) {
    const ch = ddl[i];
    if (quote) {
      if (ch === quote) quote = null;
      continue;
    }
    if (ch === "'" || ch === '"' || ch === "`") {
      quote = ch;
    } else if (ch === "(") {
      depth++;
    } else if (ch === ")") {
      depth--;
      if (depth === 0) {
        closeIdx = i;
        break;
      }
    }
  }
  if (closeIdx === -1) return /* @__PURE__ */ new Map();
  const content = ddl.substring(openIdx + 1, closeIdx);
  // Split the column list on top-level commas (depth 0, outside quotes).
  const defs = [];
  let current = "";
  depth = 0;
  quote = null;
  for (const char of content) {
    if (quote) {
      if (char === quote) quote = null;
      current += char;
      continue;
    }
    if (char === "'" || char === '"' || char === "`") {
      quote = char;
    } else if (char === "(") {
      depth++;
    } else if (char === ")") {
      depth--;
    } else if (char === "," && depth === 0) {
      defs.push(current.trim());
      current = "";
      continue;
    }
    current += char;
  }
  if (current.trim()) defs.push(current.trim());
  // Each def is "name TYPE ..." with the name optionally quoted; table-level
  // constraints (PRIMARY KEY (...), CONSTRAINT ...) also match here but are
  // harmless extra entries, same as in the original implementation.
  const result = /* @__PURE__ */ new Map();
  for (const def of defs) {
    const nameMatch = def.match(/^(?:["'`]([^"'`]+)["'`]|(\w+))\s+(.+)/s);
    if (nameMatch) {
      const name = nameMatch[1] || nameMatch[2];
      const typeDef = nameMatch[3];
      if (name && typeDef) {
        result.set(name, typeDef.trim());
      }
    }
  }
  return result;
}
2416
+ __name(extractColumnTypeDefs, "extractColumnTypeDefs");
2417
+
2418
// src/actions/pull/provider/index.ts
// Registry mapping a datasource provider name (as written in the ZModel
// datasource block) to its introspection implementation.
var providers = {
  mysql: mysql,
  postgresql: postgresql,
  sqlite: sqlite
};
2424
+
234
2425
// src/actions/db.ts
// Dispatches a `db <command>` invocation to its handler. Unrecognized
// commands fall through silently, matching the original switch behavior.
async function run2(command, options) {
  if (command === "push") {
    await runPush(options);
  } else if (command === "pull") {
    await runPull(options);
  }
}
__name(run2, "run");
@@ -264,22 +2458,421 @@ async function runPush(options) {
264
2458
  }
265
2459
  }
266
2460
  __name(runPush, "runPush");
2461
+ async function runPull(options) {
2462
+ const spinner = ora();
2463
+ try {
2464
+ const schemaFile = getSchemaFile(options.schema);
2465
+ const outPath = options.output ? path2.resolve(options.output) : void 0;
2466
+ const treatAsFile = !!outPath && (fs2.existsSync(outPath) && fs2.lstatSync(outPath).isFile() || path2.extname(outPath) !== "");
2467
+ const { model, services } = await loadSchemaDocument(schemaFile, {
2468
+ returnServices: true,
2469
+ mergeImports: treatAsFile
2470
+ });
2471
+ const SUPPORTED_PROVIDERS = Object.keys(providers);
2472
+ const datasource = getDatasource(model);
2473
+ if (!SUPPORTED_PROVIDERS.includes(datasource.provider)) {
2474
+ throw new CliError(`Unsupported datasource provider: ${datasource.provider}`);
2475
+ }
2476
+ const provider = providers[datasource.provider];
2477
+ if (!provider) {
2478
+ throw new CliError(`No introspection provider found for: ${datasource.provider}`);
2479
+ }
2480
+ spinner.start("Introspecting database...");
2481
+ const { enums, tables } = await provider.introspect(datasource.url, {
2482
+ schemas: datasource.allSchemas,
2483
+ modelCasing: options.modelCasing
2484
+ });
2485
+ spinner.succeed("Database introspected");
2486
+ console.log(colors4.blue("Syncing schema..."));
2487
+ const newModel = {
2488
+ $type: "Model",
2489
+ $container: void 0,
2490
+ $containerProperty: void 0,
2491
+ $containerIndex: void 0,
2492
+ declarations: [
2493
+ ...model.declarations.filter((d) => [
2494
+ "DataSource"
2495
+ ].includes(d.$type))
2496
+ ],
2497
+ imports: model.imports
2498
+ };
2499
+ syncEnums({
2500
+ dbEnums: enums,
2501
+ model: newModel,
2502
+ services,
2503
+ options,
2504
+ defaultSchema: datasource.defaultSchema,
2505
+ oldModel: model,
2506
+ provider
2507
+ });
2508
+ const resolvedRelations = [];
2509
+ for (const table of tables) {
2510
+ const relations = syncTable({
2511
+ table,
2512
+ model: newModel,
2513
+ provider,
2514
+ services,
2515
+ options,
2516
+ defaultSchema: datasource.defaultSchema,
2517
+ oldModel: model
2518
+ });
2519
+ resolvedRelations.push(...relations);
2520
+ }
2521
+ for (const relation of resolvedRelations) {
2522
+ const similarRelations = resolvedRelations.filter((rr) => {
2523
+ return rr !== relation && (rr.schema === relation.schema && rr.table === relation.table && rr.references.schema === relation.references.schema && rr.references.table === relation.references.table || rr.schema === relation.references.schema && rr.columns[0] === relation.references.columns[0] && rr.references.schema === relation.schema && rr.references.table === relation.table);
2524
+ }).length;
2525
+ const selfRelation = relation.references.schema === relation.schema && relation.references.table === relation.table;
2526
+ syncRelation({
2527
+ model: newModel,
2528
+ relation,
2529
+ services,
2530
+ options,
2531
+ selfRelation,
2532
+ similarRelations
2533
+ });
2534
+ }
2535
+ consolidateEnums({
2536
+ newModel,
2537
+ oldModel: model
2538
+ });
2539
+ console.log(colors4.blue("Schema synced"));
2540
+ const baseDir = path2.dirname(path2.resolve(schemaFile));
2541
+ const baseDirUrlPath = new URL(`file://${baseDir}`).pathname;
2542
+ const docs = services.shared.workspace.LangiumDocuments.all.filter(({ uri }) => uri.path.toLowerCase().startsWith(baseDirUrlPath.toLowerCase())).toArray();
2543
+ const docsSet = new Set(docs.map((d) => d.uri.toString()));
2544
+ console.log(colors4.bold("\nApplying changes to ZModel..."));
2545
+ const deletedModels = [];
2546
+ const deletedEnums = [];
2547
+ const addedModels = [];
2548
+ const addedEnums = [];
2549
+ const modelChanges = /* @__PURE__ */ new Map();
2550
+ const getModelChanges = /* @__PURE__ */ __name((modelName) => {
2551
+ if (!modelChanges.has(modelName)) {
2552
+ modelChanges.set(modelName, {
2553
+ addedFields: [],
2554
+ deletedFields: [],
2555
+ updatedFields: [],
2556
+ addedAttributes: [],
2557
+ deletedAttributes: [],
2558
+ updatedAttributes: []
2559
+ });
2560
+ }
2561
+ return modelChanges.get(modelName);
2562
+ }, "getModelChanges");
2563
+ services.shared.workspace.IndexManager.allElements("DataModel", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
2564
+ const model2 = decl.node.$container;
2565
+ const index = model2.declarations.findIndex((d) => d === decl.node);
2566
+ model2.declarations.splice(index, 1);
2567
+ deletedModels.push(colors4.red(`- Model ${decl.name} deleted`));
2568
+ });
2569
+ if (provider.isSupportedFeature("NativeEnum")) services.shared.workspace.IndexManager.allElements("Enum", docsSet).filter((declaration) => !newModel.declarations.find((d) => getDbName(d) === getDbName(declaration.node))).forEach((decl) => {
2570
+ const model2 = decl.node.$container;
2571
+ const index = model2.declarations.findIndex((d) => d === decl.node);
2572
+ model2.declarations.splice(index, 1);
2573
+ deletedEnums.push(colors4.red(`- Enum ${decl.name} deleted`));
2574
+ });
2575
+ newModel.declarations.filter((d) => [
2576
+ DataModel,
2577
+ Enum
2578
+ ].includes(d.$type)).forEach((_declaration) => {
2579
+ const newDataModel = _declaration;
2580
+ const declarations = services.shared.workspace.IndexManager.allElements(newDataModel.$type, docsSet).toArray();
2581
+ const originalDataModel = declarations.find((d) => getDbName(d.node) === getDbName(newDataModel))?.node;
2582
+ if (!originalDataModel) {
2583
+ if (newDataModel.$type === "DataModel") {
2584
+ addedModels.push(colors4.green(`+ Model ${newDataModel.name} added`));
2585
+ } else if (newDataModel.$type === "Enum") {
2586
+ addedEnums.push(colors4.green(`+ Enum ${newDataModel.name} added`));
2587
+ }
2588
+ model.declarations.push(newDataModel);
2589
+ newDataModel.$container = model;
2590
+ newDataModel.fields.forEach((f) => {
2591
+ if (f.$type === "DataField" && f.type.reference?.ref) {
2592
+ const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
2593
+ if (ref && f.type.reference) {
2594
+ f.type.reference = {
2595
+ ref,
2596
+ $refText: ref.name ?? f.type.reference.$refText
2597
+ };
2598
+ }
2599
+ }
2600
+ });
2601
+ return;
2602
+ }
2603
+ newDataModel.fields.forEach((f) => {
2604
+ let originalFields = originalDataModel.fields.filter((d) => getDbName(d) === getDbName(f));
2605
+ const isRelationField = f.$type === "DataField" && !!f.attributes?.some((a) => a?.decl?.ref?.name === "@relation");
2606
+ if (originalFields.length === 0 && isRelationField && !getRelationFieldsKey(f)) {
2607
+ return;
2608
+ }
2609
+ if (originalFields.length === 0) {
2610
+ const newFieldsKey = getRelationFieldsKey(f);
2611
+ if (newFieldsKey) {
2612
+ originalFields = originalDataModel.fields.filter((d) => getRelationFieldsKey(d) === newFieldsKey);
2613
+ }
2614
+ }
2615
+ if (originalFields.length === 0) {
2616
+ originalFields = originalDataModel.fields.filter((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
2617
+ }
2618
+ if (originalFields.length === 0) {
2619
+ originalFields = originalDataModel.fields.filter((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
2620
+ }
2621
+ if (originalFields.length > 1) {
2622
+ const isBackReferenceField = !getRelationFieldsKey(f);
2623
+ if (!isBackReferenceField) {
2624
+ console.warn(colors4.yellow(`Found more original fields, need to tweak the search algorithm. ${originalDataModel.name}->[${originalFields.map((of) => of.name).join(", ")}](${f.name})`));
2625
+ }
2626
+ return;
2627
+ }
2628
+ const originalField = originalFields.at(0);
2629
+ if (originalField && f.$type === "DataField" && originalField.$type === "DataField") {
2630
+ const newType = f.type;
2631
+ const oldType = originalField.type;
2632
+ const fieldUpdates = [];
2633
+ const isOldTypeEnumWithoutNativeSupport = oldType.reference?.ref?.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
2634
+ if (newType.type && oldType.type !== newType.type && !isOldTypeEnumWithoutNativeSupport) {
2635
+ fieldUpdates.push(`type: ${oldType.type} -> ${newType.type}`);
2636
+ oldType.type = newType.type;
2637
+ }
2638
+ if (newType.reference?.ref && oldType.reference?.ref) {
2639
+ const newRefName = getDbName(newType.reference.ref);
2640
+ const oldRefName = getDbName(oldType.reference.ref);
2641
+ if (newRefName !== oldRefName) {
2642
+ fieldUpdates.push(`reference: ${oldType.reference.$refText} -> ${newType.reference.$refText}`);
2643
+ oldType.reference = {
2644
+ ref: newType.reference.ref,
2645
+ $refText: newType.reference.$refText
2646
+ };
2647
+ }
2648
+ } else if (newType.reference?.ref && !oldType.reference) {
2649
+ fieldUpdates.push(`type: ${oldType.type} -> ${newType.reference.$refText}`);
2650
+ oldType.reference = newType.reference;
2651
+ oldType.type = void 0;
2652
+ } else if (!newType.reference && oldType.reference?.ref && newType.type) {
2653
+ const isEnumWithoutNativeSupport = oldType.reference.ref.$type === "Enum" && !provider.isSupportedFeature("NativeEnum");
2654
+ if (!isEnumWithoutNativeSupport) {
2655
+ fieldUpdates.push(`type: ${oldType.reference.$refText} -> ${newType.type}`);
2656
+ oldType.type = newType.type;
2657
+ oldType.reference = void 0;
2658
+ }
2659
+ }
2660
+ if (!!newType.optional !== !!oldType.optional) {
2661
+ fieldUpdates.push(`optional: ${!!oldType.optional} -> ${!!newType.optional}`);
2662
+ oldType.optional = newType.optional;
2663
+ }
2664
+ if (!!newType.array !== !!oldType.array) {
2665
+ fieldUpdates.push(`array: ${!!oldType.array} -> ${!!newType.array}`);
2666
+ oldType.array = newType.array;
2667
+ }
2668
+ if (fieldUpdates.length > 0) {
2669
+ getModelChanges(originalDataModel.name).updatedFields.push(colors4.yellow(`~ ${originalField.name} (${fieldUpdates.join(", ")})`));
2670
+ }
2671
+ const newDefaultAttr = f.attributes.find((a) => a.decl.$refText === "@default");
2672
+ const oldDefaultAttr = originalField.attributes.find((a) => a.decl.$refText === "@default");
2673
+ if (newDefaultAttr && oldDefaultAttr) {
2674
+ const serializeArgs = /* @__PURE__ */ __name((args) => args.map((arg) => {
2675
+ if (arg.value?.$type === "StringLiteral") return `"${arg.value.value}"`;
2676
+ if (arg.value?.$type === "NumberLiteral") return String(arg.value.value);
2677
+ if (arg.value?.$type === "BooleanLiteral") return String(arg.value.value);
2678
+ if (arg.value?.$type === "InvocationExpr") return arg.value.function?.$refText ?? "";
2679
+ if (arg.value?.$type === "ReferenceExpr") return arg.value.target?.$refText ?? "";
2680
+ if (arg.value?.$type === "ArrayExpr") {
2681
+ return `[${(arg.value.items ?? []).map((item) => {
2682
+ if (item.$type === "ReferenceExpr") return item.target?.$refText ?? "";
2683
+ return item.$type ?? "unknown";
2684
+ }).join(",")}]`;
2685
+ }
2686
+ return arg.value?.$type ?? "unknown";
2687
+ }).join(","), "serializeArgs");
2688
+ const newArgsStr = serializeArgs(newDefaultAttr.args ?? []);
2689
+ const oldArgsStr = serializeArgs(oldDefaultAttr.args ?? []);
2690
+ if (newArgsStr !== oldArgsStr) {
2691
+ oldDefaultAttr.args = newDefaultAttr.args.map((arg) => ({
2692
+ ...arg,
2693
+ $container: oldDefaultAttr
2694
+ }));
2695
+ getModelChanges(originalDataModel.name).updatedAttributes.push(colors4.yellow(`~ @default on ${originalDataModel.name}.${originalField.name}`));
2696
+ }
2697
+ }
2698
+ }
2699
+ if (!originalField) {
2700
+ getModelChanges(originalDataModel.name).addedFields.push(colors4.green(`+ ${f.name}`));
2701
+ f.$container = originalDataModel;
2702
+ originalDataModel.fields.push(f);
2703
+ if (f.$type === "DataField" && f.type.reference?.ref) {
2704
+ const ref = declarations.find((d) => getDbName(d.node) === getDbName(f.type.reference.ref))?.node;
2705
+ if (ref) {
2706
+ f.type.reference = {
2707
+ ref,
2708
+ $refText: ref.name ?? f.type.reference.$refText
2709
+ };
2710
+ }
2711
+ }
2712
+ return;
2713
+ }
2714
+ originalField.attributes.filter((attr) => !f.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && isDatabaseManagedAttribute(attr.decl.$refText)).forEach((attr) => {
2715
+ const field = attr.$container;
2716
+ const index = field.attributes.findIndex((d) => d === attr);
2717
+ field.attributes.splice(index, 1);
2718
+ getModelChanges(originalDataModel.name).deletedAttributes.push(colors4.yellow(`- ${attr.decl.$refText} from field: ${originalDataModel.name}.${field.name}`));
2719
+ });
2720
+ f.attributes.filter((attr) => !originalField.attributes.find((d) => d.decl.$refText === attr.decl.$refText) && isDatabaseManagedAttribute(attr.decl.$refText)).forEach((attr) => {
2721
+ const cloned = {
2722
+ ...attr,
2723
+ $container: originalField
2724
+ };
2725
+ originalField.attributes.push(cloned);
2726
+ getModelChanges(originalDataModel.name).addedAttributes.push(colors4.green(`+ ${attr.decl.$refText} to field: ${originalDataModel.name}.${f.name}`));
2727
+ });
2728
+ });
2729
+ originalDataModel.fields.filter((f) => {
2730
+ const matchByDbName = newDataModel.fields.find((d) => getDbName(d) === getDbName(f));
2731
+ if (matchByDbName) return false;
2732
+ const originalFieldsKey = getRelationFieldsKey(f);
2733
+ if (originalFieldsKey) {
2734
+ const matchByFieldsKey = newDataModel.fields.find((d) => getRelationFieldsKey(d) === originalFieldsKey);
2735
+ if (matchByFieldsKey) return false;
2736
+ }
2737
+ const matchByFkName = newDataModel.fields.find((d) => getRelationFkName(d) === getRelationFkName(f) && !!getRelationFkName(d) && !!getRelationFkName(f));
2738
+ if (matchByFkName) return false;
2739
+ const matchByTypeRef = newDataModel.fields.find((d) => f.$type === "DataField" && d.$type === "DataField" && f.type.reference?.ref && d.type.reference?.ref && getDbName(f.type.reference.ref) === getDbName(d.type.reference.ref));
2740
+ return !matchByTypeRef;
2741
+ }).forEach((f) => {
2742
+ const _model = f.$container;
2743
+ const index = _model.fields.findIndex((d) => d === f);
2744
+ _model.fields.splice(index, 1);
2745
+ getModelChanges(_model.name).deletedFields.push(colors4.red(`- ${f.name}`));
2746
+ });
2747
+ });
2748
+ if (deletedModels.length > 0) {
2749
+ console.log(colors4.bold("\nDeleted Models:"));
2750
+ deletedModels.forEach((msg) => {
2751
+ console.log(msg);
2752
+ });
2753
+ }
2754
+ if (deletedEnums.length > 0) {
2755
+ console.log(colors4.bold("\nDeleted Enums:"));
2756
+ deletedEnums.forEach((msg) => {
2757
+ console.log(msg);
2758
+ });
2759
+ }
2760
+ if (addedModels.length > 0) {
2761
+ console.log(colors4.bold("\nAdded Models:"));
2762
+ addedModels.forEach((msg) => {
2763
+ console.log(msg);
2764
+ });
2765
+ }
2766
+ if (addedEnums.length > 0) {
2767
+ console.log(colors4.bold("\nAdded Enums:"));
2768
+ addedEnums.forEach((msg) => {
2769
+ console.log(msg);
2770
+ });
2771
+ }
2772
+ if (modelChanges.size > 0) {
2773
+ console.log(colors4.bold("\nModel Changes:"));
2774
+ modelChanges.forEach((changes, modelName) => {
2775
+ const hasChanges = changes.addedFields.length > 0 || changes.deletedFields.length > 0 || changes.updatedFields.length > 0 || changes.addedAttributes.length > 0 || changes.deletedAttributes.length > 0 || changes.updatedAttributes.length > 0;
2776
+ if (hasChanges) {
2777
+ console.log(colors4.cyan(` ${modelName}:`));
2778
+ if (changes.addedFields.length > 0) {
2779
+ console.log(colors4.gray(" Added Fields:"));
2780
+ changes.addedFields.forEach((msg) => {
2781
+ console.log(` ${msg}`);
2782
+ });
2783
+ }
2784
+ if (changes.deletedFields.length > 0) {
2785
+ console.log(colors4.gray(" Deleted Fields:"));
2786
+ changes.deletedFields.forEach((msg) => {
2787
+ console.log(` ${msg}`);
2788
+ });
2789
+ }
2790
+ if (changes.updatedFields.length > 0) {
2791
+ console.log(colors4.gray(" Updated Fields:"));
2792
+ changes.updatedFields.forEach((msg) => {
2793
+ console.log(` ${msg}`);
2794
+ });
2795
+ }
2796
+ if (changes.addedAttributes.length > 0) {
2797
+ console.log(colors4.gray(" Added Attributes:"));
2798
+ changes.addedAttributes.forEach((msg) => {
2799
+ console.log(` ${msg}`);
2800
+ });
2801
+ }
2802
+ if (changes.deletedAttributes.length > 0) {
2803
+ console.log(colors4.gray(" Deleted Attributes:"));
2804
+ changes.deletedAttributes.forEach((msg) => {
2805
+ console.log(` ${msg}`);
2806
+ });
2807
+ }
2808
+ if (changes.updatedAttributes.length > 0) {
2809
+ console.log(colors4.gray(" Updated Attributes:"));
2810
+ changes.updatedAttributes.forEach((msg) => {
2811
+ console.log(` ${msg}`);
2812
+ });
2813
+ }
2814
+ }
2815
+ });
2816
+ }
2817
+ const generator = new ZModelCodeGenerator({
2818
+ quote: options.quote ?? "single",
2819
+ indent: options.indent ?? 4
2820
+ });
2821
+ if (options.output) {
2822
+ if (treatAsFile) {
2823
+ const zmodelSchema = await formatDocument(generator.generate(newModel));
2824
+ console.log(colors4.blue(`Writing to ${outPath}`));
2825
+ fs2.mkdirSync(path2.dirname(outPath), {
2826
+ recursive: true
2827
+ });
2828
+ fs2.writeFileSync(outPath, zmodelSchema);
2829
+ } else {
2830
+ fs2.mkdirSync(outPath, {
2831
+ recursive: true
2832
+ });
2833
+ const baseDir2 = path2.dirname(path2.resolve(schemaFile));
2834
+ for (const { uri, parseResult: { value: documentModel } } of docs) {
2835
+ const zmodelSchema = await formatDocument(generator.generate(documentModel));
2836
+ const relPath = path2.relative(baseDir2, uri.fsPath);
2837
+ const targetFile = path2.join(outPath, relPath);
2838
+ fs2.mkdirSync(path2.dirname(targetFile), {
2839
+ recursive: true
2840
+ });
2841
+ console.log(colors4.blue(`Writing to ${targetFile}`));
2842
+ fs2.writeFileSync(targetFile, zmodelSchema);
2843
+ }
2844
+ }
2845
+ } else {
2846
+ for (const { uri, parseResult: { value: documentModel } } of docs) {
2847
+ const zmodelSchema = await formatDocument(generator.generate(documentModel));
2848
+ console.log(colors4.blue(`Writing to ${path2.relative(process.cwd(), uri.fsPath).replace(/\\/g, "/")}`));
2849
+ fs2.writeFileSync(uri.fsPath, zmodelSchema);
2850
+ }
2851
+ }
2852
+ console.log(colors4.green.bold("\nPull completed successfully!"));
2853
+ } catch (error) {
2854
+ spinner.fail("Pull failed");
2855
+ console.error(error);
2856
+ throw error;
2857
+ }
2858
+ }
2859
+ __name(runPull, "runPull");
267
2860
 
268
2861
  // src/actions/format.ts
269
- import { formatDocument } from "@zenstackhq/language";
270
- import colors3 from "colors";
2862
+ import { formatDocument as formatDocument2 } from "@zenstackhq/language";
2863
+ import colors5 from "colors";
271
2864
  import fs3 from "fs";
272
2865
  async function run3(options) {
273
2866
  const schemaFile = getSchemaFile(options.schema);
274
2867
  let formattedContent;
275
2868
  try {
276
- formattedContent = await formatDocument(fs3.readFileSync(schemaFile, "utf-8"));
2869
+ formattedContent = await formatDocument2(fs3.readFileSync(schemaFile, "utf-8"));
277
2870
  } catch (error) {
278
- console.error(colors3.red("\u2717 Schema formatting failed."));
2871
+ console.error(colors5.red("\u2717 Schema formatting failed."));
279
2872
  throw error;
280
2873
  }
281
2874
  fs3.writeFileSync(schemaFile, formattedContent, "utf-8");
282
- console.log(colors3.green("\u2713 Schema formatting completed successfully."));
2875
+ console.log(colors5.green("\u2713 Schema formatting completed successfully."));
283
2876
  }
284
2877
  __name(run3, "run");
285
2878
 
@@ -287,14 +2880,14 @@ __name(run3, "run");
287
2880
  import { invariant, singleDebounce } from "@zenstackhq/common-helpers";
288
2881
  import { ZModelLanguageMetaData } from "@zenstackhq/language";
289
2882
  import { isPlugin } from "@zenstackhq/language/ast";
290
- import { getLiteral, getLiteralArray } from "@zenstackhq/language/utils";
291
- import colors4 from "colors";
2883
+ import { getLiteral, getLiteralArray as getLiteralArray2 } from "@zenstackhq/language/utils";
2884
+ import colors6 from "colors";
292
2885
  import { createJiti } from "jiti";
293
2886
  import fs6 from "fs";
294
- import path4 from "path";
2887
+ import path5 from "path";
295
2888
  import { pathToFileURL } from "url";
296
2889
  import { watch } from "chokidar";
297
- import ora from "ora";
2890
+ import ora2 from "ora";
298
2891
 
299
2892
  // src/plugins/index.ts
300
2893
  var plugins_exports = {};
@@ -306,16 +2899,16 @@ __export(plugins_exports, {
306
2899
  // src/plugins/prisma.ts
307
2900
  import { PrismaSchemaGenerator as PrismaSchemaGenerator2 } from "@zenstackhq/sdk";
308
2901
  import fs4 from "fs";
309
- import path2 from "path";
2902
+ import path3 from "path";
310
2903
  var plugin = {
311
2904
  name: "Prisma Schema Generator",
312
2905
  statusText: "Generating Prisma schema",
313
2906
  async generate({ model, defaultOutputPath, pluginOptions }) {
314
- let outFile = path2.join(defaultOutputPath, "schema.prisma");
2907
+ let outFile = path3.join(defaultOutputPath, "schema.prisma");
315
2908
  if (typeof pluginOptions["output"] === "string") {
316
- outFile = path2.resolve(defaultOutputPath, pluginOptions["output"]);
317
- if (!fs4.existsSync(path2.dirname(outFile))) {
318
- fs4.mkdirSync(path2.dirname(outFile), {
2909
+ outFile = path3.resolve(defaultOutputPath, pluginOptions["output"]);
2910
+ if (!fs4.existsSync(path3.dirname(outFile))) {
2911
+ fs4.mkdirSync(path3.dirname(outFile), {
319
2912
  recursive: true
320
2913
  });
321
2914
  }
@@ -329,14 +2922,14 @@ var prisma_default = plugin;
329
2922
  // src/plugins/typescript.ts
330
2923
  import { TsSchemaGenerator } from "@zenstackhq/sdk";
331
2924
  import fs5 from "fs";
332
- import path3 from "path";
2925
+ import path4 from "path";
333
2926
  var plugin2 = {
334
2927
  name: "TypeScript Schema Generator",
335
2928
  statusText: "Generating TypeScript schema",
336
2929
  async generate({ model, defaultOutputPath, pluginOptions }) {
337
2930
  let outDir = defaultOutputPath;
338
2931
  if (typeof pluginOptions["output"] === "string") {
339
- outDir = path3.resolve(defaultOutputPath, pluginOptions["output"]);
2932
+ outDir = path4.resolve(defaultOutputPath, pluginOptions["output"]);
340
2933
  if (!fs5.existsSync(outDir)) {
341
2934
  fs5.mkdirSync(outDir, {
342
2935
  recursive: true
@@ -360,12 +2953,18 @@ var plugin2 = {
360
2953
  var typescript_default = plugin2;
361
2954
 
362
2955
  // src/actions/generate.ts
2956
+ import semver from "semver";
363
2957
  async function run4(options) {
2958
+ try {
2959
+ await checkForMismatchedPackages(process.cwd());
2960
+ } catch (err) {
2961
+ console.warn(colors6.yellow(`Failed to check for mismatched ZenStack packages: ${err}`));
2962
+ }
364
2963
  const model = await pureGenerate(options, false);
365
2964
  if (options.watch) {
366
2965
  const logsEnabled = !options.silent;
367
2966
  if (logsEnabled) {
368
- console.log(colors4.green(`
2967
+ console.log(colors6.green(`
369
2968
  Enabled watch mode!`));
370
2969
  }
371
2970
  const schemaExtensions = ZModelLanguageMetaData.fileExtensions;
@@ -442,14 +3041,14 @@ async function pureGenerate(options, fromWatch) {
442
3041
  const outputPath = getOutputPath(options, schemaFile);
443
3042
  await runPlugins(schemaFile, model, outputPath, options);
444
3043
  if (!options.silent) {
445
- console.log(colors4.green(`Generation completed successfully in ${Date.now() - start}ms.
3044
+ console.log(colors6.green(`Generation completed successfully in ${Date.now() - start}ms.
446
3045
  `));
447
3046
  if (!fromWatch) {
448
3047
  console.log(`You can now create a ZenStack client with it.
449
3048
 
450
3049
  \`\`\`ts
451
3050
  import { ZenStackClient } from '@zenstackhq/orm';
452
- import { schema } from '${path4.relative(".", outputPath)}/schema';
3051
+ import { schema } from '${path5.relative(".", outputPath)}/schema';
453
3052
 
454
3053
  const client = new ZenStackClient(schema, {
455
3054
  dialect: { ... }
@@ -474,7 +3073,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
474
3073
  throw new CliError(`Unknown core plugin: ${provider}`);
475
3074
  }
476
3075
  } else {
477
- cliPlugin = await loadPluginModule(provider, path4.dirname(schemaFile));
3076
+ cliPlugin = await loadPluginModule(provider, path5.dirname(schemaFile));
478
3077
  }
479
3078
  if (cliPlugin) {
480
3079
  const pluginOptions = getPluginOptions(plugin3);
@@ -513,7 +3112,7 @@ async function runPlugins(schemaFile, model, outputPath, options) {
513
3112
  invariant(typeof cliPlugin.generate === "function", `Plugin ${cliPlugin.name} does not have a generate function`);
514
3113
  let spinner;
515
3114
  if (!options.silent) {
516
- spinner = ora(cliPlugin.statusText ?? `Running plugin ${cliPlugin.name}`).start();
3115
+ spinner = ora2(cliPlugin.statusText ?? `Running plugin ${cliPlugin.name}`).start();
517
3116
  }
518
3117
  try {
519
3118
  await cliPlugin.generate({
@@ -543,7 +3142,7 @@ function getPluginOptions(plugin3) {
543
3142
  if (field.name === "provider") {
544
3143
  continue;
545
3144
  }
546
- const value = getLiteral(field.value) ?? getLiteralArray(field.value);
3145
+ const value = getLiteral(field.value) ?? getLiteralArray2(field.value);
547
3146
  if (value === void 0) {
548
3147
  console.warn(`Plugin "${plugin3.name}" option "${field.name}" has unsupported value, skipping`);
549
3148
  continue;
@@ -556,7 +3155,7 @@ __name(getPluginOptions, "getPluginOptions");
556
3155
  async function loadPluginModule(provider, basePath) {
557
3156
  let moduleSpec = provider;
558
3157
  if (moduleSpec.startsWith(".")) {
559
- moduleSpec = path4.resolve(basePath, moduleSpec);
3158
+ moduleSpec = path5.resolve(basePath, moduleSpec);
560
3159
  }
561
3160
  const importAsEsm = /* @__PURE__ */ __name(async (spec) => {
562
3161
  try {
@@ -594,13 +3193,13 @@ async function loadPluginModule(provider, basePath) {
594
3193
  }
595
3194
  }
596
3195
  for (const suffix of esmSuffixes) {
597
- const indexPath = path4.join(moduleSpec, `index${suffix}`);
3196
+ const indexPath = path5.join(moduleSpec, `index${suffix}`);
598
3197
  if (fs6.existsSync(indexPath)) {
599
3198
  return await importAsEsm(pathToFileURL(indexPath).toString());
600
3199
  }
601
3200
  }
602
3201
  for (const suffix of tsSuffixes) {
603
- const indexPath = path4.join(moduleSpec, `index${suffix}`);
3202
+ const indexPath = path5.join(moduleSpec, `index${suffix}`);
604
3203
  if (fs6.existsSync(indexPath)) {
605
3204
  return await importAsTs(indexPath);
606
3205
  }
@@ -612,13 +3211,46 @@ async function loadPluginModule(provider, basePath) {
612
3211
  }
613
3212
  }
614
3213
  __name(loadPluginModule, "loadPluginModule");
3214
+ async function checkForMismatchedPackages(projectPath) {
3215
+ const packages = await getZenStackPackages(projectPath);
3216
+ if (!packages.length) {
3217
+ return false;
3218
+ }
3219
+ const versions = /* @__PURE__ */ new Set();
3220
+ for (const { version: version2 } of packages) {
3221
+ if (version2) {
3222
+ versions.add(version2);
3223
+ }
3224
+ }
3225
+ if (versions.size > 1) {
3226
+ const message = "WARNING: Multiple versions of ZenStack packages detected.\n This will probably cause issues and break your types.";
3227
+ const slashes = "/".repeat(73);
3228
+ const latestVersion = semver.sort(Array.from(versions)).reverse()[0];
3229
+ console.warn(colors6.yellow(`${slashes}
3230
+
3231
+ ${message}
3232
+ `));
3233
+ for (const { pkg, version: version2 } of packages) {
3234
+ if (!version2) continue;
3235
+ if (version2 === latestVersion) {
3236
+ console.log(` ${pkg.padEnd(32)} ${colors6.green(version2)}`);
3237
+ } else {
3238
+ console.log(` ${pkg.padEnd(32)} ${colors6.yellow(version2)}`);
3239
+ }
3240
+ }
3241
+ console.warn(`
3242
+ ${colors6.yellow(slashes)}`);
3243
+ return true;
3244
+ }
3245
+ return false;
3246
+ }
3247
+ __name(checkForMismatchedPackages, "checkForMismatchedPackages");
615
3248
 
616
3249
  // src/actions/info.ts
617
- import colors5 from "colors";
618
- import path5 from "path";
3250
+ import colors7 from "colors";
619
3251
  async function run5(projectPath) {
620
3252
  const packages = await getZenStackPackages(projectPath);
621
- if (!packages) {
3253
+ if (!packages.length) {
622
3254
  console.error("Unable to locate package.json. Are you in a valid project directory?");
623
3255
  return;
624
3256
  }
@@ -628,59 +3260,19 @@ async function run5(projectPath) {
628
3260
  if (version2) {
629
3261
  versions.add(version2);
630
3262
  }
631
- console.log(` ${colors5.green(pkg.padEnd(20))} ${version2}`);
3263
+ console.log(` ${colors7.green(pkg.padEnd(20))} ${version2}`);
632
3264
  }
633
3265
  if (versions.size > 1) {
634
- console.warn(colors5.yellow("WARNING: Multiple versions of Zenstack packages detected. This may cause issues."));
3266
+ console.warn(colors7.yellow("WARNING: Multiple versions of Zenstack packages detected. This may cause issues."));
635
3267
  }
636
3268
  }
637
3269
  __name(run5, "run");
638
- async function getZenStackPackages(projectPath) {
639
- let pkgJson;
640
- const resolvedPath = path5.resolve(projectPath);
641
- try {
642
- pkgJson = (await import(path5.join(resolvedPath, "package.json"), {
643
- with: {
644
- type: "json"
645
- }
646
- })).default;
647
- } catch {
648
- return [];
649
- }
650
- const packages = Array.from(new Set([
651
- ...Object.keys(pkgJson.dependencies ?? {}),
652
- ...Object.keys(pkgJson.devDependencies ?? {})
653
- ].filter((p) => p.startsWith("@zenstackhq/") || p === "zenstack"))).sort();
654
- const result = await Promise.all(packages.map(async (pkg) => {
655
- try {
656
- const depPkgJson = (await import(`${pkg}/package.json`, {
657
- with: {
658
- type: "json"
659
- }
660
- })).default;
661
- if (depPkgJson.private) {
662
- return void 0;
663
- }
664
- return {
665
- pkg,
666
- version: depPkgJson.version
667
- };
668
- } catch {
669
- return {
670
- pkg,
671
- version: void 0
672
- };
673
- }
674
- }));
675
- return result.filter((p) => !!p);
676
- }
677
- __name(getZenStackPackages, "getZenStackPackages");
678
3270
 
679
3271
  // src/actions/init.ts
680
- import colors6 from "colors";
3272
+ import colors8 from "colors";
681
3273
  import fs7 from "fs";
682
3274
  import path6 from "path";
683
- import ora2 from "ora";
3275
+ import ora3 from "ora";
684
3276
  import { detect, resolveCommand } from "package-manager-detector";
685
3277
 
686
3278
  // src/actions/templates.ts
@@ -731,7 +3323,7 @@ async function run6(projectPath) {
731
3323
  name: "npm"
732
3324
  };
733
3325
  }
734
- console.log(colors6.gray(`Using package manager: ${pm.agent}`));
3326
+ console.log(colors8.gray(`Using package manager: ${pm.agent}`));
735
3327
  for (const pkg of packages) {
736
3328
  const resolved = resolveCommand(pm.agent, "add", [
737
3329
  pkg.name,
@@ -742,7 +3334,7 @@ async function run6(projectPath) {
742
3334
  if (!resolved) {
743
3335
  throw new CliError(`Unable to determine how to install package "${pkg.name}". Please install it manually.`);
744
3336
  }
745
- const spinner = ora2(`Installing "${pkg.name}"`).start();
3337
+ const spinner = ora3(`Installing "${pkg.name}"`).start();
746
3338
  try {
747
3339
  execSync(`${resolved.command} ${resolved.args.join(" ")}`, {
748
3340
  cwd: projectPath
@@ -760,11 +3352,11 @@ async function run6(projectPath) {
760
3352
  if (!fs7.existsSync(path6.join(projectPath, generationFolder, "schema.zmodel"))) {
761
3353
  fs7.writeFileSync(path6.join(projectPath, generationFolder, "schema.zmodel"), STARTER_ZMODEL);
762
3354
  } else {
763
- console.log(colors6.yellow("Schema file already exists. Skipping generation of sample."));
3355
+ console.log(colors8.yellow("Schema file already exists. Skipping generation of sample."));
764
3356
  }
765
- console.log(colors6.green("ZenStack project initialized successfully!"));
766
- console.log(colors6.gray(`See "${generationFolder}/schema.zmodel" for your database schema.`));
767
- console.log(colors6.gray("Run `zenstack generate` to compile the the schema into a TypeScript file."));
3357
+ console.log(colors8.green("ZenStack project initialized successfully!"));
3358
+ console.log(colors8.gray(`See "${generationFolder}/schema.zmodel" for your database schema.`));
3359
+ console.log(colors8.gray("Run `zenstack generate` to compile the the schema into a TypeScript file."));
768
3360
  }
769
3361
  __name(run6, "run");
770
3362
 
@@ -773,19 +3365,19 @@ import fs8 from "fs";
773
3365
  import path7 from "path";
774
3366
 
775
3367
  // src/actions/seed.ts
776
- import colors7 from "colors";
3368
+ import colors9 from "colors";
777
3369
  import { execaCommand } from "execa";
778
3370
  async function run7(options, args) {
779
3371
  const pkgJsonConfig = getPkgJsonConfig(process.cwd());
780
3372
  if (!pkgJsonConfig.seed) {
781
3373
  if (!options.noWarnings) {
782
- console.warn(colors7.yellow("No seed script defined in package.json. Skipping seeding."));
3374
+ console.warn(colors9.yellow("No seed script defined in package.json. Skipping seeding."));
783
3375
  }
784
3376
  return;
785
3377
  }
786
3378
  const command = `${pkgJsonConfig.seed}${args.length > 0 ? " " + args.join(" ") : ""}`;
787
3379
  if (options.printStatus) {
788
- console.log(colors7.gray(`Running seed script "${command}"...`));
3380
+ console.log(colors9.gray(`Running seed script "${command}"...`));
789
3381
  }
790
3382
  try {
791
3383
  await execaCommand(command, {
@@ -793,7 +3385,7 @@ async function run7(options, args) {
793
3385
  stderr: "inherit"
794
3386
  });
795
3387
  } catch (err) {
796
- console.error(colors7.red(err instanceof Error ? err.message : String(err)));
3388
+ console.error(colors9.red(err instanceof Error ? err.message : String(err)));
797
3389
  throw new CliError("Failed to seed the database. Please check the error message above for details.");
798
3390
  }
799
3391
  }
@@ -914,8 +3506,8 @@ function handleSubProcessError2(err) {
914
3506
  __name(handleSubProcessError2, "handleSubProcessError");
915
3507
 
916
3508
  // src/actions/proxy.ts
917
- import { isDataSource as isDataSource2, isInvocationExpr, isLiteralExpr } from "@zenstackhq/language/ast";
918
- import { getStringLiteral } from "@zenstackhq/language/utils";
3509
+ import { isDataSource as isDataSource2, isInvocationExpr as isInvocationExpr2, isLiteralExpr } from "@zenstackhq/language/ast";
3510
+ import { getStringLiteral as getStringLiteral2 } from "@zenstackhq/language/utils";
919
3511
  import { ZenStackClient } from "@zenstackhq/orm";
920
3512
  import { MysqlDialect } from "@zenstackhq/orm/dialects/mysql";
921
3513
  import { PostgresDialect } from "@zenstackhq/orm/dialects/postgres";
@@ -923,7 +3515,7 @@ import { SqliteDialect } from "@zenstackhq/orm/dialects/sqlite";
923
3515
  import { RPCApiHandler } from "@zenstackhq/server/api";
924
3516
  import { ZenStackMiddleware } from "@zenstackhq/server/express";
925
3517
  import SQLite from "better-sqlite3";
926
- import colors9 from "colors";
3518
+ import colors11 from "colors";
927
3519
  import cors from "cors";
928
3520
  import express from "express";
929
3521
  import { createJiti as createJiti2 } from "jiti";
@@ -932,11 +3524,11 @@ import path9 from "path";
932
3524
  import { Pool as PgPool } from "pg";
933
3525
 
934
3526
  // src/utils/version-utils.ts
935
- import colors8 from "colors";
3527
+ import colors10 from "colors";
936
3528
  import fs9 from "fs";
937
3529
  import path8 from "path";
938
3530
  import { fileURLToPath as fileURLToPath2 } from "url";
939
- import semver from "semver";
3531
+ import semver2 from "semver";
940
3532
  var CHECK_VERSION_TIMEOUT = 2e3;
941
3533
  var VERSION_CHECK_TAG = "next";
942
3534
  function getVersion() {
@@ -956,8 +3548,8 @@ async function checkNewVersion() {
956
3548
  } catch {
957
3549
  return;
958
3550
  }
959
- if (latestVersion && currVersion && semver.gt(latestVersion, currVersion)) {
960
- console.log(`A newer version ${colors8.cyan(latestVersion)} is available.`);
3551
+ if (latestVersion && currVersion && semver2.gt(latestVersion, currVersion)) {
3552
+ console.log(`A newer version ${colors10.cyan(latestVersion)} is available.`);
961
3553
  }
962
3554
  }
963
3555
  __name(checkNewVersion, "checkNewVersion");
@@ -971,7 +3563,7 @@ async function getLatestVersion() {
971
3563
  if (fetchResult.ok) {
972
3564
  const data = await fetchResult.json();
973
3565
  const latestVersion = data?.version;
974
- if (typeof latestVersion === "string" && semver.valid(latestVersion)) {
3566
+ if (typeof latestVersion === "string" && semver2.valid(latestVersion)) {
975
3567
  return latestVersion;
976
3568
  }
977
3569
  }
@@ -987,7 +3579,7 @@ async function run9(options) {
987
3579
  ];
988
3580
  const log = options.logLevel?.filter((level) => allowedLogLevels.includes(level));
989
3581
  const schemaFile = getSchemaFile(options.schema);
990
- console.log(colors9.gray(`Loading ZModel schema from: ${schemaFile}`));
3582
+ console.log(colors11.gray(`Loading ZModel schema from: ${schemaFile}`));
991
3583
  let outputPath = getOutputPath(options, schemaFile);
992
3584
  if (!path9.isAbsolute(outputPath)) {
993
3585
  outputPath = path9.resolve(process.cwd(), outputPath);
@@ -1002,7 +3594,7 @@ async function run9(options) {
1002
3594
  }
1003
3595
  databaseUrl = evaluateUrl(schemaUrl);
1004
3596
  }
1005
- const provider = getStringLiteral(dataSource?.fields.find((f) => f.name === "provider")?.value);
3597
+ const provider = getStringLiteral2(dataSource?.fields.find((f) => f.name === "provider")?.value);
1006
3598
  const dialect = createDialect(provider, databaseUrl, outputPath);
1007
3599
  const jiti = createJiti2(import.meta.url);
1008
3600
  const schemaModule = await jiti.import(path9.join(outputPath, "schema"));
@@ -1034,10 +3626,10 @@ async function run9(options) {
1034
3626
  __name(run9, "run");
1035
3627
  function evaluateUrl(schemaUrl) {
1036
3628
  if (isLiteralExpr(schemaUrl)) {
1037
- return getStringLiteral(schemaUrl);
1038
- } else if (isInvocationExpr(schemaUrl)) {
3629
+ return getStringLiteral2(schemaUrl);
3630
+ } else if (isInvocationExpr2(schemaUrl)) {
1039
3631
  const envFunction = schemaUrl;
1040
- const envName = getStringLiteral(envFunction.args[0]?.value);
3632
+ const envName = getStringLiteral2(envFunction.args[0]?.value);
1041
3633
  const envValue = process.env[envName];
1042
3634
  if (!envValue) {
1043
3635
  throw new CliError(`Environment variable ${envName} is not set`);
@@ -1073,20 +3665,20 @@ function createDialect(provider, databaseUrl, outputPath) {
1073
3665
  resolvedUrl = path9.join(outputPath, filePath);
1074
3666
  }
1075
3667
  }
1076
- console.log(colors9.gray(`Connecting to SQLite database at: ${resolvedUrl}`));
3668
+ console.log(colors11.gray(`Connecting to SQLite database at: ${resolvedUrl}`));
1077
3669
  return new SqliteDialect({
1078
3670
  database: new SQLite(resolvedUrl)
1079
3671
  });
1080
3672
  }
1081
3673
  case "postgresql":
1082
- console.log(colors9.gray(`Connecting to PostgreSQL database at: ${redactDatabaseUrl(databaseUrl)}`));
3674
+ console.log(colors11.gray(`Connecting to PostgreSQL database at: ${redactDatabaseUrl(databaseUrl)}`));
1083
3675
  return new PostgresDialect({
1084
3676
  pool: new PgPool({
1085
3677
  connectionString: databaseUrl
1086
3678
  })
1087
3679
  });
1088
3680
  case "mysql":
1089
- console.log(colors9.gray(`Connecting to MySQL database at: ${redactDatabaseUrl(databaseUrl)}`));
3681
+ console.log(colors11.gray(`Connecting to MySQL database at: ${redactDatabaseUrl(databaseUrl)}`));
1090
3682
  return new MysqlDialect({
1091
3683
  pool: createMysqlPool(databaseUrl)
1092
3684
  });
@@ -1119,11 +3711,11 @@ function startServer(client, schema, options) {
1119
3711
  });
1120
3712
  const server = app.listen(options.port, () => {
1121
3713
  console.log(`ZenStack proxy server is running on port: ${options.port}`);
1122
- console.log(`You can visit ZenStack Studio at: ${colors9.blue("https://studio.zenstack.dev")}`);
3714
+ console.log(`You can visit ZenStack Studio at: ${colors11.blue("https://studio.zenstack.dev")}`);
1123
3715
  });
1124
3716
  server.on("error", (err) => {
1125
3717
  if (err.code === "EADDRINUSE") {
1126
- console.error(colors9.red(`Port ${options.port} is already in use. Please choose a different port using -p option.`));
3718
+ console.error(colors11.red(`Port ${options.port} is already in use. Please choose a different port using -p option.`));
1127
3719
  } else {
1128
3720
  throw new CliError(`Failed to start the server: ${err.message}`);
1129
3721
  }
@@ -1153,7 +3745,7 @@ import fs13 from "fs";
1153
3745
  import * as os2 from "os";
1154
3746
 
1155
3747
  // src/constants.ts
1156
- var TELEMETRY_TRACKING_TOKEN = "74944eb779d7d3b4ce185be843fde9fc";
3748
+ var TELEMETRY_TRACKING_TOKEN = "<TELEMETRY_TRACKING_TOKEN>";
1157
3749
 
1158
3750
  // src/utils/is-ci.ts
1159
3751
  import { env } from "process";
@@ -1416,7 +4008,7 @@ var proxyAction = /* @__PURE__ */ __name(async (options) => {
1416
4008
  function createProgram() {
1417
4009
  const program = new Command("zen").alias("zenstack").helpOption("-h, --help", "Show this help message").version(getVersion(), "-v --version", "Show CLI version");
1418
4010
  const schemaExtensions = ZModelLanguageMetaData2.fileExtensions.join(", ");
1419
- program.description(`${colors10.bold.blue("\u03B6")} ZenStack is the modern data layer for TypeScript apps.
4011
+ program.description(`${colors12.bold.blue("\u03B6")} ZenStack is the modern data layer for TypeScript apps.
1420
4012
 
1421
4013
  Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAfterError();
1422
4014
  const schemaOption = new Option("--schema <file>", `schema file (with extension ${schemaExtensions}). Defaults to "zenstack/schema.zmodel" unless specified in package.json.`);
@@ -1431,6 +4023,7 @@ Documentation: https://zenstack.dev/docs`).showHelpAfterError().showSuggestionAf
1431
4023
  migrateCommand.command("resolve").addOption(schemaOption).addOption(noVersionCheckOption).addOption(migrationsOption).addOption(new Option("--applied <migration>", "record a specific migration as applied")).addOption(new Option("--rolled-back <migration>", "record a specific migration as rolled back")).description("Resolve issues with database migrations in deployment databases").action((options) => migrateAction("resolve", options));
1432
4024
  const dbCommand = program.command("db").description("Manage your database schema during development");
1433
4025
  dbCommand.command("push").description("Push the state from your schema to your database").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new Option("--accept-data-loss", "ignore data loss warnings")).addOption(new Option("--force-reset", "force a reset of the database before push")).action((options) => dbAction("push", options));
4026
+ dbCommand.command("pull").description("Introspect your database.").addOption(schemaOption).addOption(noVersionCheckOption).addOption(new Option("-o, --output <path>", "set custom output path for the introspected schema. If a file path is provided, all schemas are merged into that single file. If a directory path is provided, files are written to the directory and imports are kept.")).addOption(new Option("--model-casing <pascal|camel|snake|none>", "set the casing of generated models").default("pascal")).addOption(new Option("--field-casing <pascal|camel|snake|none>", "set the casing of generated fields").default("camel")).addOption(new Option("--always-map", "always add @map and @@map attributes to models and fields").default(false)).addOption(new Option("--quote <double|single>", "set the quote style of generated schema files").default("single")).addOption(new Option("--indent <number>", "set the indentation of the generated schema files").default(4)).action((options) => dbAction("pull", options));
1434
4027
  dbCommand.command("seed").description("Seed the database").allowExcessArguments(true).addHelpText("after", `
1435
4028
  Seed script is configured under the "zenstack.seed" field in package.json.
1436
4029
  E.g.:
@@ -1467,10 +4060,10 @@ async function main() {
1467
4060
  if (e instanceof CommanderError) {
1468
4061
  exitCode = e.exitCode;
1469
4062
  } else if (e instanceof CliError) {
1470
- console.error(colors10.red(e.message));
4063
+ console.error(colors12.red(e.message));
1471
4064
  exitCode = 1;
1472
4065
  } else {
1473
- console.error(colors10.red(`Unhandled error: ${e}`));
4066
+ console.error(colors12.red(`Unhandled error: ${e}`));
1474
4067
  exitCode = 1;
1475
4068
  }
1476
4069
  }