@dooor-ai/cortexdb 0.6.0 → 0.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,38 @@
1
1
  #!/usr/bin/env node
2
2
  "use strict";
3
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
4
+ if (k2 === undefined) k2 = k;
5
+ var desc = Object.getOwnPropertyDescriptor(m, k);
6
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
7
+ desc = { enumerable: true, get: function() { return m[k]; } };
8
+ }
9
+ Object.defineProperty(o, k2, desc);
10
+ }) : (function(o, m, k, k2) {
11
+ if (k2 === undefined) k2 = k;
12
+ o[k2] = m[k];
13
+ }));
14
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
15
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
16
+ }) : function(o, v) {
17
+ o["default"] = v;
18
+ });
19
+ var __importStar = (this && this.__importStar) || (function () {
20
+ var ownKeys = function(o) {
21
+ ownKeys = Object.getOwnPropertyNames || function (o) {
22
+ var ar = [];
23
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
24
+ return ar;
25
+ };
26
+ return ownKeys(o);
27
+ };
28
+ return function (mod) {
29
+ if (mod && mod.__esModule) return mod;
30
+ var result = {};
31
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
32
+ __setModuleDefault(result, mod);
33
+ return result;
34
+ };
35
+ })();
3
36
  var __importDefault = (this && this.__importDefault) || function (mod) {
4
37
  return (mod && mod.__esModule) ? mod : { "default": mod };
5
38
  };
@@ -122,13 +155,18 @@ function printUsage() {
122
155
  `Usage:\n` +
123
156
  ` ${scriptName} schema diff [--dir path]\n` +
124
157
  ` ${scriptName} schema apply [--dir path] [--no-generate-types] [--out path]\n` +
158
+ ` ${scriptName} schema validate [--dir path]\n` +
125
159
  ` ${scriptName} schema generate-types --out src/generated/cortex-schema.ts [--dir path]\n` +
160
+ ` ${scriptName} migrate status\n` +
161
+ ` ${scriptName} migrate rollback [--steps N]\n` +
162
+ ` ${scriptName} db reset [--force]\n` +
163
+ ` ${scriptName} db pull [--format yaml|ts]\n` +
126
164
  `\nEnvironment variables:\n` +
127
- ` DOOOR_CONFIG Caminho para o config YAML (default ./dooor/config.yaml)\n` +
165
+ ` DOOOR_CONFIG Path to config YAML (default ./dooor/config.yaml)\n` +
128
166
  ` CORTEXDB_CONNECTION Full connection string (cortexdb://key@host:port)\n` +
129
167
  ` CORTEXDB_BASE_URL Base URL (default http://localhost:8000)\n` +
130
168
  ` CORTEXDB_API_KEY API key (required if no connection string)\n` +
131
- ` CORTEXDB_SCHEMA_DIR Override manual do diretório de schemas\n`);
169
+ ` CORTEXDB_SCHEMA_DIR Override schema directory\n`);
132
170
  }
133
171
  function parseArgs(argv) {
134
172
  const opts = {};
@@ -200,7 +238,7 @@ function loadYamlFiles(schemaDir) {
200
238
  }
201
239
  const files = node_fs_1.default.readdirSync(schemaDir).filter((file) => file.endsWith(".yml") || file.endsWith(".yaml"));
202
240
  if (files.length === 0) {
203
- throw new Error(`No YAML schema files found in ${schemaDir}`);
241
+ return { version: 1, collections: [] }; // Return empty instead of throwing
204
242
  }
205
243
  const collections = [];
206
244
  for (const file of files) {
@@ -225,6 +263,89 @@ function loadYamlFiles(schemaDir) {
225
263
  collections,
226
264
  };
227
265
  }
266
+ /**
267
+ * Load TypeScript schemas using decorators (@Collection, @Field)
268
+ */
269
+ function loadTypeScriptFiles(schemaDir) {
270
+ try {
271
+ // Import the loader dynamically to avoid breaking if deps not installed
272
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
273
+ const loaderModule = require("../schema-decorators/loader");
274
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
275
+ const validatorModule = require("../schema-decorators/validator");
276
+ const collections = loaderModule.loadTypeScriptSchemasSync(schemaDir);
277
+ // Validate schemas
278
+ const validationResult = validatorModule.validateSchema(collections);
279
+ // Show warnings
280
+ if (validationResult.warnings.length > 0) {
281
+ console.log(validatorModule.formatValidationErrors({
282
+ valid: true,
283
+ errors: [],
284
+ warnings: validationResult.warnings
285
+ }));
286
+ }
287
+ // Fail on errors
288
+ if (!validationResult.valid) {
289
+ console.error(validatorModule.formatValidationErrors(validationResult));
290
+ throw new Error(`Schema validation failed with ${validationResult.errors.length} error(s)`);
291
+ }
292
+ // Convert LoadedCollection[] to DSLCollection[]
293
+ return {
294
+ version: 1,
295
+ collections: collections.map((col) => ({
296
+ name: col.name,
297
+ description: col.description,
298
+ database: col.database,
299
+ config: col.config,
300
+ fields: col.fields.map((field) => ({
301
+ name: field.name,
302
+ type: field.type,
303
+ description: field.description,
304
+ required: field.required,
305
+ indexed: field.indexed,
306
+ unique: field.unique,
307
+ filterable: field.filterable,
308
+ vectorize: field.vectorize,
309
+ default: field.default,
310
+ values: field.values,
311
+ store_in: field.store_in,
312
+ extract_config: field.extract_config,
313
+ schema: field.schema,
314
+ })),
315
+ })),
316
+ };
317
+ }
318
+ catch (error) {
319
+ // If ts-node or decorators not available, silently return empty
320
+ if (error.code === "MODULE_NOT_FOUND" || error.message?.includes("Cannot find module")) {
321
+ return { version: 1, collections: [] };
322
+ }
323
+ throw error;
324
+ }
325
+ }
326
+ /**
327
+ * Load schemas from both YAML and TypeScript sources
328
+ */
329
+ function loadAllSchemas(schemaDir) {
330
+ const yamlSchema = loadYamlFiles(schemaDir);
331
+ const tsSchema = loadTypeScriptFiles(schemaDir);
332
+ const allCollections = [...yamlSchema.collections, ...tsSchema.collections];
333
+ if (allCollections.length === 0) {
334
+ throw new Error(`No schema files found in ${schemaDir}. Create .yml, .yaml files, or .ts files with @Collection decorators.`);
335
+ }
336
+ // Check for duplicate collection names
337
+ const names = new Set();
338
+ for (const collection of allCollections) {
339
+ if (names.has(collection.name)) {
340
+ throw new Error(`Duplicate collection name '${collection.name}' found in schemas (check both YAML and TypeScript files)`);
341
+ }
342
+ names.add(collection.name);
343
+ }
344
+ return {
345
+ version: 1,
346
+ collections: allCollections,
347
+ };
348
+ }
228
349
  function validateCollection(collection) {
229
350
  if (!collection.name || typeof collection.name !== "string") {
230
351
  throw new Error(`Collection is missing a valid name`);
@@ -417,7 +538,10 @@ async function resolveEmbeddingProviders(collections, client) {
417
538
  });
418
539
  }
419
540
  async function fetchCurrentCollections(client, desired) {
420
- return await client.collections.list();
541
+ const current = await client.collections.list();
542
+ console.log('\n🔍 DEBUG: Current collections in database:', current.map(c => c.name).join(', ') || '(none)');
543
+ console.log('🔍 DEBUG: Desired collections:', desired.map(c => c.name).join(', '));
544
+ return current;
421
545
  }
422
546
  function buildEmbeddingProviderIndex(providers) {
423
547
  const index = new Map();
@@ -442,7 +566,7 @@ function buildEmbeddingProviderIndex(providers) {
442
566
  return index;
443
567
  }
444
568
  async function commandDiff(schemaDir) {
445
- const desiredSchema = loadYamlFiles(schemaDir);
569
+ const desiredSchema = loadAllSchemas(schemaDir);
446
570
  const client = getClient();
447
571
  desiredSchema.collections.forEach(validateCollection);
448
572
  const currentCollections = await fetchCurrentCollections(client, desiredSchema.collections);
@@ -473,7 +597,7 @@ async function commandDiff(schemaDir) {
473
597
  return diff.update.length > 0 ? 1 : 0;
474
598
  }
475
599
  async function commandApply(schemaDir, migrationName, createOnly) {
476
- const desiredSchema = loadYamlFiles(schemaDir);
600
+ const desiredSchema = loadAllSchemas(schemaDir);
477
601
  const client = getClient();
478
602
  desiredSchema.collections.forEach(validateCollection);
479
603
  const currentCollections = await fetchCurrentCollections(client, desiredSchema.collections);
@@ -492,8 +616,11 @@ async function commandApply(schemaDir, migrationName, createOnly) {
492
616
  // Process CREATE operations
493
617
  for (const collection of diff.create) {
494
618
  console.log(`\n📋 Generating migration for new collection: ${collection.name}`);
619
+ // Use collection database or default from connection string
620
+ const database = collection.database || client.getDefaultDatabase();
621
+ console.log(`🔍 DEBUG: Collection database:`, database || '(default)');
495
622
  const yamlContent = buildYamlForCollection(collection);
496
- const migration = await generateMigration(client, yamlContent, migrationName, collection.database);
623
+ const migration = await generateMigration(client, yamlContent, migrationName, database);
497
624
  if (migration) {
498
625
  const { version, name, up_sql, down_sql, warnings } = migration;
499
626
  // Save migration files
@@ -510,7 +637,7 @@ async function commandApply(schemaDir, migrationName, createOnly) {
510
637
  if (!createOnly) {
511
638
  // Apply migration
512
639
  console.log(` 🚀 Applying migration...`);
513
- await applyMigration(client, version, `${version}_${name}.sql`, up_sql);
640
+ await applyMigration(client, version, `${version}_${name}.sql`, up_sql, database, yamlContent);
514
641
  console.log(` ✓ Migration applied successfully`);
515
642
  appliedMigrations.push({ version, name, collection: collection.name });
516
643
  }
@@ -519,8 +646,10 @@ async function commandApply(schemaDir, migrationName, createOnly) {
519
646
  // Process UPDATE operations
520
647
  for (const entry of diff.update) {
521
648
  console.log(`\n📋 Generating migration for updated collection: ${entry.desired.name}`);
649
+ // Use collection database or default from connection string
650
+ const database = entry.desired.database || client.getDefaultDatabase();
522
651
  const yamlContent = buildYamlForCollection(entry.desired);
523
- const migration = await generateMigration(client, yamlContent, migrationName, entry.desired.database);
652
+ const migration = await generateMigration(client, yamlContent, migrationName, database);
524
653
  if (migration) {
525
654
  const { version, name, up_sql, down_sql, warnings } = migration;
526
655
  // Save migration files
@@ -537,7 +666,7 @@ async function commandApply(schemaDir, migrationName, createOnly) {
537
666
  if (!createOnly) {
538
667
  // Apply migration
539
668
  console.log(` 🚀 Applying migration...`);
540
- await applyMigration(client, version, `${version}_${name}.sql`, up_sql);
669
+ await applyMigration(client, version, `${version}_${name}.sql`, up_sql, database, yamlContent);
541
670
  console.log(` ✓ Migration applied successfully`);
542
671
  appliedMigrations.push({ version, name, collection: entry.desired.name });
543
672
  }
@@ -566,18 +695,30 @@ function buildYamlForCollection(collection) {
566
695
  if (collection.config) {
567
696
  yamlObj.config = collection.config;
568
697
  }
569
- return require('yaml').stringify(yamlObj);
698
+ if (collection.database) {
699
+ yamlObj.database = collection.database;
700
+ }
701
+ const yamlString = require('yaml').stringify(yamlObj);
702
+ // Debug: log the YAML being sent
703
+ console.log('\n🔍 DEBUG: YAML being sent to backend:');
704
+ console.log('---');
705
+ console.log(yamlString);
706
+ console.log('---\n');
707
+ return yamlString;
570
708
  }
571
709
  async function generateMigration(client, yamlContent, name, database) {
572
710
  try {
711
+ console.log('🔍 DEBUG: Calling /migrations/generate with database:', database || '(default)');
573
712
  const response = await client.http.post('/migrations/generate', {
574
713
  yaml_content: yamlContent,
575
714
  name,
576
715
  database,
577
716
  });
717
+ console.log('🔍 DEBUG: Migration generated successfully');
578
718
  return response;
579
719
  }
580
720
  catch (error) {
721
+ console.log('🔍 DEBUG: Generate migration error:', error.response?.status, error.response?.data?.detail || error.message);
581
722
  if (error.response?.status === 400 && error.response?.data?.detail?.includes('No schema changes')) {
582
723
  // No changes detected - this is ok
583
724
  return null;
@@ -585,7 +726,7 @@ async function generateMigration(client, yamlContent, name, database) {
585
726
  throw error;
586
727
  }
587
728
  }
588
- async function applyMigration(client, version, filename, upSql) {
729
+ async function applyMigration(client, version, filename, upSql, database, yamlSchema) {
589
730
  const crypto = require('crypto');
590
731
  const checksum = crypto.createHash('sha256').update(upSql).digest('hex');
591
732
  await client.http.post('/migrations/apply', {
@@ -593,8 +734,398 @@ async function applyMigration(client, version, filename, upSql) {
593
734
  filename,
594
735
  up_sql: upSql,
595
736
  checksum,
737
+ database,
738
+ yaml_schema: yamlSchema,
596
739
  });
597
740
  }
741
+ async function commandDbReset(force = false) {
742
+ const client = getClient();
743
+ const migrationsDir = node_path_1.default.resolve(node_process_1.default.cwd(), "dooor", "migrations");
744
+ // Check if migrations directory exists
745
+ if (!node_fs_1.default.existsSync(migrationsDir)) {
746
+ console.error("✖ No migrations directory found at dooor/migrations/");
747
+ console.error(" Run 'dooor schema apply' first to generate migrations.");
748
+ return 1;
749
+ }
750
+ // Read all migration files (only UP migrations, not .down.sql)
751
+ const migrationFiles = node_fs_1.default.readdirSync(migrationsDir)
752
+ .filter((file) => file.endsWith(".sql") && !file.endsWith(".down.sql"))
753
+ .sort();
754
+ if (migrationFiles.length === 0) {
755
+ console.error("✖ No migration files found in dooor/migrations/");
756
+ console.error(" Run 'dooor schema apply' first to generate migrations.");
757
+ return 1;
758
+ }
759
+ // Check current database state before warning
760
+ console.log("\n🔍 Checking database state...\n");
761
+ let tableInfo = [];
762
+ try {
763
+ const database = client.getDefaultDatabase();
764
+ const statusResponse = await client.http.get(`/migrations/status?database=${database || ""}`);
765
+ tableInfo = (statusResponse.tables || []).map((t) => ({ name: t.name, rows: t.row_count }));
766
+ }
767
+ catch {
768
+ // If status endpoint doesn't exist, continue without table info
769
+ }
770
+ // Display warning with red styling
771
+ const chalk = await Promise.resolve().then(() => __importStar(require("chalk"))).then(m => m.default);
772
+ console.log(chalk.bgRed.white.bold(" ⚠️ DESTRUCTIVE OPERATION "));
773
+ console.log("");
774
+ console.log(chalk.red.bold("This will completely reset the database!"));
775
+ console.log("");
776
+ if (tableInfo.length > 0) {
777
+ const totalRows = tableInfo.reduce((sum, t) => sum + t.rows, 0);
778
+ console.log(chalk.red(` 📊 ${tableInfo.length} table(s) will be dropped:`));
779
+ tableInfo.forEach((t) => {
780
+ const rowText = t.rows > 0 ? chalk.yellow(`(${t.rows} rows)`) : chalk.gray("(empty)");
781
+ console.log(chalk.red(` • ${t.name} ${rowText}`));
782
+ });
783
+ console.log("");
784
+ if (totalRows > 0) {
785
+ console.log(chalk.red.bold(` 💀 ${totalRows} total row(s) will be PERMANENTLY DELETED`));
786
+ console.log("");
787
+ }
788
+ }
789
+ else {
790
+ console.log(chalk.red(" • All user tables will be dropped"));
791
+ console.log(chalk.red(" • All data will be permanently lost"));
792
+ console.log("");
793
+ }
794
+ console.log(chalk.white(` 📋 ${migrationFiles.length} migration(s) will be re-applied`));
795
+ console.log("");
796
+ // Prompt for confirmation unless --force
797
+ if (!force) {
798
+ const readline = await Promise.resolve().then(() => __importStar(require("readline")));
799
+ const rl = readline.createInterface({
800
+ input: node_process_1.default.stdin,
801
+ output: node_process_1.default.stdout,
802
+ });
803
+ console.log(chalk.yellow.bold("This action cannot be undone."));
804
+ console.log("");
805
+ const answer = await new Promise((resolve) => {
806
+ rl.question(chalk.red.bold("Type 'yes' to confirm reset: "), (ans) => {
807
+ rl.close();
808
+ resolve(ans);
809
+ });
810
+ });
811
+ if (answer.toLowerCase() !== "yes") {
812
+ console.log("\n✖ Reset cancelled.");
813
+ return 1;
814
+ }
815
+ }
816
+ console.log("\n🔄 Resetting database...\n");
817
+ // Load all migrations
818
+ const crypto = require("crypto");
819
+ const migrations = [];
820
+ for (const filename of migrationFiles) {
821
+ const filePath = node_path_1.default.join(migrationsDir, filename);
822
+ const upSql = node_fs_1.default.readFileSync(filePath, "utf-8");
823
+ const checksum = crypto.createHash("sha256").update(upSql).digest("hex");
824
+ // Extract version from filename (e.g., 20250108_120000_123456_create_users.sql -> 20250108_120000_123456)
825
+ const name = filename.replace(".sql", "");
826
+ const parts = name.split("_");
827
+ let version = name;
828
+ if (parts.length >= 3 && parts[0].match(/^\d+$/) && parts[1].match(/^\d+$/)) {
829
+ // Format: YYYYMMDD_HHMMSS_microseconds_name
830
+ version = `${parts[0]}_${parts[1]}_${parts[2]}`;
831
+ }
832
+ else if (parts.length >= 2 && parts[0].match(/^\d+$/) && parts[1].match(/^\d+$/)) {
833
+ // Format: YYYYMMDD_HHMMSS_name
834
+ version = `${parts[0]}_${parts[1]}`;
835
+ }
836
+ migrations.push({
837
+ version,
838
+ filename,
839
+ up_sql: upSql,
840
+ checksum,
841
+ });
842
+ }
843
+ try {
844
+ const database = client.getDefaultDatabase();
845
+ // Cast to any to access private http property (same pattern as generateMigration/applyMigration)
846
+ const response = await client.http.post("/migrations/reset", {
847
+ database,
848
+ migrations,
849
+ });
850
+ // Display results
851
+ if (response.dropped_tables && response.dropped_tables.length > 0) {
852
+ console.log(`✓ Dropped ${response.dropped_tables.length} table(s):`);
853
+ response.dropped_tables.forEach((table) => {
854
+ console.log(` - ${table}`);
855
+ });
856
+ }
857
+ else {
858
+ console.log("✓ No user tables to drop");
859
+ }
860
+ console.log("");
861
+ if (response.applied_migrations && response.applied_migrations.length > 0) {
862
+ console.log(`✓ Applied ${response.applied_migrations.length} migration(s):`);
863
+ response.applied_migrations.forEach((version) => {
864
+ console.log(` - ${version}`);
865
+ });
866
+ }
867
+ else {
868
+ console.log("✓ No migrations to apply");
869
+ }
870
+ if (response.warnings && response.warnings.length > 0) {
871
+ console.log("\n⚠️ Warnings:");
872
+ response.warnings.forEach((warning) => {
873
+ console.log(` - ${warning}`);
874
+ });
875
+ }
876
+ console.log("\n✔ Database reset completed successfully!");
877
+ return 0;
878
+ }
879
+ catch (error) {
880
+ const message = error.response?.data?.detail || error.message;
881
+ console.error(`\n✖ Reset failed: ${message}`);
882
+ return 1;
883
+ }
884
+ }
885
+ async function commandMigrateStatus() {
886
+ const client = getClient();
887
+ const migrationsDir = node_path_1.default.resolve(node_process_1.default.cwd(), "dooor", "migrations");
888
+ const chalk = await Promise.resolve().then(() => __importStar(require("chalk"))).then(m => m.default);
889
+ console.log(chalk.bold("\n📋 Migration Status\n"));
890
+ // Fetch applied migrations from the server
891
+ let appliedMigrations = [];
892
+ try {
893
+ const database = client.getDefaultDatabase();
894
+ const queryParam = database ? `?database=${database}` : '';
895
+ appliedMigrations = await client.http.get(`/migrations/history${queryParam}`);
896
+ }
897
+ catch (error) {
898
+ // If endpoint fails, assume no migrations applied
899
+ if (error.response?.status !== 404) {
900
+ console.warn(chalk.yellow(` ⚠ Could not fetch migration history: ${error.message}`));
901
+ }
902
+ }
903
+ // Read local migration files
904
+ let localMigrations = [];
905
+ if (node_fs_1.default.existsSync(migrationsDir)) {
906
+ localMigrations = node_fs_1.default.readdirSync(migrationsDir)
907
+ .filter((file) => file.endsWith(".sql") && !file.endsWith(".down.sql"))
908
+ .sort();
909
+ }
910
+ // Build map of applied migrations by version
911
+ const appliedMap = new Map();
912
+ appliedMigrations.forEach((m) => {
913
+ appliedMap.set(m.version, m);
914
+ });
915
+ // Extract versions from local files
916
+ const localVersions = new Map();
917
+ localMigrations.forEach((filename) => {
918
+ const name = filename.replace(".sql", "");
919
+ const parts = name.split("_");
920
+ let version = name;
921
+ if (parts.length >= 3 && parts[0].match(/^\d+$/) && parts[1].match(/^\d+$/)) {
922
+ version = `${parts[0]}_${parts[1]}_${parts[2]}`;
923
+ }
924
+ else if (parts.length >= 2 && parts[0].match(/^\d+$/) && parts[1].match(/^\d+$/)) {
925
+ version = `${parts[0]}_${parts[1]}`;
926
+ }
927
+ localVersions.set(version, filename);
928
+ });
929
+ // Table header
930
+ const colVersion = 28;
931
+ const colName = 40;
932
+ const colStatus = 12;
933
+ const colDate = 20;
934
+ const header = chalk.gray("─".repeat(colVersion + colName + colStatus + colDate + 9)) + "\n" +
935
+ chalk.bold(" " + "VERSION".padEnd(colVersion) + " │ " +
936
+ "NAME".padEnd(colName) + " │ " +
937
+ "STATUS".padEnd(colStatus) + " │ " +
938
+ "APPLIED AT") + "\n" +
939
+ chalk.gray("─".repeat(colVersion + colName + colStatus + colDate + 9));
940
+ console.log(header);
941
+ // Show applied migrations
942
+ if (appliedMigrations.length === 0 && localMigrations.length === 0) {
943
+ console.log(chalk.gray("\n No migrations found.\n"));
944
+ return 0;
945
+ }
946
+ // Combine and sort all migrations
947
+ const allVersions = new Set([
948
+ ...appliedMap.keys(),
949
+ ...localVersions.keys()
950
+ ]);
951
+ const sortedVersions = Array.from(allVersions).sort();
952
+ for (const version of sortedVersions) {
953
+ const applied = appliedMap.get(version);
954
+ const localFile = localVersions.get(version);
955
+ let displayName = "";
956
+ let status = "";
957
+ let statusColor = chalk.gray;
958
+ let dateStr = "";
959
+ if (applied) {
960
+ // Extract name from filename
961
+ const filenameWithoutExt = applied.filename.replace(".sql", "");
962
+ const versionPrefix = version + "_";
963
+ displayName = filenameWithoutExt.startsWith(versionPrefix)
964
+ ? filenameWithoutExt.slice(versionPrefix.length)
965
+ : filenameWithoutExt;
966
+ if (applied.status === "applied") {
967
+ status = "applied";
968
+ statusColor = chalk.green;
969
+ }
970
+ else if (applied.status === "rolled_back") {
971
+ status = "rolled_back";
972
+ statusColor = chalk.yellow;
973
+ if (applied.rolled_back_at) {
974
+ dateStr = formatDate(applied.rolled_back_at);
975
+ }
976
+ }
977
+ else {
978
+ status = applied.status;
979
+ statusColor = chalk.gray;
980
+ }
981
+ if (applied.applied_at && applied.status !== "rolled_back") {
982
+ dateStr = formatDate(applied.applied_at);
983
+ }
984
+ }
985
+ else if (localFile) {
986
+ // Pending migration (exists locally but not applied)
987
+ const filenameWithoutExt = localFile.replace(".sql", "");
988
+ const versionPrefix = version + "_";
989
+ displayName = filenameWithoutExt.startsWith(versionPrefix)
990
+ ? filenameWithoutExt.slice(versionPrefix.length)
991
+ : filenameWithoutExt;
992
+ status = "pending";
993
+ statusColor = chalk.blue;
994
+ dateStr = "-";
995
+ }
996
+ // Truncate name if too long
997
+ if (displayName.length > colName) {
998
+ displayName = displayName.slice(0, colName - 3) + "...";
999
+ }
1000
+ const row = " " + chalk.white(version.padEnd(colVersion)) + " │ " +
1001
+ chalk.white(displayName.padEnd(colName)) + " │ " +
1002
+ statusColor(status.padEnd(colStatus)) + " │ " +
1003
+ chalk.gray(dateStr);
1004
+ console.log(row);
1005
+ }
1006
+ console.log(chalk.gray("─".repeat(colVersion + colName + colStatus + colDate + 9)));
1007
+ // Summary
1008
+ const appliedCount = appliedMigrations.filter(m => m.status === "applied").length;
1009
+ const rolledBackCount = appliedMigrations.filter(m => m.status === "rolled_back").length;
1010
+ const pendingCount = Array.from(localVersions.keys()).filter(v => !appliedMap.has(v)).length;
1011
+ console.log("");
1012
+ console.log(chalk.bold("Summary:"));
1013
+ console.log(chalk.green(` ✓ Applied: ${appliedCount}`));
1014
+ if (rolledBackCount > 0) {
1015
+ console.log(chalk.yellow(` ↩ Rolled back: ${rolledBackCount}`));
1016
+ }
1017
+ if (pendingCount > 0) {
1018
+ console.log(chalk.blue(` ⏳ Pending: ${pendingCount}`));
1019
+ }
1020
+ console.log("");
1021
+ return 0;
1022
+ }
1023
+ function formatDate(isoString) {
1024
+ try {
1025
+ const date = new Date(isoString);
1026
+ return date.toLocaleString("en-US", {
1027
+ month: "short",
1028
+ day: "2-digit",
1029
+ year: "numeric",
1030
+ hour: "2-digit",
1031
+ minute: "2-digit",
1032
+ hour12: false
1033
+ });
1034
+ }
1035
+ catch {
1036
+ return isoString;
1037
+ }
1038
+ }
1039
+ async function commandMigrateRollback(steps = 1) {
1040
+ const client = getClient();
1041
+ const migrationsDir = node_path_1.default.resolve(node_process_1.default.cwd(), "dooor", "migrations");
1042
+ const chalk = await Promise.resolve().then(() => __importStar(require("chalk"))).then(m => m.default);
1043
+ console.log(chalk.bold("\n↩️ Migration Rollback\n"));
1044
+ // Check if migrations directory exists
1045
+ if (!node_fs_1.default.existsSync(migrationsDir)) {
1046
+ console.error(chalk.red("✖ No migrations directory found at dooor/migrations/"));
1047
+ return 1;
1048
+ }
1049
+ // Fetch applied migrations from the server
1050
+ let appliedMigrations = [];
1051
+ try {
1052
+ const database = client.getDefaultDatabase();
1053
+ const queryParam = database ? `?database=${database}` : '';
1054
+ appliedMigrations = await client.http.get(`/migrations/history${queryParam}`);
1055
+ }
1056
+ catch (error) {
1057
+ console.error(chalk.red(`✖ Could not fetch migration history: ${error.message}`));
1058
+ return 1;
1059
+ }
1060
+ // Filter only applied migrations and sort by version descending
1061
+ const appliedOnly = appliedMigrations
1062
+ .filter(m => m.status === "applied")
1063
+ .sort((a, b) => b.version.localeCompare(a.version));
1064
+ if (appliedOnly.length === 0) {
1065
+ console.log(chalk.yellow("No migrations to rollback."));
1066
+ return 0;
1067
+ }
1068
+ // Get the migrations to rollback (last N applied)
1069
+ const toRollback = appliedOnly.slice(0, steps);
1070
+ console.log(chalk.white(`Rolling back ${toRollback.length} migration(s):\n`));
1071
+ let rolledBack = 0;
1072
+ let failed = 0;
1073
+ for (const migration of toRollback) {
1074
+ const { version, filename } = migration;
1075
+ // Find the .down.sql file
1076
+ const downFilename = filename.replace(".sql", ".down.sql");
1077
+ const downFilePath = node_path_1.default.join(migrationsDir, downFilename);
1078
+ if (!node_fs_1.default.existsSync(downFilePath)) {
1079
+ console.log(chalk.red(` ✖ ${version} - Down file not found: ${downFilename}`));
1080
+ failed++;
1081
+ continue;
1082
+ }
1083
+ // Read the down SQL
1084
+ const downSql = node_fs_1.default.readFileSync(downFilePath, "utf-8");
1085
+ // Call the rollback endpoint
1086
+ try {
1087
+ const database = client.getDefaultDatabase();
1088
+ await client.http.post('/migrations/rollback', {
1089
+ version,
1090
+ down_sql: downSql,
1091
+ database,
1092
+ });
1093
+ console.log(chalk.green(` ✓ ${version} - ${extractMigrationName(filename)}`));
1094
+ rolledBack++;
1095
+ }
1096
+ catch (error) {
1097
+ const message = error.response?.data?.detail || error.message;
1098
+ console.log(chalk.red(` ✖ ${version} - Failed: ${message}`));
1099
+ failed++;
1100
+ // Stop on first failure
1101
+ break;
1102
+ }
1103
+ }
1104
+ console.log("");
1105
+ if (rolledBack > 0) {
1106
+ console.log(chalk.green(`✔ Rolled back ${rolledBack} migration(s) successfully.`));
1107
+ }
1108
+ if (failed > 0) {
1109
+ console.log(chalk.red(`✖ ${failed} migration(s) failed to rollback.`));
1110
+ return 1;
1111
+ }
1112
+ return 0;
1113
+ }
1114
+ function extractMigrationName(filename) {
1115
+ // Remove .sql extension
1116
+ const name = filename.replace(".sql", "");
1117
+ // Split by underscore and skip version parts (YYYYMMDD_HHMMSS_microseconds)
1118
+ const parts = name.split("_");
1119
+ if (parts.length >= 4 && parts[0].match(/^\d+$/) && parts[1].match(/^\d+$/)) {
1120
+ // Format: YYYYMMDD_HHMMSS_microseconds_name
1121
+ return parts.slice(3).join("_");
1122
+ }
1123
+ else if (parts.length >= 3 && parts[0].match(/^\d+$/) && parts[1].match(/^\d+$/)) {
1124
+ // Format: YYYYMMDD_HHMMSS_name
1125
+ return parts.slice(2).join("_");
1126
+ }
1127
+ return name;
1128
+ }
598
1129
  function mapFieldTypeToTs(field) {
599
1130
  switch (field.type) {
600
1131
  case __1.FieldType.STRING:
@@ -732,7 +1263,7 @@ async function commandGenerateTypes(schemaDir, outPath) {
732
1263
  if (!resolvedOut) {
733
1264
  resolvedOut = "dooor/generated/cortex-schema.ts";
734
1265
  }
735
- const desiredSchema = loadYamlFiles(schemaDir);
1266
+ const desiredSchema = loadAllSchemas(schemaDir);
736
1267
  desiredSchema.collections.forEach(validateCollection);
737
1268
  const absoluteOut = node_path_1.default.resolve(node_process_1.default.cwd(), resolvedOut);
738
1269
  const content = buildTypesContent(schemaDir, desiredSchema);
@@ -742,6 +1273,271 @@ async function commandGenerateTypes(schemaDir, outPath) {
742
1273
  console.log(`✔ Types generated at ${absoluteOut}`);
743
1274
  return 0;
744
1275
  }
1276
/**
 * `cortexdb schema validate` — loads every schema definition, runs the
 * structural per-collection validation, then (when available) the advanced
 * decorator-based validator.
 *
 * @param {string} schemaDir - Directory containing YAML/TS schema files.
 * @returns {Promise<number>} Process exit code: 0 when valid, 1 on errors.
 */
async function commandSchemaValidate(schemaDir) {
    const chalk = await Promise.resolve().then(() => __importStar(require("chalk"))).then(m => m.default);
    console.log(chalk.bold("\n🔍 Schema Validation\n"));
    try {
        // Load all schemas (YAML and TS variants alike).
        const desiredSchema = loadAllSchemas(schemaDir);
        const { collections } = desiredSchema;
        if (collections.length === 0) {
            console.log(chalk.yellow("No schemas found to validate."));
            return 0;
        }
        console.log(chalk.white(`Found ${collections.length} collection(s) to validate:\n`));
        let hasErrors = false;
        const allWarnings = [];
        // Pass 1: structural validation, one collection at a time.
        for (const collection of collections) {
            try {
                validateCollection(collection);
                console.log(chalk.green(` ✓ ${collection.name}`));
            }
            catch (error) {
                console.log(chalk.red(` ✖ ${collection.name}: ${error.message}`));
                hasErrors = true;
            }
        }
        // Pass 2: optional deep validation from the validator module.
        try {
            // eslint-disable-next-line @typescript-eslint/no-var-requires
            const validatorModule = require("../schema-decorators/validator");
            // Reshape DSLCollection[] into the structure validateSchema expects.
            const collectionsForValidation = collections.map((col) => ({
                name: col.name,
                description: col.description,
                database: col.database,
                config: col.config,
                fields: col.fields.map((field) => ({
                    name: field.name,
                    type: field.type,
                    description: field.description,
                    required: field.required,
                    indexed: field.indexed,
                    unique: field.unique,
                    filterable: field.filterable,
                    vectorize: field.vectorize,
                    default: field.default,
                    values: field.values,
                    store_in: field.store_in,
                    extract_config: field.extract_config,
                    schema: field.schema,
                })),
            }));
            const validationResult = validatorModule.validateSchema(collectionsForValidation);
            if (validationResult.warnings && validationResult.warnings.length > 0) {
                allWarnings.push(...validationResult.warnings);
            }
            if (!validationResult.valid) {
                hasErrors = true;
                console.log("");
                console.log(validatorModule.formatValidationErrors(validationResult));
            }
        }
        catch (validatorError) {
            // A missing validator module is fine; anything else is surfaced.
            if (validatorError.code !== "MODULE_NOT_FOUND") {
                console.warn(chalk.yellow(`\n ⚠ Advanced validation skipped: ${validatorError.message}`));
            }
        }
        console.log("");
        if (allWarnings.length > 0) {
            console.log(chalk.yellow("Warnings:"));
            for (const warning of allWarnings) {
                console.log(chalk.yellow(` ⚠ ${warning}`));
            }
            console.log("");
        }
        if (hasErrors) {
            console.log(chalk.red("✖ Schema validation failed with errors."));
            return 1;
        }
        console.log(chalk.green(`✔ All ${collections.length} schema(s) are valid.`));
        return 0;
    }
    catch (error) {
        console.error(chalk.red(`✖ ${error.message}`));
        return 1;
    }
}
1366
/**
 * `cortexdb db pull` — introspects the connected database and writes one
 * schema file per user table into dooor/schemas/.
 *
 * @param {"yaml"|"ts"} [format="yaml"] - Output flavor for the generated files.
 * @returns {Promise<number>} Process exit code: 0 on success, 1 on failure.
 */
async function commandDbPull(format = "yaml") {
    const client = getClient();
    const chalk = await Promise.resolve().then(() => __importStar(require("chalk"))).then(m => m.default);
    const outputDir = node_path_1.default.resolve(node_process_1.default.cwd(), "dooor", "schemas");
    console.log(chalk.bold("\n📥 Database Introspection\n"));
    try {
        const database = client.getDefaultDatabase();
        // Encode the database name so special characters (spaces, '&', '#', …)
        // cannot corrupt the query string.
        const queryParam = database ? `?database=${encodeURIComponent(database)}` : '';
        const tables = await client.http.get(`/database/introspect${queryParam}`);
        if (tables.length === 0) {
            console.log(chalk.yellow("No user tables found in the database."));
            return 0;
        }
        console.log(chalk.white(`Found ${tables.length} table(s):\n`));
        // Ensure output directory exists before writing any schema file.
        node_fs_1.default.mkdirSync(outputDir, { recursive: true });
        const generatedFiles = [];
        for (const table of tables) {
            console.log(chalk.white(` Processing ${table.name}...`));
            if (format === "yaml") {
                const yamlContent = generateYamlSchema(table);
                const filePath = node_path_1.default.join(outputDir, `${table.name}.yaml`);
                node_fs_1.default.writeFileSync(filePath, yamlContent, "utf-8");
                generatedFiles.push(filePath);
                console.log(chalk.green(` ✓ Generated ${table.name}.yaml`));
            }
            else {
                const tsContent = generateTsSchema(table);
                const filePath = node_path_1.default.join(outputDir, `${table.name}.schema.ts`);
                node_fs_1.default.writeFileSync(filePath, tsContent, "utf-8");
                generatedFiles.push(filePath);
                console.log(chalk.green(` ✓ Generated ${table.name}.schema.ts`));
            }
        }
        console.log("");
        console.log(chalk.green(`✔ Generated ${generatedFiles.length} schema file(s) in ${outputDir}`));
        return 0;
    }
    catch (error) {
        const message = error.response?.data?.detail || error.message;
        console.error(chalk.red(`✖ Introspection failed: ${message}`));
        return 1;
    }
}
1410
/**
 * Maps a PostgreSQL column type name to a CortexDB field type.
 * Unknown types fall back to 'string'.
 *
 * @param {string} pgType - PostgreSQL type name, possibly with a modifier
 *   (e.g. "varchar(255)", "numeric(10,2)") and in any letter case.
 * @returns {string} CortexDB field type identifier.
 */
function pgTypeToFieldType(pgType) {
    const typeMap = {
        // Text types
        'text': 'text',
        'varchar': 'string',
        'character varying': 'string',
        'char': 'string',
        'character': 'string',
        'name': 'string',
        // Numeric types
        'integer': 'int',
        'int': 'int',
        'int4': 'int',
        'smallint': 'int',
        'int2': 'int',
        'bigint': 'int',
        'int8': 'int',
        'serial': 'int',
        'bigserial': 'int',
        'real': 'float',
        'float4': 'float',
        'double precision': 'float',
        'float8': 'float',
        'numeric': 'float',
        'decimal': 'float',
        // Boolean
        'boolean': 'boolean',
        'bool': 'boolean',
        // Date/Time
        'date': 'date',
        'timestamp': 'datetime',
        'timestamp without time zone': 'datetime',
        'timestamp with time zone': 'datetime',
        'timestamptz': 'datetime',
        'time': 'string',
        // JSON
        'json': 'json',
        'jsonb': 'json',
        // UUID
        'uuid': 'string',
        // Arrays - simplified
        'array': 'json',
        // Binary
        'bytea': 'file',
    };
    // Strip any type modifier. The previous /\(\d+\)/ pattern missed
    // multi-argument modifiers such as numeric(10,2), which then fell
    // through to the 'string' default instead of 'float'.
    const normalized = pgType.toLowerCase().replace(/\([^)]*\)/, '').trim();
    // Array-suffixed types ("text[]") map to json, matching the 'array' entry.
    if (normalized.endsWith('[]')) {
        return 'json';
    }
    return typeMap[normalized] || 'string';
}
1458
/**
 * Serializes an introspected table into a CortexDB YAML schema document.
 * Auto-managed bookkeeping columns (id, created_at, updated_at) are omitted.
 *
 * @param {{ name: string, columns: Array<object> }} table - Introspected table.
 * @returns {string} YAML document text.
 */
function generateYamlSchema(table) {
    const yaml = require('yaml');
    const autoColumns = new Set(['id', 'created_at', 'updated_at']);
    const fields = [];
    for (const col of table.columns) {
        if (autoColumns.has(col.name)) {
            continue;
        }
        const field = {
            name: col.name,
            type: pgTypeToFieldType(col.type),
        };
        if (!col.nullable) {
            field.required = true;
        }
        if (col.is_unique) {
            field.unique = true;
        }
        if (col.is_primary) {
            field.indexed = true;
        }
        fields.push(field);
    }
    return yaml.stringify({
        version: 1,
        collections: [
            {
                name: table.name,
                fields,
            },
        ],
    });
}
1489
/**
 * Renders a decorator-based TypeScript schema class for an introspected table.
 * Auto-generated bookkeeping columns (id, created_at, updated_at) are skipped.
 *
 * @param {{ name: string, columns: Array<object> }} table - Introspected table.
 * @returns {string} TypeScript source text for the schema file.
 */
function generateTsSchema(table) {
    const skippedColumns = new Set(['id', 'created_at', 'updated_at']);
    // snake_case table name -> PascalCase class name.
    const className = table.name
        .split('_')
        .map((segment) => segment.charAt(0).toUpperCase() + segment.slice(1))
        .join('');
    const out = [
        `import { Collection, Field } from '@dooor-ai/cortexdb';`,
        '',
        `@Collection({`,
        ` name: '${table.name}',`,
        `})`,
        `export class ${className}Schema {`,
    ];
    for (const column of table.columns) {
        if (skippedColumns.has(column.name)) {
            continue;
        }
        const fieldType = pgTypeToFieldType(column.type);
        const options = [`type: '${fieldType}'`];
        if (!column.nullable) {
            options.push('required: true');
        }
        if (column.is_unique) {
            options.push('unique: true');
        }
        if (column.is_primary) {
            options.push('indexed: true');
        }
        out.push(` @Field({ ${options.join(', ')} })`);
        out.push(` ${column.name}!: ${fieldTypeToTsType(fieldType)};`);
        out.push('');
    }
    out.push('}');
    out.push('');
    return out.join('\n');
}
1525
/**
 * Maps a CortexDB field type to the TypeScript type used in generated schemas.
 * Unknown field types fall back to 'any'.
 *
 * @param {string} fieldType - CortexDB field type identifier.
 * @returns {string} TypeScript type name.
 */
function fieldTypeToTsType(fieldType) {
    switch (fieldType) {
        case 'string':
        case 'text':
        case 'date':
        case 'datetime':
        case 'file':
        case 'enum':
            return 'string';
        case 'int':
        case 'float':
            return 'number';
        case 'boolean':
            return 'boolean';
        case 'json':
            return 'any';
        case 'array':
            return 'any[]';
        default:
            return 'any';
    }
}
745
1541
  async function main() {
746
1542
  const argv = node_process_1.default.argv.slice(2);
747
1543
  if (argv.length === 0) {
@@ -758,8 +1554,84 @@ async function main() {
758
1554
  printUsage();
759
1555
  node_process_1.default.exit(0);
760
1556
  }
1557
+ if (topic === "db") {
1558
+ if (command === "reset") {
1559
+ const options = parseArgs(rest);
1560
+ const force = options.force === true;
1561
+ try {
1562
+ const exitCode = await commandDbReset(force);
1563
+ node_process_1.default.exit(exitCode);
1564
+ }
1565
+ catch (error) {
1566
+ console.error(`✖ ${error.message}`);
1567
+ node_process_1.default.exit(1);
1568
+ }
1569
+ }
1570
+ else if (command === "pull") {
1571
+ const options = parseArgs(rest);
1572
+ let format = "yaml";
1573
+ if (typeof options.format === "string") {
1574
+ const normalizedFormat = options.format.toLowerCase();
1575
+ if (normalizedFormat !== "yaml" && normalizedFormat !== "ts") {
1576
+ console.error("✖ --format must be 'yaml' or 'ts'");
1577
+ node_process_1.default.exit(1);
1578
+ }
1579
+ format = normalizedFormat;
1580
+ }
1581
+ try {
1582
+ const exitCode = await commandDbPull(format);
1583
+ node_process_1.default.exit(exitCode);
1584
+ }
1585
+ catch (error) {
1586
+ console.error(`✖ ${error.message}`);
1587
+ node_process_1.default.exit(1);
1588
+ }
1589
+ }
1590
+ else {
1591
+ console.error(`Unknown db command '${command}'. Expected 'reset' or 'pull'.\n`);
1592
+ printUsage();
1593
+ node_process_1.default.exit(1);
1594
+ }
1595
+ }
1596
+ if (topic === "migrate") {
1597
+ if (command === "status") {
1598
+ try {
1599
+ const exitCode = await commandMigrateStatus();
1600
+ node_process_1.default.exit(exitCode);
1601
+ }
1602
+ catch (error) {
1603
+ console.error(`✖ ${error.message}`);
1604
+ node_process_1.default.exit(1);
1605
+ }
1606
+ }
1607
+ else if (command === "rollback") {
1608
+ try {
1609
+ const options = parseArgs(rest);
1610
+ let steps = 1;
1611
+ if (typeof options.steps === "string") {
1612
+ const parsed = parseInt(options.steps, 10);
1613
+ if (isNaN(parsed) || parsed < 1) {
1614
+ console.error("✖ --steps must be a positive integer");
1615
+ node_process_1.default.exit(1);
1616
+ }
1617
+ steps = parsed;
1618
+ }
1619
+ const exitCode = await commandMigrateRollback(steps);
1620
+ node_process_1.default.exit(exitCode);
1621
+ }
1622
+ catch (error) {
1623
+ console.error(`✖ ${error.message}`);
1624
+ node_process_1.default.exit(1);
1625
+ }
1626
+ }
1627
+ else {
1628
+ console.error(`Unknown migrate command '${command}'. Expected 'status' or 'rollback'.\n`);
1629
+ printUsage();
1630
+ node_process_1.default.exit(1);
1631
+ }
1632
+ }
761
1633
  if (topic !== "schema") {
762
- console.error(`Unknown topic '${topic}'. Expected 'schema'.\n`);
1634
+ console.error(`Unknown topic '${topic}'. Expected 'schema', 'migrate', or 'db'.\n`);
763
1635
  printUsage();
764
1636
  node_process_1.default.exit(1);
765
1637
  }
@@ -784,6 +1656,9 @@ async function main() {
784
1656
  exitCode = typesResult !== 0 ? typesResult : exitCode;
785
1657
  }
786
1658
  }
1659
+ else if (command === "validate") {
1660
+ exitCode = await commandSchemaValidate(schemaDir);
1661
+ }
787
1662
  else if (command === "generate-types") {
788
1663
  exitCode = await commandGenerateTypes(schemaDir, options.out);
789
1664
  }